diff options
author | obrien <obrien@FreeBSD.org> | 2002-05-09 20:02:13 +0000 |
---|---|---|
committer | obrien <obrien@FreeBSD.org> | 2002-05-09 20:02:13 +0000 |
commit | c8f5fc7032940ad6633f932ac40cade82ec4d0cc (patch) | |
tree | 29a0f0a6c79a69ecc64f612947a0fe5904311713 /contrib/gcc/config/rs6000 | |
parent | c9ab9ae440a8066b2c2b85b157b1fdadcf09916a (diff) | |
download | FreeBSD-src-c8f5fc7032940ad6633f932ac40cade82ec4d0cc.zip FreeBSD-src-c8f5fc7032940ad6633f932ac40cade82ec4d0cc.tar.gz |
Gcc 3.1.0 pre-release from the FSF anoncvs repo on 9-May-2002 15:57:15 EDT.
Diffstat (limited to 'contrib/gcc/config/rs6000')
-rw-r--r-- | contrib/gcc/config/rs6000/aix.h | 5 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/aix43.h | 4 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/aix51.h | 11 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/altivec.h | 8733 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/crtsavres.asm | 407 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/darwin.h | 14 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/eabi.asm | 362 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/gnu.h | 38 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/linux64.h | 78 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/netbsd.h | 5 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/ppc-asm.h | 1 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/rs6000-protos.h | 2 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/rs6000.c | 710 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/rs6000.h | 219 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/rs6000.md | 591 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/rtems.h | 12 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/sysv4.h | 104 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/t-aix43 | 2 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/t-linux64 | 16 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/t-ppccomm | 11 | ||||
-rw-r--r-- | contrib/gcc/config/rs6000/xcoff.h | 28 |
21 files changed, 9253 insertions, 2100 deletions
diff --git a/contrib/gcc/config/rs6000/aix.h b/contrib/gcc/config/rs6000/aix.h index 2738a37..d8dde5b 100644 --- a/contrib/gcc/config/rs6000/aix.h +++ b/contrib/gcc/config/rs6000/aix.h @@ -1,6 +1,6 @@ /* Definitions of target machine for GNU compiler, for IBM RS/6000 POWER running AIX. - Copyright (C) 2000, 2001 Free Software Foundation, Inc. + Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc. This file is part of GNU CC. @@ -27,6 +27,9 @@ Boston, MA 02111-1307, USA. */ collect has a chance to see them, so scan the object files directly. */ #define COLLECT_EXPORT_LIST +/* Handle #pragma weak and #pragma pack. */ +#define HANDLE_SYSV_PRAGMA + /* This is the only version of nm that collect2 can work with. */ #define REAL_NM_FILE_NAME "/usr/ucb/nm" diff --git a/contrib/gcc/config/rs6000/aix43.h b/contrib/gcc/config/rs6000/aix43.h index 93e186c..7aa8707 100644 --- a/contrib/gcc/config/rs6000/aix43.h +++ b/contrib/gcc/config/rs6000/aix43.h @@ -199,10 +199,6 @@ do { \ %{pthread:%{pg:gcrt0_r%O%s}%{!pg:%{p:mcrt0_r%O%s}%{!p:crt0_r%O%s}}}\ %{!pthread:%{pg:gcrt0%O%s}%{!pg:%{p:mcrt0%O%s}%{!p:crt0%O%s}}}}}}" -/* Since there are separate multilibs for pthreads, determine the - thread model based on the command-line arguments. */ -#define THREAD_MODEL_SPEC "%{pthread:posix}%{!pthread:single}" - /* AIX 4.3 typedefs ptrdiff_t as "long" while earlier releases used "int". */ #undef PTRDIFF_TYPE diff --git a/contrib/gcc/config/rs6000/aix51.h b/contrib/gcc/config/rs6000/aix51.h index ae01440..121c7ba 100644 --- a/contrib/gcc/config/rs6000/aix51.h +++ b/contrib/gcc/config/rs6000/aix51.h @@ -202,10 +202,6 @@ do { \ %{pthread:%{pg:gcrt0_r%O%s}%{!pg:%{p:mcrt0_r%O%s}%{!p:crt0_r%O%s}}}\ %{!pthread:%{pg:gcrt0%O%s}%{!pg:%{p:mcrt0%O%s}%{!p:crt0%O%s}}}}}}" -/* Since there are separate multilibs for pthreads, determine the - thread model based on the command-line arguments. 
*/ -#define THREAD_MODEL_SPEC "%{pthread:posix}%{!pthread:single}" - /* AIX V5 typedefs ptrdiff_t as "long" while earlier releases used "int". */ #undef PTRDIFF_TYPE @@ -213,10 +209,13 @@ do { \ /* __WCHAR_TYPE__ is dynamic, so do not define it statically. */ #define NO_BUILTIN_WCHAR_TYPE -#undef WCHAR_TYPE -#undef WCHAR_TYPE_SIZE + +/* Type used for wchar_t, as a string used in a declaration. */ +#undef WCHAR_TYPE +#define WCHAR_TYPE (!TARGET_64BIT ? "short unsigned int" : "unsigned int") /* Width of wchar_t in bits. */ +#undef WCHAR_TYPE_SIZE #define WCHAR_TYPE_SIZE (!TARGET_64BIT ? 16 : 32) #define MAX_WCHAR_TYPE_SIZE 32 diff --git a/contrib/gcc/config/rs6000/altivec.h b/contrib/gcc/config/rs6000/altivec.h index 4d5b0a3..85869dc 100644 --- a/contrib/gcc/config/rs6000/altivec.h +++ b/contrib/gcc/config/rs6000/altivec.h @@ -35,18 +35,29 @@ Boston, MA 02111-1307, USA. */ /* Required by Motorola specs. */ #define __VEC__ 10206 +#ifndef __ALTIVEC__ #define __ALTIVEC__ 1 +#endif #define __vector __attribute__((vector_size(16))) -/* Dummy prototype. */ -extern void __altivec_link_error_invalid_argument (); - /* You are allowed to undef this for C++ compatability. */ #define vector __vector +#define bool signed +#define pixel short +#define __pixel short + +/* Dummy prototype. */ +extern int __altivec_link_error_invalid_argument (); + /* Helper macros. */ +#define __CR6_EQ 0 +#define __CR6_EQ_REV 1 +#define __CR6_LT 2 +#define __CR6_LT_REV 3 + #define __bin_args_eq(xtype, x, ytype, y) \ (__builtin_types_compatible_p (xtype, typeof (x)) \ && __builtin_types_compatible_p (ytype, typeof (y))) @@ -61,1398 +72,7784 @@ extern void __altivec_link_error_invalid_argument (); #define __ch(x, y, z) __builtin_choose_expr (x, y, z) +/* These are easy... Same exact arguments. 
*/ + +#define vec_vaddcuw vec_addc +#define vec_vand vec_and +#define vec_vandc vec_andc +#define vec_vrfip vec_ceil +#define vec_vcmpbfp vec_cmpb +#define vec_vcmpgefp vec_cmpge +#define vec_vctsxs vec_cts +#define vec_vctuxs vec_ctu +#define vec_vexptefp vec_expte +#define vec_vrfim vec_floor +#define vec_lvx vec_ld +#define vec_lvxl vec_ldl +#define vec_vlogefp vec_loge +#define vec_vmaddfp vec_madd +#define vec_vmhaddshs vec_madds +#define vec_vmladduhm vec_mladd +#define vec_vmhraddshs vec_mradds +#define vec_vnmsubfp vec_nmsub +#define vec_vnor vec_nor +#define vec_vor vec_or +#define vec_vpkpx vec_packpx +#define vec_vperm vec_perm +#define vec_vrefp vec_re +#define vec_vrfin vec_round +#define vec_vrsqrtefp vec_rsqrte +#define vec_vsel vec_sel +#define vec_vsldoi vec_sld +#define vec_vsl vec_sll +#define vec_vslo vec_slo +#define vec_vspltisb vec_splat_s8 +#define vec_vspltish vec_splat_s16 +#define vec_vspltisw vec_splat_s32 +#define vec_vsr vec_srl +#define vec_vsro vec_sro +#define vec_stvx vec_st +#define vec_stvxl vec_stl +#define vec_vsubcuw vec_subc +#define vec_vsum2sws vec_sum2s +#define vec_vsumsws vec_sums +#define vec_vrfiz vec_trunc +#define vec_vxor vec_xor + #ifdef __cplusplus -/* C++ stuff here. */ +/* Prototypes for builtins that take literals and must always be + inlined. 
*/ +inline vector float vec_ctf (vector unsigned int, const char) __attribute__ ((always_inline)); +inline vector float vec_ctf (vector signed int, const char) __attribute__ ((always_inline)); +inline vector signed int vec_cts (vector float, const char) __attribute__ ((always_inline)); +inline vector unsigned int vec_ctu (vector float, const char) __attribute__ ((always_inline)); +inline void vec_dss (const char) __attribute__ ((always_inline)); +inline void vec_dst (void *, int, const char) __attribute__ ((always_inline)); +inline void vec_dstst (void *, int, const char) __attribute__ ((always_inline)); +inline void vec_dststt (void *, int, const char) __attribute__ ((always_inline)); +inline void vec_dstt (void *, int, const char) __attribute__ ((always_inline)); +inline vector float vec_sld (vector float, vector float, const char) __attribute__ ((always_inline)); +inline vector signed int vec_sld (vector signed int, vector signed int, const char) __attribute__ ((always_inline)); +inline vector unsigned int vec_sld (vector unsigned int, vector unsigned int, const char) __attribute__ ((always_inline)); +inline vector signed short vec_sld (vector signed short, vector signed short, const char) __attribute__ ((always_inline)); +inline vector unsigned short vec_sld (vector unsigned short, vector unsigned short, const char) __attribute__ ((always_inline)); +inline vector signed char vec_sld (vector signed char, vector signed char, const char) __attribute__ ((always_inline)); +inline vector unsigned char vec_sld (vector unsigned char, vector unsigned char, const char) __attribute__ ((always_inline)); +inline vector signed char vec_splat (vector signed char, const char) __attribute__ ((always_inline)); +inline vector unsigned char vec_splat (vector unsigned char, const char) __attribute__ ((always_inline)); +inline vector signed short vec_splat (vector signed short, const char) __attribute__ ((always_inline)); +inline vector unsigned short vec_splat (vector unsigned 
short, const char) __attribute__ ((always_inline)); +inline vector float vec_splat (vector float, const char) __attribute__ ((always_inline)); +inline vector signed int vec_splat (vector signed int, const char) __attribute__ ((always_inline)); +inline vector unsigned int vec_splat (vector unsigned int, const char) __attribute__ ((always_inline)); +inline vector signed char vec_splat_s8 (const char) __attribute__ ((always_inline)); +inline vector signed short vec_splat_s16 (const char) __attribute__ ((always_inline)); +inline vector signed int vec_splat_s32 (const char) __attribute__ ((always_inline)); +inline vector unsigned char vec_splat_u8 (const char) __attribute__ ((always_inline)); +inline vector unsigned short vec_splat_u16 (const char) __attribute__ ((always_inline)); +inline vector unsigned int vec_splat_u32 (const char) __attribute__ ((always_inline)); + +/* vec_abs */ + +inline vector signed char +vec_abs (vector signed char a1) +{ + return __builtin_altivec_abs_v16qi (a1); +} + +inline vector signed short +vec_abs (vector signed short a1) +{ + return __builtin_altivec_abs_v8hi (a1); +} + +inline vector signed int +vec_abs (vector signed int a1) +{ + return __builtin_altivec_abs_v4si (a1); +} + +inline vector float +vec_abs (vector float a1) +{ + return __builtin_altivec_abs_v4sf (a1); +} + +/* vec_abss */ + +inline vector signed char +vec_abss (vector signed char a1) +{ + return __builtin_altivec_abss_v16qi (a1); +} + +inline vector signed short +vec_abss (vector signed short a1) +{ + return __builtin_altivec_abss_v8hi (a1); +} + +inline vector signed int +vec_abss (vector signed int a1) +{ + return __builtin_altivec_abss_v4si (a1); +} + +/* vec_add */ + +inline vector signed char +vec_add (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_add (vector signed char a1, vector unsigned char a2) +{ + return 
(vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_add (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_add (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_add (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_add (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_add (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_add (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_add (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_add (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_add (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_add (vector 
unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_add (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vaddfp ((vector float) a1, (vector float) a2); +} + +/* vec_vaddfp */ + +inline vector float +vec_vaddfp (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vaddfp ((vector float) a1, (vector float) a2); +} + +/* vec_vadduwm */ + +inline vector signed int +vec_vadduwm (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vadduwm (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vadduwm (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vadduwm (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vadduhm */ + +inline vector signed short +vec_vadduhm (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vadduhm (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vadduhm (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +inline 
vector unsigned short +vec_vadduhm (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vaddubm */ + +inline vector signed char +vec_vaddubm (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vaddubm (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vaddubm (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vaddubm (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_addc */ + +inline vector unsigned int +vec_addc (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vaddcuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_adds */ + +inline vector unsigned char +vec_adds (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_adds (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_adds (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_adds (vector signed char a1, vector signed char 
a2) +{ + return (vector signed char) __builtin_altivec_vaddsbs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_adds (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_adds (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_adds (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_adds (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vaddshs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_adds (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_adds (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_adds (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_adds (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vaddsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vaddsws */ + +inline vector signed int +vec_vaddsws (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vaddsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vadduws */ + 
+inline vector unsigned int +vec_vadduws (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vadduws (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vadduws (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vaddshs */ +inline vector signed short +vec_vaddshs (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vaddshs ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vadduhs */ + +inline vector unsigned short +vec_vadduhs (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vadduhs (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vadduhs (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vaddsbs */ + +inline vector signed char +vec_vaddsbs (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vaddsbs ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vaddubs */ + +inline vector unsigned char +vec_vaddubs (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char 
+vec_vaddubs (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vaddubs (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_and */ + +inline vector float +vec_and (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_and (vector float a1, vector signed int a2) +{ + return (vector float) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_and (vector signed int a1, vector float a2) +{ + return (vector float) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_and (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_and (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_and (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_and (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_and (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_and (vector signed short a1, vector unsigned short a2) +{ 
+ return (vector unsigned short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_and (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_and (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_and (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_and (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_and (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_and (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_andc */ + +inline vector float +vec_andc (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_andc (vector float a1, vector signed int a2) +{ + return (vector float) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_andc (vector signed int a1, vector float a2) +{ + return (vector float) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_andc (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vandc 
((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_andc (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_andc (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_andc (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_andc (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_andc (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_andc (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_andc (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_andc (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_andc (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_andc (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) 
__builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_andc (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_avg */ + +inline vector unsigned char +vec_avg (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vavgub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_avg (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vavgsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_avg (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vavguh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_avg (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vavgsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_avg (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vavguw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_avg (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vavgsw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vavgsw */ + +inline vector signed int +vec_vavgsw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vavgsw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vavguw */ + +inline vector unsigned int +vec_vavguw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vavguw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vavgsh */ + +inline vector signed short 
+vec_vavgsh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vavgsh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vavguh */ + +inline vector unsigned short +vec_vavguh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vavguh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vavgsb */ + +inline vector signed char +vec_vavgsb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vavgsb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vavgub */ + +inline vector unsigned char +vec_vavgub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vavgub ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_ceil */ + +inline vector float +vec_ceil (vector float a1) +{ + return (vector float) __builtin_altivec_vrfip ((vector float) a1); +} + +/* vec_cmpb */ + +inline vector signed int +vec_cmpb (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpbfp ((vector float) a1, (vector float) a2); +} + +/* vec_cmpeq */ + +inline vector signed char +vec_cmpeq (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_cmpeq (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_cmpeq (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_cmpeq (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpequh ((vector 
signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_cmpeq (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpeq (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpeq (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpeqfp ((vector float) a1, (vector float) a2); +} + +/* vec_vcmpeqfp */ + +inline vector signed int +vec_vcmpeqfp (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpeqfp ((vector float) a1, (vector float) a2); +} + +/* vec_vcmpequw */ + +inline vector signed int +vec_vcmpequw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_vcmpequw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vcmpequh */ + +inline vector signed short +vec_vcmpequh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_vcmpequh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vcmpequb */ + +inline vector signed char +vec_vcmpequb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_vcmpequb (vector unsigned char a1, vector 
unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_cmpge */ + +inline vector signed int +vec_cmpge (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgefp ((vector float) a1, (vector float) a2); +} + +/* vec_cmpgt */ + +inline vector signed char +vec_cmpgt (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_cmpgt (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_cmpgt (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_cmpgt (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_cmpgt (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpgt (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmpgt (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2); +} + +/* vec_vcmpgtfp */ + +inline vector signed int +vec_vcmpgtfp (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2); +} + +/* vec_vcmpgtsw */ + +inline vector signed int 
+vec_vcmpgtsw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vcmpgtuw */ + +inline vector signed int +vec_vcmpgtuw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vcmpgtsh */ + +inline vector signed short +vec_cmpgtsh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vcmpgtuh */ + +inline vector signed short +vec_vcmpgtuh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vcmpgtsb */ + +inline vector signed char +vec_vcmpgtsb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vcmpgtub */ + +inline vector signed char +vec_vcmpgtub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_cmple */ + +inline vector signed int +vec_cmple (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgefp ((vector float) a1, (vector float) a2); +} + +/* vec_cmplt */ + +inline vector signed char +vec_cmplt (vector unsigned char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_cmplt (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_cmplt (vector 
unsigned short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_cmplt (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_cmplt (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmplt (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_cmplt (vector float a1, vector float a2) +{ + return (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2); +} + +/* vec_ctf */ + +inline vector float +vec_ctf (vector unsigned int a1, const char a2) +{ + return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, a2); +} + +inline vector float +vec_ctf (vector signed int a1, const char a2) +{ + return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, a2); +} + +/* vec_vcfsx */ + +inline vector float +vec_vcfsx (vector signed int a1, const char a2) +{ + return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, a2); +} + +/* vec_vcfux */ + +inline vector float +vec_vcfux (vector unsigned int a1, const char a2) +{ + return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, a2); +} + +/* vec_cts */ + +inline vector signed int +vec_cts (vector float a1, const char a2) +{ + return (vector signed int) __builtin_altivec_vctsxs ((vector float) a1, a2); +} + +/* vec_ctu */ + +inline vector unsigned int +vec_ctu (vector float a1, const char a2) +{ + return (vector unsigned int) __builtin_altivec_vctuxs ((vector float) a1, a2); +} + +/* vec_dss */ + +inline 
void +vec_dss (const char a1) +{ + __builtin_altivec_dss (a1); +} + +/* vec_dssall */ + +inline void +vec_dssall () +{ + __builtin_altivec_dssall (); +} + +/* vec_dst */ + +inline void +vec_dst (void *a1, int a2, const char a3) +{ + __builtin_altivec_dst ((void *) a1, a2, a3); +} + +/* vec_dstst */ + +inline void +vec_dstst (void *a1, int a2, const char a3) +{ + __builtin_altivec_dstst ((void *) a1, a2, a3); +} + +/* vec_dststt */ + +inline void +vec_dststt (void *a1, int a2, const char a3) +{ + __builtin_altivec_dststt ((void *) a1, a2, a3); +} + +/* vec_dstt */ + +inline void +vec_dstt (void *a1, int a2, const char a3) +{ + __builtin_altivec_dstt ((void *) a1, a2, a3); +} + +/* vec_expte */ + +inline vector float +vec_expte (vector float a1) +{ + return (vector float) __builtin_altivec_vexptefp ((vector float) a1); +} + +/* vec_floor */ + +inline vector float +vec_floor (vector float a1) +{ + return (vector float) __builtin_altivec_vrfim ((vector float) a1); +} + +/* vec_ld */ + +inline vector float +vec_ld (int a1, vector float *a2) +{ + return (vector float) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector float +vec_ld (int a1, float *a2) +{ + return (vector float) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed int +vec_ld (int a1, vector signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed int +vec_ld (int a1, signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned int +vec_ld (int a1, vector unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned int +vec_ld (int a1, unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed short +vec_ld (int a1, vector signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed short 
+vec_ld (int a1, signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned short +vec_ld (int a1, vector unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned short +vec_ld (int a1, unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed char +vec_ld (int a1, vector signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector signed char +vec_ld (int a1, signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned char +vec_ld (int a1, vector unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvx (a1, (void *) a2); +} + +inline vector unsigned char +vec_ld (int a1, unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvx (a1, (void *) a2); +} + +/* vec_lde */ + +inline vector signed char +vec_lde (int a1, signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvebx (a1, (void *) a2); +} + +inline vector unsigned char +vec_lde (int a1, unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvebx (a1, (void *) a2); +} + +inline vector signed short +vec_lde (int a1, signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvehx (a1, (void *) a2); +} + +inline vector unsigned short +vec_lde (int a1, unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvehx (a1, (void *) a2); +} + +inline vector float +vec_lde (int a1, float *a2) +{ + return (vector float) __builtin_altivec_lvewx (a1, (void *) a2); +} + +inline vector signed int +vec_lde (int a1, signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvewx (a1, (void *) a2); +} + +inline vector unsigned int +vec_lde (int a1, unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvewx 
(a1, (void *) a2); +} + +/* vec_lvewx */ + +inline vector float +vec_lvewx (int a1, float *a2) +{ + return (vector float) __builtin_altivec_lvewx (a1, (void *) a2); +} + +inline vector signed int +vec_lvewx (int a1, signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvewx (a1, (void *) a2); +} + +inline vector unsigned int +vec_lvewx (int a1, unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvewx (a1, (void *) a2); +} + +/* vec_lvehx */ + +inline vector signed short +vec_lvehx (int a1, signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvehx (a1, (void *) a2); +} + +inline vector unsigned short +vec_lvehx (int a1, unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvehx (a1, (void *) a2); +} + +/* vec_lvebx */ + +inline vector signed char +vec_lvebx (int a1, signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvebx (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvebx (int a1, unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvebx (a1, (void *) a2); +} + +/* vec_ldl */ + +inline vector float +vec_ldl (int a1, vector float *a2) +{ + return (vector float) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector float +vec_ldl (int a1, float *a2) +{ + return (vector float) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed int +vec_ldl (int a1, vector signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed int +vec_ldl (int a1, signed int *a2) +{ + return (vector signed int) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned int +vec_ldl (int a1, vector unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned int +vec_ldl (int a1, unsigned int *a2) +{ + return (vector unsigned int) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed short +vec_ldl (int a1, 
vector signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed short +vec_ldl (int a1, signed short *a2) +{ + return (vector signed short) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned short +vec_ldl (int a1, vector unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned short +vec_ldl (int a1, unsigned short *a2) +{ + return (vector unsigned short) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed char +vec_ldl (int a1, vector signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector signed char +vec_ldl (int a1, signed char *a2) +{ + return (vector signed char) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned char +vec_ldl (int a1, vector unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvxl (a1, (void *) a2); +} + +inline vector unsigned char +vec_ldl (int a1, unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvxl (a1, (void *) a2); +} + +/* vec_loge */ + +inline vector float +vec_loge (vector float a1) +{ + return (vector float) __builtin_altivec_vlogefp ((vector float) a1); +} + +/* vec_lvsl */ + +inline vector unsigned char +vec_lvsl (int a1, unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsl (int a1, signed char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsl (int a1, unsigned short *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsl (int a1, signed short *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsl (int a1, unsigned int *a2) +{ + return (vector 
unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsl (int a1, signed int *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsl (int a1, float *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsl (a1, (void *) a2); +} + +/* vec_lvsr */ + +inline vector unsigned char +vec_lvsr (int a1, unsigned char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsr (int a1, signed char *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsr (int a1, unsigned short *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsr (int a1, signed short *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsr (int a1, unsigned int *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsr (int a1, signed int *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +inline vector unsigned char +vec_lvsr (int a1, float *a2) +{ + return (vector unsigned char) __builtin_altivec_lvsr (a1, (void *) a2); +} + +/* vec_madd */ + +inline vector float +vec_madd (vector float a1, vector float a2, vector float a3) +{ + return (vector float) __builtin_altivec_vmaddfp ((vector float) a1, (vector float) a2, (vector float) a3); +} + + +/* vec_madds */ + +inline vector signed short +vec_madds (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmhaddshs ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +/* vec_max */ + +inline vector unsigned char +vec_max (vector signed char a1, vector unsigned char a2) 
+{ + return (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_max (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_max (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_max (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmaxsb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_max (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_max (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_max (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_max (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmaxsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_max (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_max (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_max 
(vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_max (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmaxsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_max (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmaxfp ((vector float) a1, (vector float) a2); +} + +/* vec_vmaxfp */ + +inline vector float +vec_vmaxfp (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmaxfp ((vector float) a1, (vector float) a2); +} + +/* vec_vmaxsw */ + +inline vector signed int +vec_vmaxsw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmaxsw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vmaxuw */ + +inline vector unsigned int +vec_vmaxuw (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vmaxuw (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vmaxuw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vmaxsh */ + +inline vector signed short +vec_vmaxsh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmaxsh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmaxuh */ + +inline vector unsigned short +vec_vmaxuh (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} 
+ +inline vector unsigned short +vec_vmaxuh (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vmaxuh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmaxsb */ + +inline vector signed char +vec_vmaxsb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmaxsb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vmaxub */ + +inline vector unsigned char +vec_vmaxub (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vmaxub (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vmaxub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_mergeh */ + +inline vector signed char +vec_mergeh (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_mergeh (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mergeh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_mergeh (vector unsigned short a1, 
vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector float +vec_mergeh (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_mergeh (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_mergeh (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vmrghw */ + +inline vector float +vec_vmrghw (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_vmrghw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vmrghw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vmrghh */ + +inline vector signed short +vec_vmrghh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vmrghh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmrghb */ + +inline vector signed char +vec_vmrghb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned 
char +vec_vmrghb (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_mergel */ + +inline vector signed char +vec_mergel (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_mergel (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mergel (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_mergel (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector float +vec_mergel (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_mergel (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_mergel (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vmrglw */ + +inline vector float +vec_vmrglw (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_vmrglw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + 
+inline vector unsigned int +vec_vmrglw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vmrglh */ + +inline vector signed short +vec_vmrglh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vmrglh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmrglb */ + +inline vector signed char +vec_vmrglb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vmrglb (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_mfvscr */ + +inline vector unsigned short +vec_mfvscr () +{ + return (vector unsigned short) __builtin_altivec_mfvscr (); +} + +/* vec_min */ + +inline vector unsigned char +vec_min (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_min (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_min (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_min (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vminsb ((vector signed 
char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_min (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_min (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_min (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_min (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vminsh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_min (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_min (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_min (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_min (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vminsw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_min (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vminfp ((vector float) a1, (vector float) a2); +} + +/* vec_vminfp */ + +inline vector float +vec_vminfp (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vminfp ((vector float) a1, (vector 
float) a2); +} + +/* vec_vminsw */ + +inline vector signed int +vec_vminsw (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vminsw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vminuw */ + +inline vector unsigned int +vec_vminuw (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vminuw (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vminuw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vminsh */ + +inline vector signed short +vec_vminsh (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vminsh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vminuh */ + +inline vector unsigned short +vec_vminuh (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vminuh (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vminuh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vminsb */ + +inline vector signed char +vec_vminsb (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vminsb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vminub */ + +inline 
vector unsigned char +vec_vminub (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vminub (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vminub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_mladd */ + +inline vector signed short +vec_mladd (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +inline vector signed short +vec_mladd (vector signed short a1, vector unsigned short a2, vector unsigned short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +inline vector signed short +vec_mladd (vector unsigned short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +inline vector unsigned short +vec_mladd (vector unsigned short a1, vector unsigned short a2, vector unsigned short a3) +{ + return (vector unsigned short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +/* vec_mradds */ + +inline vector signed short +vec_mradds (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vmhraddshs ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3); +} + +/* vec_msum */ + +inline vector 
unsigned int +vec_msum (vector unsigned char a1, vector unsigned char a2, vector unsigned int a3) +{ + return (vector unsigned int) __builtin_altivec_vmsumubm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3); +} + +inline vector signed int +vec_msum (vector signed char a1, vector unsigned char a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsummbm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3); +} + +inline vector unsigned int +vec_msum (vector unsigned short a1, vector unsigned short a2, vector unsigned int a3) +{ + return (vector unsigned int) __builtin_altivec_vmsumuhm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +inline vector signed int +vec_msum (vector signed short a1, vector signed short a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsumshm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_vmsumshm */ + +inline vector signed int +vec_vmsumshm (vector signed short a1, vector signed short a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsumshm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_vmsumuhm */ + +inline vector unsigned int +vec_vmsumuhm (vector unsigned short a1, vector unsigned short a2, vector unsigned int a3) +{ + return (vector unsigned int) __builtin_altivec_vmsumuhm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_vmsummbm */ + +inline vector signed int +vec_vmsummbm (vector signed char a1, vector unsigned char a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsummbm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3); +} + +/* vec_vmsumubm */ + +inline vector unsigned int +vec_vmsumubm (vector unsigned char a1, vector unsigned char a2, vector unsigned int a3) +{ + return (vector unsigned 
int) __builtin_altivec_vmsumubm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3); +} + +/* vec_msums */ + +inline vector unsigned int +vec_msums (vector unsigned short a1, vector unsigned short a2, vector unsigned int a3) +{ + return (vector unsigned int) __builtin_altivec_vmsumuhs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +inline vector signed int +vec_msums (vector signed short a1, vector signed short a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsumshs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_vmsumshs */ + +inline vector signed int +vec_vmsumshs (vector signed short a1, vector signed short a2, vector signed int a3) +{ + return (vector signed int) __builtin_altivec_vmsumshs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_vmsumuhs */ + +inline vector unsigned int +vec_vmsumuhs (vector unsigned short a1, vector unsigned short a2, vector unsigned int a3) +{ + return (vector unsigned int) __builtin_altivec_vmsumuhs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3); +} + +/* vec_mtvscr */ + +inline void +vec_mtvscr (vector signed int a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector unsigned int a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector signed short a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector unsigned short a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector signed char a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +inline void +vec_mtvscr (vector unsigned char a1) +{ + __builtin_altivec_mtvscr ((vector signed int) a1); +} + +/* vec_mule */ + +inline vector unsigned short +vec_mule (vector unsigned char a1, vector unsigned char a2) +{ 
+ return (vector unsigned short) __builtin_altivec_vmuleub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mule (vector signed char a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vmulesb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned int +vec_mule (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned int) __builtin_altivec_vmuleuh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_mule (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vmulesh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmulesh */ + +inline vector signed int +vec_vmulesh (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vmulesh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmuleuh */ + +inline vector unsigned int +vec_vmuleuh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned int) __builtin_altivec_vmuleuh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmuleub */ +inline vector unsigned short +vec_vmuleub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned short) __builtin_altivec_vmuleub ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_mulo */ + +inline vector unsigned short +vec_mulo (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned short) __builtin_altivec_vmuloub ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_mulo (vector signed char a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vmulosb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned int +vec_mulo (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned int) 
__builtin_altivec_vmulouh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_mulo (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vmulosh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmulosh */ + +inline vector signed int +vec_vmulosh (vector signed short a1, vector signed short a2) +{ + return (vector signed int) __builtin_altivec_vmulosh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmulouh */ + +inline vector unsigned int +vec_vmulouh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned int) __builtin_altivec_vmulouh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vmulosb */ + +inline vector signed short +vec_vmulosb (vector signed char a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vmulosb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vmuloub */ + +inline vector unsigned short +vec_vmuloub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned short) __builtin_altivec_vmuloub ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_nmsub */ + +inline vector float +vec_nmsub (vector float a1, vector float a2, vector float a3) +{ + return (vector float) __builtin_altivec_vnmsubfp ((vector float) a1, (vector float) a2, (vector float) a3); +} + +/* vec_nor */ + +inline vector float +vec_nor (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_nor (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_nor (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); 
+} + +inline vector signed short +vec_nor (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_nor (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_nor (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_nor (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_or */ + +inline vector float +vec_or (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_or (vector float a1, vector signed int a2) +{ + return (vector float) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_or (vector signed int a1, vector float a2) +{ + return (vector float) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_or (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_or (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_or (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_or (vector unsigned int a1, vector unsigned int a2) +{ 
+ return (vector unsigned int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_or (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_or (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_or (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_or (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_or (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_or (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_or (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_or (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_pack */ + +inline vector signed char +vec_pack (vector signed short a1, vector signed short a2) +{ + return (vector signed char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned char +vec_pack (vector unsigned short a1, vector 
unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_pack (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_pack (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkuwum */ + +inline vector signed short +vec_vpkuwum (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_vpkuwum (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkuhum */ + +inline vector signed char +vec_vpkuhum (vector signed short a1, vector signed short a2) +{ + return (vector signed char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned char +vec_vpkuhum (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_packpx */ + +inline vector signed short +vec_packpx (vector unsigned int a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vpkpx ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_packs */ + +inline vector unsigned char +vec_packs (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed char +vec_packs (vector signed short a1, vector signed short a2) +{ + return (vector signed char) 
__builtin_altivec_vpkshss ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_packs (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_packs (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkswss ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkswss */ + +inline vector signed short +vec_vpkswss (vector signed int a1, vector signed int a2) +{ + return (vector signed short) __builtin_altivec_vpkswss ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkuwus */ + +inline vector unsigned short +vec_vpkuwus (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkshss */ + +inline vector signed char +vec_vpkshss (vector signed short a1, vector signed short a2) +{ + return (vector signed char) __builtin_altivec_vpkshss ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vpkuhus */ + +inline vector unsigned char +vec_vpkuhus (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_packsu */ + +inline vector unsigned char +vec_packsu (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned char +vec_packsu (vector signed short a1, vector signed short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkshus ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_packsu (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned short) 
__builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_packsu (vector signed int a1, vector signed int a2) +{ + return (vector unsigned short) __builtin_altivec_vpkswus ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkswus */ + +inline vector unsigned short +vec_vpkswus (vector signed int a1, vector signed int a2) +{ + return (vector unsigned short) __builtin_altivec_vpkswus ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vpkshus */ + +inline vector unsigned char +vec_vpkshus (vector signed short a1, vector signed short a2) +{ + return (vector unsigned char) __builtin_altivec_vpkshus ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_perm */ + +inline vector float +vec_perm (vector float a1, vector float a2, vector unsigned char a3) +{ + return (vector float) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector signed int +vec_perm (vector signed int a1, vector signed int a2, vector unsigned char a3) +{ + return (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector unsigned int +vec_perm (vector unsigned int a1, vector unsigned int a2, vector unsigned char a3) +{ + return (vector unsigned int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector signed short +vec_perm (vector signed short a1, vector signed short a2, vector unsigned char a3) +{ + return (vector signed short) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector unsigned short +vec_perm (vector unsigned short a1, vector unsigned short a2, vector unsigned char a3) +{ + return (vector unsigned short) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); 
+} + +inline vector signed char +vec_perm (vector signed char a1, vector signed char a2, vector unsigned char a3) +{ + return (vector signed char) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +inline vector unsigned char +vec_perm (vector unsigned char a1, vector unsigned char a2, vector unsigned char a3) +{ + return (vector unsigned char) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3); +} + +/* vec_re */ + +inline vector float +vec_re (vector float a1) +{ + return (vector float) __builtin_altivec_vrefp ((vector float) a1); +} + +/* vec_rl */ + +inline vector signed char +vec_rl (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_rl (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_rl (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_rl (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_rl (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_rl (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vrlw */ + +inline vector signed int +vec_vrlw (vector signed int a1, vector unsigned int a2) +{ + return (vector signed 
int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vrlw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vrlh */ + +inline vector signed short +vec_vrlh (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vrlh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vrlb */ + +inline vector signed char +vec_vrlb (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vrlb (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_round */ + +inline vector float +vec_round (vector float a1) +{ + return (vector float) __builtin_altivec_vrfin ((vector float) a1); +} + +/* vec_rsqrte */ + +inline vector float +vec_rsqrte (vector float a1) +{ + return (vector float) __builtin_altivec_vrsqrtefp ((vector float) a1); +} + +/* vec_sel */ + +inline vector float +vec_sel (vector float a1, vector float a2, vector signed int a3) +{ + return (vector float) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector float +vec_sel (vector float a1, vector float a2, vector unsigned int a3) +{ + return (vector float) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed int +vec_sel (vector signed int a1, vector signed int a2, vector signed int a3) +{ 
+ return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed int +vec_sel (vector signed int a1, vector signed int a2, vector unsigned int a3) +{ + return (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned int +vec_sel (vector unsigned int a1, vector unsigned int a2, vector signed int a3) +{ + return (vector unsigned int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned int +vec_sel (vector unsigned int a1, vector unsigned int a2, vector unsigned int a3) +{ + return (vector unsigned int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed short +vec_sel (vector signed short a1, vector signed short a2, vector signed short a3) +{ + return (vector signed short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed short +vec_sel (vector signed short a1, vector signed short a2, vector unsigned short a3) +{ + return (vector signed short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned short +vec_sel (vector unsigned short a1, vector unsigned short a2, vector signed short a3) +{ + return (vector unsigned short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned short +vec_sel (vector unsigned short a1, vector unsigned short a2, vector unsigned short a3) +{ + return (vector unsigned short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed char +vec_sel (vector signed char a1, vector signed char a2, vector signed char 
a3) +{ + return (vector signed char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector signed char +vec_sel (vector signed char a1, vector signed char a2, vector unsigned char a3) +{ + return (vector signed char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned char +vec_sel (vector unsigned char a1, vector unsigned char a2, vector signed char a3) +{ + return (vector unsigned char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +inline vector unsigned char +vec_sel (vector unsigned char a1, vector unsigned char a2, vector unsigned char a3) +{ + return (vector unsigned char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3); +} + +/* vec_sl */ + +inline vector signed char +vec_sl (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sl (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sl (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sl (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sl (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sl (vector unsigned int a1, vector unsigned int 
a2) +{ + return (vector unsigned int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vslw */ + +inline vector signed int +vec_vslw (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vslw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vslh */ + +inline vector signed short +vec_vslh (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vslh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vslb */ + +inline vector signed char +vec_vslb (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vslb (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_sld */ + +inline vector float +vec_sld (vector float a1, vector float a2, const char a3) +{ + return (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); +} + +inline vector signed int +vec_sld (vector signed int a1, vector signed int a2, const char a3) +{ + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); +} + +inline vector unsigned int +vec_sld (vector unsigned int a1, vector unsigned int a2, const char a3) +{ + return (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, 
(vector signed int) a2, a3); +} + +inline vector signed short +vec_sld (vector signed short a1, vector signed short a2, const char a3) +{ + return (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); +} + +inline vector unsigned short +vec_sld (vector unsigned short a1, vector unsigned short a2, const char a3) +{ + return (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); +} + +inline vector signed char +vec_sld (vector signed char a1, vector signed char a2, const char a3) +{ + return (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); +} + +inline vector unsigned char +vec_sld (vector unsigned char a1, vector unsigned char a2, const char a3) +{ + return (vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); +} + +/* vec_sll */ + +inline vector signed int +vec_sll (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sll (vector signed int a1, vector unsigned short a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sll (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sll (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sll (vector unsigned int a1, vector unsigned short a2) +{ + return (vector unsigned int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sll (vector unsigned int a1, vector 
unsigned char a2) +{ + return (vector unsigned int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sll (vector signed short a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sll (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sll (vector signed short a1, vector unsigned char a2) +{ + return (vector signed short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sll (vector unsigned short a1, vector unsigned int a2) +{ + return (vector unsigned short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sll (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sll (vector unsigned short a1, vector unsigned char a2) +{ + return (vector unsigned short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sll (vector signed char a1, vector unsigned int a2) +{ + return (vector signed char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sll (vector signed char a1, vector unsigned short a2) +{ + return (vector signed char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sll (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sll (vector unsigned char a1, vector unsigned 
int a2) +{ + return (vector unsigned char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sll (vector unsigned char a1, vector unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sll (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_slo */ + +inline vector float +vec_slo (vector float a1, vector signed char a2) +{ + return (vector float) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_slo (vector float a1, vector unsigned char a2) +{ + return (vector float) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_slo (vector signed int a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_slo (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_slo (vector unsigned int a1, vector signed char a2) +{ + return (vector unsigned int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_slo (vector unsigned int a1, vector unsigned char a2) +{ + return (vector unsigned int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_slo (vector signed short a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_slo (vector signed short a1, vector unsigned char a2) +{ + return (vector signed short) 
__builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_slo (vector unsigned short a1, vector signed char a2) +{ + return (vector unsigned short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_slo (vector unsigned short a1, vector unsigned char a2) +{ + return (vector unsigned short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_slo (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_slo (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_slo (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_slo (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_splat */ + +inline vector signed char +vec_splat (vector signed char a1, const char a2) +{ + return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, a2); +} + +inline vector unsigned char +vec_splat (vector unsigned char a1, const char a2) +{ + return (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, a2); +} + +inline vector signed short +vec_splat (vector signed short a1, const char a2) +{ + return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, a2); +} + +inline vector unsigned short +vec_splat (vector unsigned short a1, const char a2) +{ + return (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, a2); +} 
+ +inline vector float +vec_splat (vector float a1, const char a2) +{ + return (vector float) __builtin_altivec_vspltw ((vector signed int) a1, a2); +} + +inline vector signed int +vec_splat (vector signed int a1, const char a2) +{ + return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, a2); +} + +inline vector unsigned int +vec_splat (vector unsigned int a1, const char a2) +{ + return (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, a2); +} + +/* vec_vspltw */ + +inline vector float +vec_vspltw (vector float a1, const char a2) +{ + return (vector float) __builtin_altivec_vspltw ((vector signed int) a1, a2); +} + +inline vector signed int +vec_vspltw (vector signed int a1, const char a2) +{ + return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, a2); +} + +inline vector unsigned int +vec_vspltw (vector unsigned int a1, const char a2) +{ + return (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, a2); +} + +/* vec_vsplth */ + +inline vector signed short +vec_vsplth (vector signed short a1, const char a2) +{ + return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, a2); +} + +inline vector unsigned short +vec_vsplth (vector unsigned short a1, const char a2) +{ + return (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, a2); +} + +/* vec_vspltb */ + +inline vector signed char +vec_vspltb (vector signed char a1, const char a2) +{ + return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, a2); +} + +inline vector unsigned char +vec_vspltb (vector unsigned char a1, const char a2) +{ + return (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, a2); +} + +/* vec_splat_s8 */ + +inline vector signed char +vec_splat_s8 (const char a1) +{ + return (vector signed char) __builtin_altivec_vspltisb (a1); +} + +/* vec_splat_s16 */ + +inline vector signed short +vec_splat_s16 (const char a1) +{ + 
return (vector signed short) __builtin_altivec_vspltish (a1); +} + +/* vec_splat_s32 */ + +inline vector signed int +vec_splat_s32 (const char a1) +{ + return (vector signed int) __builtin_altivec_vspltisw (a1); +} + +/* vec_splat_u8 */ + +inline vector unsigned char +vec_splat_u8 (const char a1) +{ + return (vector unsigned char) __builtin_altivec_vspltisb (a1); +} + +/* vec_splat_u16 */ + +inline vector unsigned short +vec_splat_u16 (const char a1) +{ + return (vector unsigned short) __builtin_altivec_vspltish (a1); +} + +/* vec_splat_u32 */ + +inline vector unsigned int +vec_splat_u32 (const char a1) +{ + return (vector unsigned int) __builtin_altivec_vspltisw (a1); +} + +/* vec_sr */ + +inline vector signed char +vec_sr (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sr (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sr (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sr (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sr (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sr (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsrw */ + +inline vector signed int +vec_vsrw (vector signed int a1, vector unsigned int a2) 
+{ + return (vector signed int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsrw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsrh */ + +inline vector signed short +vec_vsrh (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vsrh (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vsrb */ + +inline vector signed char +vec_vsrb (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsrb (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_sra */ + +inline vector signed char +vec_sra (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sra (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sra (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sra (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2); +} 
+ +inline vector signed int +vec_sra (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sra (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsraw */ + +inline vector signed int +vec_vsraw (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsraw (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsrah */ + +inline vector signed short +vec_vsrah (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vsrah (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vsrab */ + +inline vector signed char +vec_vsrab (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsrab (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_srl */ + +inline vector signed int +vec_srl (vector signed int a1, vector unsigned int a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_srl (vector signed int a1, vector unsigned short a2) +{ + return (vector signed int) 
__builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_srl (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_srl (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_srl (vector unsigned int a1, vector unsigned short a2) +{ + return (vector unsigned int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_srl (vector unsigned int a1, vector unsigned char a2) +{ + return (vector unsigned int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_srl (vector signed short a1, vector unsigned int a2) +{ + return (vector signed short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_srl (vector signed short a1, vector unsigned short a2) +{ + return (vector signed short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_srl (vector signed short a1, vector unsigned char a2) +{ + return (vector signed short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_srl (vector unsigned short a1, vector unsigned int a2) +{ + return (vector unsigned short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_srl (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_srl (vector unsigned short a1, vector unsigned char a2) +{ + return (vector unsigned short) 
__builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_srl (vector signed char a1, vector unsigned int a2) +{ + return (vector signed char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_srl (vector signed char a1, vector unsigned short a2) +{ + return (vector signed char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_srl (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_srl (vector unsigned char a1, vector unsigned int a2) +{ + return (vector unsigned char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_srl (vector unsigned char a1, vector unsigned short a2) +{ + return (vector unsigned char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_srl (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sro */ + +inline vector float +vec_sro (vector float a1, vector signed char a2) +{ + return (vector float) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_sro (vector float a1, vector unsigned char a2) +{ + return (vector float) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sro (vector signed int a1, vector signed char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sro (vector signed int a1, vector unsigned char a2) +{ + return (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector 
signed int) a2); +} + +inline vector unsigned int +vec_sro (vector unsigned int a1, vector signed char a2) +{ + return (vector unsigned int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sro (vector unsigned int a1, vector unsigned char a2) +{ + return (vector unsigned int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sro (vector signed short a1, vector signed char a2) +{ + return (vector signed short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_sro (vector signed short a1, vector unsigned char a2) +{ + return (vector signed short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sro (vector unsigned short a1, vector signed char a2) +{ + return (vector unsigned short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_sro (vector unsigned short a1, vector unsigned char a2) +{ + return (vector unsigned short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sro (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_sro (vector signed char a1, vector unsigned char a2) +{ + return (vector signed char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sro (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_sro (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed 
int) a2); +} + +/* vec_st */ + +inline void +vec_st (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_st (vector unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvx ((vector signed int) a1, a2, (void *) a3); +} + +/* vec_ste */ + +inline void +vec_ste (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvebx ((vector signed char) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvebx ((vector signed char) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvehx ((vector signed short) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvehx ((vector signed short) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_ste (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, 
a2, (void *) a3); +} + +/* vec_stvewx */ + +inline void +vec_stvewx (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stvewx (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stvewx (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvewx ((vector signed int) a1, a2, (void *) a3); +} + +/* vec_stvehx */ + +inline void +vec_stvehx (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvehx ((vector signed short) a1, a2, (void *) a3); +} + +inline void +vec_stvehx (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvehx ((vector signed short) a1, a2, (void *) a3); +} + +/* vec_stvebx */ + +inline void +vec_stvebx (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvebx ((vector signed char) a1, a2, (void *) a3); +} + +inline void +vec_stvebx (vector unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvebx ((vector signed char) a1, a2, (void *) a3); +} + +/* vec_stl */ + +inline void +vec_stl (vector float a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector signed int a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector unsigned int a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector signed short a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector unsigned short a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector signed char a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +inline void +vec_stl (vector 
unsigned char a1, int a2, void *a3) +{ + __builtin_altivec_stvxl ((vector signed int) a1, a2, (void *) a3); +} + +/* vec_sub */ + +inline vector signed char +vec_sub (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sub (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sub (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_sub (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed short +vec_sub (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sub (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sub (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_sub (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed int +vec_sub (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector 
unsigned int +vec_sub (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sub (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_sub (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_sub (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vsubfp ((vector float) a1, (vector float) a2); +} + +/* vec_vsubfp */ + +inline vector float +vec_vsubfp (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vsubfp ((vector float) a1, (vector float) a2); +} + +/* vec_vsubuwm */ + +inline vector signed int +vec_vsubuwm (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsubuwm (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsubuwm (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsubuwm (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsubuhm */ + +inline vector signed short +vec_vsubuhm (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline 
vector unsigned short +vec_vsubuhm (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vsubuhm (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vsubuhm (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vsububm */ + +inline vector signed char +vec_vsububm (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsububm (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsububm (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsububm (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_subc */ + +inline vector unsigned int +vec_subc (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubcuw ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_subs */ + +inline vector unsigned char +vec_subs (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_subs (vector unsigned char 
a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_subs (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector signed char +vec_subs (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsubsbs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned short +vec_subs (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_subs (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_subs (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector signed short +vec_subs (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubshs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned int +vec_subs (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_subs (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_subs (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + 
+inline vector signed int +vec_subs (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsubsws */ + +inline vector signed int +vec_vsubsws (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsubsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsubuws */ + +inline vector unsigned int +vec_vsubuws (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsubuws (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_vsubuws (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_vsubshs */ + +inline vector signed short +vec_vsubshs (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vsubshs ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vsubuhs */ + +inline vector unsigned short +vec_vsubuhs (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vsubuhs (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vsubuhs */ + +inline vector unsigned short +vec_vsubsuhs (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short 
+vec_vsubsuhs (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +inline vector unsigned short +vec_vsubsuhs (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2); +} + +/* vec_vsubsbs */ + +inline vector signed char +vec_vsubsbs (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vsubsbs ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_vsububs */ + +inline vector unsigned char +vec_vsubsubs (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsubsubs (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +inline vector unsigned char +vec_vsubsubs (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2); +} + +/* vec_sum4s */ + +inline vector unsigned int +vec_sum4s (vector unsigned char a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsum4ubs ((vector signed char) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sum4s (vector signed char a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum4sbs ((vector signed char) a1, (vector signed int) a2); +} + +inline vector signed int +vec_sum4s (vector signed short a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum4shs ((vector signed short) a1, (vector signed int) a2); +} + +/* vec_vsum4shs */ + +inline vector signed int +vec_vsum4shss (vector signed short a1, vector 
signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum4shs ((vector signed short) a1, (vector signed int) a2); +} + +/* vec_vsum4sbs */ + +inline vector signed int +vec_vsum4sbs (vector signed char a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum4sbs ((vector signed char) a1, (vector signed int) a2); +} + +/* vec_vsum4ubs */ + +inline vector unsigned int +vec_vsum4ubs (vector unsigned char a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vsum4ubs ((vector signed char) a1, (vector signed int) a2); +} + +/* vec_sum2s */ + +inline vector signed int +vec_sum2s (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsum2sws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_sums */ + +inline vector signed int +vec_sums (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vsumsws ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_trunc */ + +inline vector float +vec_trunc (vector float a1) +{ + return (vector float) __builtin_altivec_vrfiz ((vector float) a1); +} + +/* vec_unpackh */ + +inline vector signed short +vec_unpackh (vector signed char a1) +{ + return (vector signed short) __builtin_altivec_vupkhsb ((vector signed char) a1); +} + +inline vector signed int +vec_unpackh (vector signed short a1) +{ + return (vector signed int) __builtin_altivec_vupkhsh ((vector signed short) a1); +} + +/* vec_vupkhsh */ + +inline vector signed int +vec_vupkhsh (vector signed short a1) +{ + return (vector signed int) __builtin_altivec_vupkhsh ((vector signed short) a1); +} + +/* vec_vupkhpx */ + +inline vector unsigned int +vec_vupkhpx (vector signed short a1) +{ + return (vector unsigned int) __builtin_altivec_vupkhpx ((vector signed short) a1); +} + +/* vec_vupkhsb */ + +inline vector signed short +vec_vupkhsb (vector signed char a1) +{ + return (vector signed short) __builtin_altivec_vupkhsb 
((vector signed char) a1); +} + +/* vec_unpackl */ + +inline vector signed short +vec_unpackl (vector signed char a1) +{ + return (vector signed short) __builtin_altivec_vupklsb ((vector signed char) a1); +} + +/* vec_vupklpx */ + +inline vector unsigned int +vec_vupklpx (vector signed short a1) +{ + return (vector unsigned int) __builtin_altivec_vupklpx ((vector signed short) a1); +} + +inline vector signed int +vec_unpackl (vector signed short a1) +{ + return (vector signed int) __builtin_altivec_vupklsh ((vector signed short) a1); +} + +/* vec_vupklsh */ + +inline vector signed int +vec_vupklsh (vector signed short a1) +{ + return (vector signed int) __builtin_altivec_vupklsh ((vector signed short) a1); +} + +/* vec_vupklsb */ + +inline vector signed short +vec_vupklsb (vector signed char a1) +{ + return (vector signed short) __builtin_altivec_vupklsb ((vector signed char) a1); +} + +/* vec_xor */ + +inline vector float +vec_xor (vector float a1, vector float a2) +{ + return (vector float) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_xor (vector float a1, vector signed int a2) +{ + return (vector float) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector float +vec_xor (vector signed int a1, vector float a2) +{ + return (vector float) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed int +vec_xor (vector signed int a1, vector signed int a2) +{ + return (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_xor (vector signed int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned int +vec_xor (vector unsigned int a1, vector signed int a2) +{ + return (vector unsigned int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + 
+inline vector unsigned int +vec_xor (vector unsigned int a1, vector unsigned int a2) +{ + return (vector unsigned int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed short +vec_xor (vector signed short a1, vector signed short a2) +{ + return (vector signed short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_xor (vector signed short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_xor (vector unsigned short a1, vector signed short a2) +{ + return (vector unsigned short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned short +vec_xor (vector unsigned short a1, vector unsigned short a2) +{ + return (vector unsigned short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector signed char +vec_xor (vector signed char a1, vector signed char a2) +{ + return (vector signed char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_xor (vector signed char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_xor (vector unsigned char a1, vector signed char a2) +{ + return (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +inline vector unsigned char +vec_xor (vector unsigned char a1, vector unsigned char a2) +{ + return (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2); +} + +/* vec_all_eq */ + +inline int +vec_all_eq (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, a1, (vector signed char) a2); +} + +inline int 
+vec_all_eq (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, a1, a2); +} + +inline int +vec_all_eq (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_eq (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_eq (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_eq (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_eq (vector float a1, vector float a2) +{ 
+ return __builtin_altivec_vcmpeqfp_p (__CR6_LT, a1, a2); +} + +/* vec_all_ge */ + +inline int +vec_all_ge (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_ge (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_ge (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector unsigned int a1, vector unsigned int a2) +{ + return 
__builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_ge (vector float a1, vector float a2) +{ + /* All a1 >= a2 iff vcmpgefp (a1,a2) is all-true (__CR6_LT); all-false (__CR6_EQ) is vec_all_nge.  */ + return __builtin_altivec_vcmpgefp_p (__CR6_LT, a1, a2); +} + +/* vec_all_gt */ + +inline int +vec_all_gt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_gt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_gt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p 
(__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_gt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT, a1, a2); +} + +/* vec_all_in */ + +inline int +vec_all_in (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpbfp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_le */ + +inline int +vec_all_le (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_le (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int 
+vec_all_le (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_le (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_le (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT, a2, a1); +} + +/* vec_all_lt */ + +inline int +vec_all_lt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_all_lt (vector signed short a1, vector unsigned 
short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_all_lt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_all_lt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT, a2, a1); +} + +/* vec_all_nan */ + +inline int +vec_all_nan (vector float a1) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a1); +} + +/* vec_all_ne */ + +inline int +vec_all_ne (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector 
signed char) a2); +} + +inline int +vec_all_ne (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_all_ne (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_all_ne (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_all_ne (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_nge */ + +inline int 
+vec_all_nge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_ngt */ + +inline int +vec_all_ngt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ, a1, a2); +} + +/* vec_all_nle */ + +inline int +vec_all_nle (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ, a2, a1); +} + +/* vec_all_nlt */ + +inline int +vec_all_nlt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ, a2, a1); +} + +/* vec_all_numeric */ + +inline int +vec_all_numeric (vector float a1) +{ + /* Numeric (non-NaN) elements satisfy a1 == a1, so require vcmpeqfp (a1,a1) all-true (__CR6_LT); all-false (__CR6_EQ) is vec_all_nan.  */ + return __builtin_altivec_vcmpeqfp_p (__CR6_LT, a1, a1); +} + +/* vec_any_eq */ + +inline int +vec_any_eq (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_eq (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector 
signed short) a2); +} + +inline int +vec_any_eq (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_eq (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_eq (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_any_ge */ + +inline int +vec_any_ge (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_ge (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed 
short) a1); +} + +inline int +vec_any_ge (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_ge (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_ge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_any_gt */ + +inline int +vec_any_gt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed 
char) a2); +} + +inline int +vec_any_gt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_gt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_gt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_gt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_any_le */ + +inline int +vec_any_le (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed 
char) a2); +} + +inline int +vec_any_le (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_le (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_le (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, 
(vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_le (vector float a1, vector float a2) +{ + /* Some a1 <= a2 iff vcmpgefp (a2,a1) is not all-false (__CR6_EQ_REV); not all-true (__CR6_LT_REV) is vec_any_nle.  */ + return __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, a2, a1); +} + +/* vec_any_lt */ + +inline int +vec_any_lt (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) a2, (vector signed char) a1); +} + +inline int +vec_any_lt (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) a2, (vector signed short) a1); +} + +inline int +vec_any_lt (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtuw_p 
(__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) a2, (vector signed int) a1); +} + +inline int +vec_any_lt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, a2, a1); +} + +/* vec_any_nan */ + +inline int +vec_any_nan (vector float a1) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, a1, a1); +} + +/* vec_any_ne */ + +inline int +vec_any_ne (vector signed char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector signed char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector unsigned char a1, vector signed char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector unsigned char a1, vector unsigned char a2) +{ + return __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) a1, (vector signed char) a2); +} + +inline int +vec_any_ne (vector signed short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector signed short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector unsigned short a1, vector signed short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector 
signed short) a2); +} + +inline int +vec_any_ne (vector unsigned short a1, vector unsigned short a2) +{ + return __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) a1, (vector signed short) a2); +} + +inline int +vec_any_ne (vector signed int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector signed int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector unsigned int a1, vector signed int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector unsigned int a1, vector unsigned int a2) +{ + return __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) a1, (vector signed int) a2); +} + +inline int +vec_any_ne (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, a1, a2); +} + +/* vec_any_nge */ + +inline int +vec_any_nge (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a1, a2); +} + +/* vec_any_ngt */ + +inline int +vec_any_ngt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, a1, a2); +} + +/* vec_any_nle */ + +inline int +vec_any_nle (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, a2, a1); +} + +/* vec_any_nlt */ + +inline int +vec_any_nlt (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, a2, a1); +} + +/* vec_any_numeric */ + +inline int +vec_any_numeric (vector float a1) +{ + return __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, a1, a1); +} + +/* vec_any_out */ + +inline int +vec_any_out (vector float a1, vector float a2) +{ + return __builtin_altivec_vcmpbfp_p (__CR6_EQ_REV, a1, a2); +} + +/* vec_step */ + 
+template<typename _Tp> +struct __vec_step_help +{ + // All proper vector types will specialize _S_elem. +}; + +template<> +struct __vec_step_help<vector signed short> +{ + static const int _S_elem = 8; +}; + +template<> +struct __vec_step_help<vector unsigned short> +{ + static const int _S_elem = 8; +}; + +template<> +struct __vec_step_help<vector signed int> +{ + static const int _S_elem = 4; +}; + +template<> +struct __vec_step_help<vector unsigned int> +{ + static const int _S_elem = 4; +}; + +template<> +struct __vec_step_help<vector unsigned char> +{ + static const int _S_elem = 16; +}; + +template<> +struct __vec_step_help<vector signed char> +{ + static const int _S_elem = 16; +}; + +template<> +struct __vec_step_help<vector float> +{ + static const int _S_elem = 4; +}; + +#define vec_step(t) __vec_step_help<t>::_S_elem #else /* not C++ */ -/* Hairy macros that implement the AltiVec high-level programming - interface for C. */ +/* "... and so I think no man in a century will suffer as greatly as + you will." 
*/ + +#define vec_abs(a) \ + __ch (__un_args_eq (vector signed char, (a)), \ + ((vector signed char) __builtin_altivec_abs_v16qi ((vector signed char) (a))), \ + __ch (__un_args_eq (vector signed short, (a)), \ + ((vector signed short) __builtin_altivec_abs_v8hi ((vector signed short) (a))), \ + __ch (__un_args_eq (vector signed int, (a)), \ + ((vector signed int) __builtin_altivec_abs_v4si ((vector signed int) (a))), \ + __ch (__un_args_eq (vector float, (a)), \ + ((vector float) __builtin_altivec_abs_v4sf ((vector float) (a))), \ + __altivec_link_error_invalid_argument ())))) + +#define vec_abss(a) \ + __ch (__un_args_eq (vector signed char, (a)), \ + ((vector signed char) __builtin_altivec_abss_v16qi ((vector signed char) (a))), \ + __ch (__un_args_eq (vector signed short, (a)), \ + ((vector signed short) __builtin_altivec_abss_v8hi ((vector signed short) (a))), \ + __ch (__un_args_eq (vector signed int, (a)), \ + ((vector signed int) __builtin_altivec_abss_v4si ((vector signed int) (a))), \ + __altivec_link_error_invalid_argument ()))) + +#define vec_step(t) \ + __ch (__builtin_types_compatible_p (t, vector signed int), 4, \ + __ch (__builtin_types_compatible_p (t, vector unsigned int), 4, \ + __ch (__builtin_types_compatible_p (t, vector signed short), 8, \ + __ch (__builtin_types_compatible_p (t, vector unsigned short), 8, \ + __ch (__builtin_types_compatible_p (t, vector signed char), 16, \ + __ch (__builtin_types_compatible_p (t, vector unsigned char), 16, \ + __ch (__builtin_types_compatible_p (t, vector float), 4, \ + __altivec_link_error_invalid_argument ()))))))) + +#define vec_vaddubm(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed 
char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ())))) + +#define vec_vadduhm(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ())))) + +#define vec_vadduwm(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) 
__builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ())))) + +#define vec_vaddfp(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vaddfp ((vector float) (a1), (vector float) (a2))), \ + __altivec_link_error_invalid_argument ()) #define vec_add(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector 
signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vaddfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) (a1), 
(vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vaddfp ((vector float) (a1), (vector float) (a2))), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_addc(a1, a2) __builtin_altivec_vaddcuw (a1, a2) +#define vec_addc(a1, a2) \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vaddcuw ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()) #define vec_adds(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) 
__builtin_altivec_vaddubs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vaddsbs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vaddshs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vadduws ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vaddsws ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) 
__builtin_altivec_vaddubs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vaddsbs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vaddshs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vaddsws ((vector signed int) (a1), (vector signed int) (a2))), \ 
__altivec_link_error_invalid_argument ())))))))))))) +#define vec_vaddsws(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vaddsws ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vadduws(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vadduws ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()))) + +#define vec_vaddshs(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vaddshs ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vadduhs(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vadduhs ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ()))) + +#define vec_vaddsbs(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector 
signed char) __builtin_altivec_vaddsbs ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vaddubs(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vaddubs ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ()))) + #define vec_and(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector signed int, a2), \ - (vector float) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector float, a2), \ - (vector float) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq 
(vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vand ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector signed int, (a2)), \ + ((vector float) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) 
__builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned 
char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vand ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ()))))))))))))))) #define vec_andc(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector signed int, a2), \ - (vector float) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector float, a2), \ - (vector float) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned 
short) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vandc ((vector signed int) a1, (vector signed int) a2), \ - __altivec_link_error_invalid_argument ()))))))))))))))) +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector unsigned int, (a2)), \ + ((vector float) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) 
__builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed 
char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vandc ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ())))))))))))))))))))))) #define vec_avg(a1, a2) \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vavgub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vavgsb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vavguh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vavgsh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vavguw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vavgsw 
((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vavgub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vavgsb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vavguh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vavgsh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vavguw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vavgsw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))) -#define vec_ceil(a1) __builtin_altivec_vrfip (a1) +#define vec_vavgsw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vavgsw ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vavguw(a1, a2) \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vavguw ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()) -#define vec_cmpb(a1, a2) __builtin_altivec_vcmpbfp (a1, a2) +#define vec_vavgsh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) 
__builtin_altivec_vavgsh ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vavguh(a1, a2) \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vavguh ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vavgsb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vavgsb ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vavgub(a1, a2) \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vavgub ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_ceil(a1) __builtin_altivec_vrfip ((a1)) + +#define vec_cmpb(a1, a2) __builtin_altivec_vcmpbfp ((a1), (a2)) #define vec_cmpeq(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vcmpequb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vcmpequh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq 
(vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpequb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpequb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpequh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpequh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpequw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpequw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpeqfp ((vector float) (a1), (vector float) (a2))), \ __altivec_link_error_invalid_argument ()))))))) -#define vec_cmpge(a1, a2) __builtin_altivec_vcmpgefp (a1, a2) +#define vec_vcmpeqfp(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpeqfp ((vector float) (a1), (vector float) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpequw(a1, a2) \ +__ch 
(__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpequw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpequw ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ())) + +#define vec_vcmpequh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpequh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpequh ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ())) + +#define vec_vcmpequb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpequb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpequb ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ())) + +#define vec_cmpge(a1, a2) (vector signed int) __builtin_altivec_vcmpgefp ((a1), (a2)) #define vec_cmpgt(a1, a2) \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2), \ -__ch 
(__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtfp ((vector float) (a1), (vector float) (a2))), \ 
__altivec_link_error_invalid_argument ()))))))) -#define vec_cmple(a1, a2) __builtin_altivec_vcmpgefp (a1, a2) +#define vec_vcmpgtfp(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtfp ((vector float) (a1), (vector float) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpgtsw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpgtuw(a1, a2) \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) (a1), (vector signed int) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpgtsh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpgtuh(a1, a2) \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) (a1), (vector signed short) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpgtsb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcmpgtub(a1, a2) \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) (a1), (vector signed char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_cmple(a1, a2) 
__builtin_altivec_vcmpgefp ((a1), (a2)) #define vec_cmplt(a1, a2) \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vcmpgtsh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpgtub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vcmpgtsb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpgtuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vcmpgtsh 
((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtsw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector signed int) __builtin_altivec_vcmpgtfp ((vector float) (a1), (vector float) (a2))), \ __altivec_link_error_invalid_argument ()))))))) #define vec_ctf(a1, a2) \ -__ch (__bin_args_eq (vector unsigned int, a1, const char, a2), \ - (vector float) __builtin_altivec_vcfux ((vector signed int) a1, (const char) a2), \ -__ch (__bin_args_eq (vector signed int, a1, const char, a2), \ - (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, (const char) a2), \ +__ch (__bin_args_eq (vector unsigned int, (a1), int, (a2)), \ + ((vector float) __builtin_altivec_vcfux ((vector signed int) (a1), (const char) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), int, (a2)), \ + ((vector float) __builtin_altivec_vcfsx ((vector signed int) (a1), (const char) (a2))), \ __altivec_link_error_invalid_argument ())) -#define vec_cts(a1, a2) __builtin_altivec_vctsxs (a1, a2) +#define vec_vcfsx(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), int, (a2)), \ + ((vector float) __builtin_altivec_vcfsx ((vector signed int) (a1), (const char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_vcfux(a1, a2) \ +__ch (__bin_args_eq (vector unsigned int, (a1), int, (a2)), \ + ((vector float) __builtin_altivec_vcfux ((vector signed int) (a1), (const char) (a2))), \ + __altivec_link_error_invalid_argument ()) + +#define vec_cts(a1, a2) __builtin_altivec_vctsxs ((a1), (a2)) -#define vec_ctu(a1, a2) __builtin_altivec_vctuxs (a1, a2) +#define vec_ctu(a1, a2) (vector unsigned 
int) __builtin_altivec_vctuxs ((a1), (a2)) -#define vec_dss(a1) __builtin_altivec_dss (a1) +#define vec_dss(a1) __builtin_altivec_dss ((a1)) #define vec_dssall() __builtin_altivec_dssall () -#define vec_dst(a1, a2, a3) __builtin_altivec_dst (a1, a2, a3) +#define vec_dst(a1, a2, a3) __builtin_altivec_dst ((a1), (a2), (a3)) -#define vec_dstst(a1, a2, a3) __builtin_altivec_dstst (a1, a2, a3) +#define vec_dstst(a1, a2, a3) __builtin_altivec_dstst ((a1), (a2), (a3)) -#define vec_dststt(a1, a2, a3) __builtin_altivec_dststt (a1, a2, a3) +#define vec_dststt(a1, a2, a3) __builtin_altivec_dststt ((a1), (a2), (a3)) -#define vec_dstt(a1, a2, a3) __builtin_altivec_dstt (a1, a2, a3) +#define vec_dstt(a1, a2, a3) __builtin_altivec_dstt ((a1), (a2), (a3)) -#define vec_expte(a1) __builtin_altivec_vexptefp (a1) +#define vec_expte(a1) __builtin_altivec_vexptefp ((a1)) #define vec_floor(a1) __builtin_altivec_vrfim (a1) #define vec_ld(a, b) \ -__ch (__un_args_eq (vector unsigned char *, b), \ - (vector unsigned char) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (unsigned char *, b), \ - (vector unsigned char) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (vector signed char *, b), \ - (vector signed char) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (signed char *, b), \ - (vector signed char) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (vector unsigned short *, b), \ - (vector unsigned short) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (unsigned short *, b), \ - (vector unsigned short) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (vector signed short *, b), \ - (vector signed short) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (signed short *, b), \ - (vector signed short) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (vector unsigned int *, b), \ - (vector unsigned int) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (unsigned int *, b), \ - (vector unsigned int) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (vector 
signed int *, b), \ - (vector signed int) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (signed int *, b), \ - (vector signed int) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (vector float *, b), \ - (vector float) __builtin_altivec_lvx (a, b), \ -__ch (__un_args_eq (float *, b), \ - (vector float) __builtin_altivec_lvx (a, b), \ -__altivec_link_error_invalid_argument ())))))))))))))) +__ch (__un_args_eq (vector unsigned char *, (b)), \ + ((vector unsigned char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector unsigned char [], (b)), \ + ((vector unsigned char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (unsigned char *, (b)), \ + ((vector unsigned char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (unsigned char [], (b)), \ + ((vector unsigned char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector signed char *, (b)), \ + ((vector signed char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector signed char [], (b)), \ + ((vector signed char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (signed char *, (b)), \ + ((vector signed char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (signed char [], (b)), \ + ((vector signed char) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector unsigned short *, (b)), \ + ((vector unsigned short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector unsigned short [], (b)), \ + ((vector unsigned short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (unsigned short *, (b)), \ + ((vector unsigned short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (unsigned short [], (b)), \ + ((vector unsigned short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector signed short *, (b)), \ + ((vector signed short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector signed short [], (b)), \ + ((vector signed short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq 
(signed short *, (b)), \ + ((vector signed short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (signed short [], (b)), \ + ((vector signed short) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector unsigned int *, (b)), \ + ((vector unsigned int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector unsigned int [], (b)), \ + ((vector unsigned int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (unsigned int *, (b)), \ + ((vector unsigned int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (unsigned int [], (b)), \ + ((vector unsigned int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector signed int *, (b)), \ + ((vector signed int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector signed int [], (b)), \ + ((vector signed int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (signed int *, (b)), \ + ((vector signed int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (signed int [], (b)), \ + ((vector signed int) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector float *, (b)), \ + ((vector float) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (vector float [], (b)), \ + ((vector float) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (float *, (b)), \ + ((vector float) __builtin_altivec_lvx ((a), (b))), \ +__ch (__un_args_eq (float [], (b)), \ + ((vector float) __builtin_altivec_lvx ((a), (b))), \ +__altivec_link_error_invalid_argument ())))))))))))))))))))))))))))) #define vec_lde(a, b) \ -__ch (__un_args_eq (unsigned char *, b), \ - (vector unsigned char) __builtin_altivec_lvebx (a, b), \ -__ch (__un_args_eq (signed char *, b), \ - (vector signed char) __builtin_altivec_lvebx (a, b), \ -__ch (__un_args_eq (unsigned short *, b), \ - (vector unsigned short) __builtin_altivec_lvehx (a, b), \ -__ch (__un_args_eq (signed short *, b), \ - (vector signed short) __builtin_altivec_lvehx (a, b), \ -__ch (__un_args_eq (unsigned int *, b), \ 
- (vector unsigned int) __builtin_altivec_lvewx (a, b), \ -__ch (__un_args_eq (signed int *, b), \ - (vector signed int) __builtin_altivec_lvewx (a, b), \ -__altivec_link_error_invalid_argument ())))))) - -#define vec_ldl(a, b) \ -__ch (__un_args_eq (vector unsigned char *, b), \ - (vector unsigned char) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (unsigned char *, b), \ - (vector unsigned char) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (vector signed char *, b), \ - (vector signed char) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (signed char *, b), \ - (vector signed char) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (vector unsigned short *, b), \ - (vector unsigned short) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (unsigned short *, b), \ - (vector unsigned short) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (vector signed short *, b), \ - (vector signed short) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (signed short *, b), \ - (vector signed short) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (vector unsigned int *, b), \ - (vector unsigned int) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (unsigned int *, b), \ - (vector unsigned int) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (vector signed int *, b), \ - (vector signed int) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (signed int *, b), \ - (vector signed int) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (vector float *, b), \ - (vector float) __builtin_altivec_lvxl (a, b), \ -__ch (__un_args_eq (float *, b), \ - (vector float) __builtin_altivec_lvxl (a, b), \ +__ch (__un_args_eq (unsigned char *, (b)), \ + ((vector unsigned char) __builtin_altivec_lvebx ((a), (b))), \ +__ch (__un_args_eq (unsigned char [], (b)), \ + ((vector unsigned char) __builtin_altivec_lvebx ((a), (b))), \ +__ch (__un_args_eq (signed char *, (b)), \ + ((vector signed char) __builtin_altivec_lvebx ((a), (b))), \ +__ch 
(__un_args_eq (signed char [], (b)), \ + ((vector signed char) __builtin_altivec_lvebx ((a), (b))), \ +__ch (__un_args_eq (unsigned short *, (b)), \ + ((vector unsigned short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (unsigned short [], (b)), \ + ((vector unsigned short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (signed short *, (b)), \ + ((vector signed short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (signed short [], (b)), \ + ((vector signed short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (unsigned int *, (b)), \ + ((vector unsigned int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (unsigned int [], (b)), \ + ((vector unsigned int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (signed int *, (b)), \ + ((vector signed int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (signed int [], (b)), \ + ((vector signed int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (float *, (b)), \ + ((vector float) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (float [], (b)), \ + ((vector float) __builtin_altivec_lvewx ((a), (b))), \ __altivec_link_error_invalid_argument ())))))))))))))) -#define vec_loge(a1) __builtin_altivec_vlogefp (a1) - -#define vec_lvsl(a1, a2) __builtin_altivec_lvsl (a1, a2) - -#define vec_lvsr(a1, a2) __builtin_altivec_lvsr (a1, a2) +#define vec_lvewx(a, b) \ +__ch (__un_args_eq (unsigned int *, (b)), \ + ((vector unsigned int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (unsigned int [], (b)), \ + ((vector unsigned int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (signed int *, (b)), \ + ((vector signed int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (signed int [], (b)), \ + ((vector signed int) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (float *, (b)), \ + ((vector float) __builtin_altivec_lvewx ((a), (b))), \ +__ch (__un_args_eq (float [], (b)), \ + ((vector 
float) __builtin_altivec_lvewx ((a), (b))), \ +__altivec_link_error_invalid_argument ())))))) -#define vec_madd(a1, a2, a3) __builtin_altivec_vmaddfp (a1, a2, a3) +#define vec_lvehx(a, b) \ +__ch (__un_args_eq (unsigned short *, (b)), \ + ((vector unsigned short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (unsigned short [], (b)), \ + ((vector unsigned short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (signed short *, (b)), \ + ((vector signed short) __builtin_altivec_lvehx ((a), (b))), \ +__ch (__un_args_eq (signed short [], (b)), \ + ((vector signed short) __builtin_altivec_lvehx ((a), (b))), \ +__altivec_link_error_invalid_argument ())))) + +#define vec_lvebx(a, b) \ +__ch (__un_args_eq (unsigned char *, (b)), \ + ((vector unsigned char) __builtin_altivec_lvebx ((a), (b))), \ +__ch (__un_args_eq (unsigned char [], (b)), \ + ((vector unsigned char) __builtin_altivec_lvebx ((a), (b))), \ +__ch (__un_args_eq (signed char *, (b)), \ + ((vector signed char) __builtin_altivec_lvebx ((a), (b))), \ +__ch (__un_args_eq (signed char [], (b)), \ + ((vector signed char) __builtin_altivec_lvebx ((a), (b))), \ +__altivec_link_error_invalid_argument ())))) -#define vec_madds(a1, a2, a3) __builtin_altivec_vmhaddshs (a1, a2, a3) +#define vec_ldl(a, b) \ +__ch (__un_args_eq (vector unsigned char *, (b)), \ + ((vector unsigned char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector unsigned char [], (b)), \ + ((vector unsigned char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (unsigned char *, (b)), \ + ((vector unsigned char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (unsigned char [], (b)), \ + ((vector unsigned char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector signed char *, (b)), \ + ((vector signed char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector signed char [], (b)), \ + ((vector signed char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq 
(signed char *, (b)), \ + ((vector signed char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (signed char [], (b)), \ + ((vector signed char) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector unsigned short *, (b)), \ + ((vector unsigned short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector unsigned short [], (b)), \ + ((vector unsigned short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (unsigned short *, (b)), \ + ((vector unsigned short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (unsigned short [], (b)), \ + ((vector unsigned short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector signed short *, (b)), \ + ((vector signed short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector signed short [], (b)), \ + ((vector signed short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (signed short *, (b)), \ + ((vector signed short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (signed short [], (b)), \ + ((vector signed short) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector unsigned int *, (b)), \ + ((vector unsigned int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector unsigned int [], (b)), \ + ((vector unsigned int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (unsigned int *, (b)), \ + ((vector unsigned int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (unsigned int [], (b)), \ + ((vector unsigned int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector signed int *, (b)), \ + ((vector signed int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector signed int [], (b)), \ + ((vector signed int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (signed int *, (b)), \ + ((vector signed int) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (signed int [], (b)), \ + ((vector signed int) __builtin_altivec_lvxl ((a), (b))), \ +__ch 
(__un_args_eq (vector float *, (b)), \ + ((vector float) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (vector float [], (b)), \ + ((vector float) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (float *, (b)), \ + ((vector float) __builtin_altivec_lvxl ((a), (b))), \ +__ch (__un_args_eq (float [], (b)), \ + ((vector float) __builtin_altivec_lvxl ((a), (b))), \ +__altivec_link_error_invalid_argument ())))))))))))))))))))))))))))) + +#define vec_loge(a1) __builtin_altivec_vlogefp ((a1)) + +#define vec_lvsl(a1, a2) ((vector unsigned char) __builtin_altivec_lvsl ((a1), (a2))) + +#define vec_lvsr(a1, a2) ((vector unsigned char) __builtin_altivec_lvsr ((a1), (a2))) + +#define vec_madd(a1, a2, a3) (__builtin_altivec_vmaddfp ((a1), (a2), (a3))) + +#define vec_madds(a1, a2, a3) __builtin_altivec_vmhaddshs ((a1), (a2), (a3)) #define vec_max(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vmaxsb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned 
short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vmaxsh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vmaxsw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vmaxfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vmaxsb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), 
\ + ((vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vmaxsh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vmaxsw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vmaxfp ((vector float) (a1), (vector float) (a2))), \ __altivec_link_error_invalid_argument ()))))))))))))) +#define vec_vmaxfp(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vmaxfp ((vector float) (a1), (vector float) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmaxsw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) 
__builtin_altivec_vmaxsw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmaxuw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmaxuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vmaxsh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vmaxsh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmaxuh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmaxuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vmaxsb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vmaxsb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmaxub(a1, a2) \ +__ch (__bin_args_eq 
(vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmaxub ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()))) + #define vec_mergeh(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vmrghb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vmrghh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vmrghw ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vmrghb ((vector signed char) 
(a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmrghb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vmrghh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmrghh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vmrghw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vmrghw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmrghw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ()))))))) +#define vec_vmrghw(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vmrghw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vmrghw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmrghw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vmrghh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vmrghh ((vector signed short) (a1), (vector 
signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmrghh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vmrghb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vmrghb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmrghb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())) + #define vec_mergel(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vmrglb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vmrglh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vmrglw ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector 
signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vmrglb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmrglb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vmrglh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmrglh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vmrglw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vmrglw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmrglw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ()))))))) -#define vec_mfvscr() __builtin_altivec_mfvscr () +#define vec_vmrglw(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vmrglw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vmrglw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmrglw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vmrglh(a1, a2) \ 
+__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vmrglh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmrglh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vmrglb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vmrglb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vmrglb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_mfvscr() (((vector unsigned short) __builtin_altivec_mfvscr ())) #define vec_min(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vminub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vminsb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) 
__builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vminuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vminsh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vminuw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vminsw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vminfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vminub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vminub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vminub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vminsb ((vector signed 
char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vminuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vminuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vminuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vminsh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vminuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vminuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vminuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vminsw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vminfp ((vector float) (a1), (vector float) (a2))), \ __altivec_link_error_invalid_argument ()))))))))))))) +#define vec_vminfp(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vminfp ((vector float) (a1), (vector float) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define 
vec_vminsw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vminsw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vminuw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vminuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vminuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vminuw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vminsh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vminsh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vminuh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vminuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vminuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vminuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vminsb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vminsb ((vector signed char) (a1), (vector 
signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vminub(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vminub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vminub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vminub ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()))) + #define vec_mladd(a1, a2, a3) \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, vector signed short, a3), \ - (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector unsigned short, a2, vector unsigned short, a3), \ - (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector signed short, a2, vector signed short, a3), \ - (vector signed short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, vector unsigned short, a3), \ - (vector unsigned short) __builtin_altivec_vmladduhm ((vector signed short) a1, (vector signed short) a2, (vector signed short) a3), \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector signed short, (a3)), \ + ((vector signed short) __builtin_altivec_vmladduhm ((vector signed short) (a1), (vector signed short) (a2), (vector signed short) (a3))), \ +__ch (__tern_args_eq (vector signed short, (a1), 
vector unsigned short, (a2), vector unsigned short, (a3)), \ + ((vector signed short) __builtin_altivec_vmladduhm ((vector signed short) (a1), (vector signed short) (a2), (vector signed short) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector signed short, (a2), vector signed short, (a3)), \ + ((vector signed short) __builtin_altivec_vmladduhm ((vector signed short) (a1), (vector signed short) (a2), (vector signed short) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned short, (a3)), \ + ((vector unsigned short) __builtin_altivec_vmladduhm ((vector signed short) (a1), (vector signed short) (a2), (vector signed short) (a3))), \ __altivec_link_error_invalid_argument ())))) -#define vec_mradds(a1, a2, a3) __builtin_altivec_vmhraddshs (a1, a2, a3) +#define vec_mradds(a1, a2, a3) __builtin_altivec_vmhraddshs ((a1), (a2), (a3)) #define vec_msum(a1, a2, a3) \ -__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, vector unsigned int, a3), \ - (vector unsigned int) __builtin_altivec_vmsumubm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed char, a1, vector unsigned char, a2, vector signed int, a3), \ - (vector signed int) __builtin_altivec_vmsummbm ((vector signed char) a1, (vector signed char) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, vector unsigned int, a3), \ - (vector unsigned int) __builtin_altivec_vmsumuhm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, vector signed int, a3), \ - (vector signed int) __builtin_altivec_vmsumshm ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3), \ +__ch (__tern_args_eq (vector unsigned char, (a1), vector unsigned char, (a2), vector unsigned int, (a3)), \ + ((vector unsigned int) 
__builtin_altivec_vmsumubm ((vector signed char) (a1), (vector signed char) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed char, (a1), vector unsigned char, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vmsummbm ((vector signed char) (a1), (vector signed char) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vmsumuhm ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vmsumshm ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ __altivec_link_error_invalid_argument ())))) +#define vec_vmsumshm(a1, a2, a3) \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vmsumshm ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmsumuhm(a1, a2, a3) \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vmsumuhm ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmsummbm(a1, a2, a3) \ +__ch (__tern_args_eq (vector signed char, (a1), vector unsigned char, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vmsummbm ((vector signed char) (a1), (vector signed char) (a2), (vector signed int) (a3))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_msumubm(a1, a2, a3) \ +__ch (__tern_args_eq (vector unsigned char, (a1), vector unsigned char, (a2), vector unsigned int, 
(a3)), \ + ((vector unsigned int) __builtin_altivec_vmsumubm ((vector signed char) (a1), (vector signed char) (a2), (vector signed int) (a3))), \ +__altivec_link_error_invalid_argument ()) + #define vec_msums(a1, a2, a3) \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, vector unsigned int, a3), \ - (vector unsigned int) __builtin_altivec_vmsumuhs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, vector signed int, a3), \ - (vector signed int) __builtin_altivec_vmsumshs ((vector signed short) a1, (vector signed short) a2, (vector signed int) a3), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vmsumuhs ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vmsumshs ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ __altivec_link_error_invalid_argument ())) +#define vec_vmsumshs(a1, a2, a3) \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vmsumshs ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmsumuhs(a1, a2, a3) \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vmsumuhs ((vector signed short) (a1), (vector signed short) (a2), (vector signed int) (a3))), \ +__altivec_link_error_invalid_argument ()) + #define vec_mtvscr(a1) \ -__ch (__un_args_eq (vector signed int, a1), \ - __builtin_altivec_mtvscr ((vector signed int) 
a1), \ -__ch (__un_args_eq (vector unsigned int, a1), \ - __builtin_altivec_mtvscr ((vector signed int) a1), \ -__ch (__un_args_eq (vector signed short, a1), \ - __builtin_altivec_mtvscr ((vector signed int) a1), \ -__ch (__un_args_eq (vector unsigned short, a1), \ - __builtin_altivec_mtvscr ((vector signed int) a1), \ -__ch (__un_args_eq (vector signed char, a1), \ - __builtin_altivec_mtvscr ((vector signed int) a1), \ -__ch (__un_args_eq (vector unsigned char, a1), \ - __builtin_altivec_mtvscr ((vector signed int) a1), \ +__ch (__un_args_eq (vector signed int, (a1)), \ + __builtin_altivec_mtvscr ((vector signed int) (a1)), \ +__ch (__un_args_eq (vector unsigned int, (a1)), \ + __builtin_altivec_mtvscr ((vector signed int) (a1)), \ +__ch (__un_args_eq (vector signed short, (a1)), \ + __builtin_altivec_mtvscr ((vector signed int) (a1)), \ +__ch (__un_args_eq (vector unsigned short, (a1)), \ + __builtin_altivec_mtvscr ((vector signed int) (a1)), \ +__ch (__un_args_eq (vector signed char, (a1)), \ + __builtin_altivec_mtvscr ((vector signed int) (a1)), \ +__ch (__un_args_eq (vector unsigned char, (a1)), \ + __builtin_altivec_mtvscr ((vector signed int) (a1)), \ __altivec_link_error_invalid_argument ())))))) #define vec_mule(a1, a2) \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned short) __builtin_altivec_vmuleub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed short) __builtin_altivec_vmulesb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned int) __builtin_altivec_vmuleuh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vmulesh ((vector signed short) a1, (vector signed short) a2), \ +__ch (__bin_args_eq 
(vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmuleub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed short) __builtin_altivec_vmulesb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmuleuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed int) __builtin_altivec_vmulesh ((vector signed short) (a1), (vector signed short) (a2))), \ __altivec_link_error_invalid_argument ())))) +#define vec_vmulesh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed int) __builtin_altivec_vmulesh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmuleuh(a1, a2) \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmuleuh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmulesb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed short) __builtin_altivec_vmulesb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmuleub(a1, a2) \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmuleub ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + #define vec_mulo(a1, a2) \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned short) 
__builtin_altivec_vmuloub ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed short) __builtin_altivec_vmulosb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned int) __builtin_altivec_vmulouh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vmulosh ((vector signed short) a1, (vector signed short) a2), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmuloub ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed short) __builtin_altivec_vmulosb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmulouh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed int) __builtin_altivec_vmulosh ((vector signed short) (a1), (vector signed short) (a2))), \ __altivec_link_error_invalid_argument ())))) -#define vec_nmsub(a1, a2, a3) __builtin_altivec_vnmsubfp (a1, a2, a3) +#define vec_vmulosh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed int) __builtin_altivec_vmulosh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmulouh(a1, a2) \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned int) __builtin_altivec_vmulouh ((vector signed short) (a1), (vector signed short) (a2))), \ 
+__altivec_link_error_invalid_argument ()) + +#define vec_mulosb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed short) __builtin_altivec_vmulosb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vmuloub(a1, a2) \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vmuloub ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_nmsub(a1, a2, a3) \ +__ch (__tern_args_eq (vector float, ((a1)), vector float, ((a2)) , vector float, ((a3))), \ + ((vector float) __builtin_altivec_vnmsubfp ((vector float) ((a1)), (vector float) ((a2)), (vector float)((a3)))), \ + __altivec_link_error_invalid_argument ()) #define vec_nor(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector 
unsigned char) __builtin_altivec_vnor ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vnor ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ()))))))) #define vec_or(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector signed int, a2), \ - (vector float) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector float, a2), \ - (vector float) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) 
__builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vor ((vector signed int) a1, (vector signed int) a2), \ +__ch 
(__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector signed int, (a2)), \ + ((vector float) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, 
(a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vor ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ()))))))))))))))) #define vec_pack(a1, a2) \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned char) __builtin_altivec_vpkuhum ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned short) __builtin_altivec_vpkuwum ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed char) __builtin_altivec_vpkuhum ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkuhum ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed short) 
__builtin_altivec_vpkuwum ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkuwum ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))) -#define vec_packpx(a1, a2) __builtin_altivec_vpkpx (a1, a2) +#define vec_vpkuwum(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed short) __builtin_altivec_vpkuwum ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkuwum ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vpkuhum(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed char) __builtin_altivec_vpkuhum ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkuhum ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_packpx(a1, a2) __builtin_altivec_vpkpx ((a1), (a2)) #define vec_packs(a1, a2) \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed char) __builtin_altivec_vpkshss ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ 
- (vector signed short) __builtin_altivec_vpkswss ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed char) __builtin_altivec_vpkshss ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed short) __builtin_altivec_vpkswss ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))) +#define vec_vpkswss(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed short) __builtin_altivec_vpkswss ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vpkuwus(a1, a2) \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vpkshss(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed char) __builtin_altivec_vpkshss ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vpkuhus(a1, a2) \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + #define vec_packsu(a1, a2) \ 
-__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector unsigned char) __builtin_altivec_vpkshus ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector unsigned short) __builtin_altivec_vpkswus ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkuhus ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkshus ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkuwus ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkswus ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))) -#define vec_perm(a1, a2, a3, a4) \ -__ch (__tern_args_eq (vector float, a1, vector float, a2, vector unsigned char, a3), \ - (vector float) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ -__ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, vector unsigned char, a3), \ - (vector signed int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ 
-__ch (__tern_args_eq (vector unsigned int, a1, vector unsigned int, a2, vector unsigned char, a3), \ - (vector unsigned int) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, vector unsigned char, a3), \ - (vector signed short) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, vector unsigned char, a3), \ - (vector unsigned short) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ -__ch (__tern_args_eq (vector signed char, a1, vector signed char, a2, vector unsigned char, a3), \ - (vector signed char) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ -__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, vector unsigned char, a3), \ - (vector unsigned char) __builtin_altivec_vperm_4si ((vector signed int) a1, (vector signed int) a2, (vector signed char) a3), \ +#define vec_vpkswus(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vpkswus ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vpkshus(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vpkshus ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_perm(a1, a2, a3) \ +__ch (__tern_args_eq (vector float, (a1), vector float, (a2), vector unsigned char, (a3)), \ + ((vector float) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ +__ch (__tern_args_eq (vector signed int, (a1), 
vector signed int, (a2), vector unsigned char, (a3)), \ + ((vector signed int) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ +__ch (__tern_args_eq (vector unsigned int, (a1), vector unsigned int, (a2), vector unsigned char, (a3)), \ + ((vector unsigned int) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector unsigned char, (a3)), \ + ((vector signed short) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned char, (a3)), \ + ((vector unsigned short) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ +__ch (__tern_args_eq (vector signed char, (a1), vector signed char, (a2), vector unsigned char, (a3)), \ + ((vector signed char) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ +__ch (__tern_args_eq (vector unsigned char, (a1), vector unsigned char, (a2), vector unsigned char, (a3)), \ + ((vector unsigned char) __builtin_altivec_vperm_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed char) (a3))), \ __altivec_link_error_invalid_argument ()))))))) -#define vec_re(a1) __builtin_altivec_vrefp (a1) +#define vec_re(a1) __builtin_altivec_vrefp ((a1)) #define vec_rl(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vrlb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, 
vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vrlh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vrlw ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vrlb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vrlb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vrlh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vrlh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vrlw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vrlw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))) -#define vec_round(a1) __builtin_altivec_vrfin (a1) +#define vec_vrlw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector 
signed int) __builtin_altivec_vrlw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vrlw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vrlh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vrlh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vrlh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vrlb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vrlb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vrlb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())) -#define vec_rsqrte(a1) __builtin_altivec_vrsqrtefp (a1) +#define vec_round(a1) __builtin_altivec_vrfin ((a1)) + +#define vec_rsqrte(a1) __builtin_altivec_vrsqrtefp ((a1)) #define vec_sel(a1, a2, a3) \ -__ch (__tern_args_eq (vector float, a1, vector float, a2, vector signed int, a3), \ - (vector float) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector float, a1, vector float, a2, vector unsigned int, a3), \ - (vector float) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, vector signed int, a3), \ - (vector signed int) __builtin_altivec_vsel_4si ((vector signed 
int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, vector unsigned int, a3), \ - (vector signed int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned int, a1, vector unsigned int, a2, vector signed int, a3), \ - (vector unsigned int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned int, a1, vector unsigned int, a2, vector unsigned int, a3), \ - (vector unsigned int) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, vector signed short, a3), \ - (vector signed short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, vector unsigned short, a3), \ - (vector signed short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, vector signed short, a3), \ - (vector unsigned short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, vector unsigned short, a3), \ - (vector unsigned short) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed char, a1, vector signed char, a2, vector signed char, a3), \ - (vector signed char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector signed char, a1, vector signed char, a2, vector unsigned char, a3), \ - (vector 
signed char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, vector signed char, a3), \ - (vector unsigned char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ -__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, vector unsigned char, a3), \ - (vector unsigned char) __builtin_altivec_vsel_4si ((vector signed int) a1, (vector signed int) a2, (vector signed int) a3), \ +__ch (__tern_args_eq (vector float, (a1), vector float, (a2), vector signed int, (a3)), \ + ((vector float) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector float, (a1), vector float, (a2), vector unsigned int, (a3)), \ + ((vector float) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed int, (a1), vector signed int, (a2), vector signed int, (a3)), \ + ((vector signed int) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed int, (a1), vector signed int, (a2), vector unsigned int, (a3)), \ + ((vector signed int) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned int, (a1), vector unsigned int, (a2), vector signed int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned int, (a1), vector unsigned int, (a2), vector unsigned int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector 
signed short, (a1), vector signed short, (a2), vector signed short, (a3)), \ + ((vector signed short) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), vector unsigned short, (a3)), \ + ((vector signed short) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector signed short, (a3)), \ + ((vector unsigned short) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), vector unsigned short, (a2), vector unsigned short, (a3)), \ + ((vector unsigned short) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed char, (a1), vector signed char, (a2), vector signed char, (a3)), \ + ((vector signed char) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector signed char, (a1), vector signed char, (a2), vector unsigned char, (a3)), \ + ((vector signed char) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned char, (a1), vector unsigned char, (a2), vector signed char, (a3)), \ + ((vector unsigned char) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ +__ch (__tern_args_eq (vector unsigned char, (a1), vector unsigned char, (a2), vector unsigned char, (a3)), \ + ((vector unsigned char) __builtin_altivec_vsel_4si ((vector signed int) (a1), (vector signed int) (a2), (vector signed int) (a3))), \ __altivec_link_error_invalid_argument ())))))))))))))) #define vec_sl(a1, a2) 
\ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vslb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vslh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vslw ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vslb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vslb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vslh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vslh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vslw ((vector signed int) (a1), (vector signed int) (a2))), 
\ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vslw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))) +#define vec_vslw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vslw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vslw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vslh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vslh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vslh ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vslb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vslb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vslb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())) + #define vec_sld(a1, a2, a3) \ -__ch (__tern_args_eq (vector float, a1, vector float, a2, const char, a3), \ - (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector signed int, a1, vector signed int, a2, const char, a3), \ - (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const 
char) a3), \ -__ch (__tern_args_eq (vector unsigned int, a1, vector unsigned int, a2, const char, a3), \ - (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector signed short, a1, vector signed short, a2, const char, a3), \ - (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector unsigned short, a1, vector unsigned short, a2, const char, a3), \ - (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector signed char, a1, vector signed char, a2, const char, a3), \ - (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ -__ch (__tern_args_eq (vector unsigned char, a1, vector unsigned char, a2, const char, a3), \ - (vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3), \ +__ch (__tern_args_eq (vector float, (a1), vector float, (a2), int, (a3)), \ + ((vector float) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ +__ch (__tern_args_eq (vector signed int, (a1), vector signed int, (a2), int, (a3)), \ + ((vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ +__ch (__tern_args_eq (vector unsigned int, (a1), vector unsigned int, (a2), int, (a3)), \ + ((vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ +__ch (__tern_args_eq (vector signed short, (a1), vector signed short, (a2), int, (a3)), \ + ((vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ +__ch (__tern_args_eq (vector unsigned short, (a1), 
vector unsigned short, (a2), int, (a3)), \ + ((vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ +__ch (__tern_args_eq (vector signed char, (a1), vector signed char, (a2), int, (a3)), \ + ((vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ +__ch (__tern_args_eq (vector unsigned char, (a1), vector unsigned char, (a2), int, (a3)), \ + ((vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) (a1), (vector signed int) (a2), (const char) (a3))), \ __altivec_link_error_invalid_argument ()))))))) #define vec_sll(a1, a2) \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned short, a2), \ - (vector unsigned int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned char, a2), \ - (vector unsigned int) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned int, a2), \ - (vector signed short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed short) 
__builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned char, a2), \ - (vector signed short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned int, a2), \ - (vector unsigned short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned char, a2), \ - (vector unsigned short) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned int, a2), \ - (vector signed char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned short, a2), \ - (vector signed char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned int, a2), \ - (vector unsigned char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned short, a2), \ - (vector unsigned char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsl ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) 
(a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned short, (a2)), \ + ((vector signed int) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned char, (a2)), \ + ((vector signed int) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned short, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned char, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned int, (a2)), \ + ((vector signed short) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned char, (a2)), \ + ((vector signed short) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsl ((vector signed 
int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned int, (a2)), \ + ((vector signed char) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned short, (a2)), \ + ((vector signed char) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned int, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsl ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))))))))))))))) #define vec_slo(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector signed char, a2), \ - (vector float) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector unsigned char, a2), \ - (vector float) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed char, a2), \ - (vector unsigned int) 
__builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned char, a2), \ - (vector unsigned int) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed char, a2), \ - (vector signed short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned char, a2), \ - (vector signed short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed char, a2), \ - (vector unsigned short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned char, a2), \ - (vector unsigned short) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vslo ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector float, (a1), vector signed char, (a2)), \ + ((vector float) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector unsigned char, (a2)), \ + ((vector float) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch 
(__bin_args_eq (vector signed int, (a1), vector signed char, (a2)), \ + ((vector signed int) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned char, (a2)), \ + ((vector signed int) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed char, (a2)), \ + ((vector unsigned int) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned char, (a2)), \ + ((vector unsigned int) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed char, (a2)), \ + ((vector signed short) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned char, (a2)), \ + ((vector signed short) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed 
int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vslo ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))))))))))) #define vec_splat(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, const char, a2), \ - (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, const char, a2), \ - (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, const char, a2), \ - (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, const char, a2), \ - (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2), \ -__ch (__bin_args_eq (vector float, a1, const char, a2), \ - (vector float) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2), \ -__ch (__bin_args_eq (vector signed int, a1, const char, a2), \ - (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, const char, a2), \ - (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2), \ +__ch (__bin_args_eq (vector signed char, ((a1)), int, ((a2))), \ + ((vector signed char) __builtin_altivec_vspltb ((vector signed char) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned char, ((a1)), int, ((a2))), \ + ((vector unsigned char) __builtin_altivec_vspltb ((vector signed char) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector signed short, ((a1)), int, ((a2))), \ + ((vector signed short) __builtin_altivec_vsplth ((vector signed short) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), int, ((a2))), \ + ((vector 
unsigned short) __builtin_altivec_vsplth ((vector signed short) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector float, ((a1)), int, ((a2))), \ + ((vector float) __builtin_altivec_vspltw ((vector signed int) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector signed int, ((a1)), int, ((a2))), \ + ((vector signed int) __builtin_altivec_vspltw ((vector signed int) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned int, ((a1)), int, ((a2))), \ + ((vector unsigned int) __builtin_altivec_vspltw ((vector signed int) (a1), (const char) ((a2)))), \ __altivec_link_error_invalid_argument ()))))))) -#define vec_splat_s8(a1) __builtin_altivec_vspltisb (a1) +#define vec_vspltw(a1, a2) \ +__ch (__bin_args_eq (vector float, ((a1)), int, ((a2))), \ + ((vector float) __builtin_altivec_vspltw ((vector signed int) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector signed int, ((a1)), int, ((a2))), \ + ((vector signed int) __builtin_altivec_vspltw ((vector signed int) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned int, ((a1)), int, ((a2))), \ + ((vector unsigned int) __builtin_altivec_vspltw ((vector signed int) (a1), (const char) ((a2)))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vsplth(a1, a2) \ +__ch (__bin_args_eq (vector signed short, ((a1)), int, ((a2))), \ + ((vector signed short) __builtin_altivec_vsplth ((vector signed short) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), int, ((a2))), \ + ((vector unsigned short) __builtin_altivec_vsplth ((vector signed short) ((a1)), (const char) ((a2)))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vspltb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, ((a1)), int, ((a2))), \ + ((vector signed char) __builtin_altivec_vspltb ((vector signed char) ((a1)), (const char) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned char, ((a1)), int, ((a2))), \ + ((vector unsigned char) 
__builtin_altivec_vspltb ((vector signed char) ((a1)), (const char) ((a2)))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_splat_s8(a1) __builtin_altivec_vspltisb ((a1)) -#define vec_splat_s16(a1) __builtin_altivec_vspltish (a1) +#define vec_splat_s16(a1) __builtin_altivec_vspltish ((a1)) -#define vec_splat_s32(a1) __builtin_altivec_vspltisw (a1) +#define vec_splat_s32(a1) __builtin_altivec_vspltisw ((a1)) -#define vec_splat_u8(a1) __builtin_altivec_vspltisb (a1) +#define vec_splat_u8(a1) ((vector unsigned char) __builtin_altivec_vspltisb ((a1))) -#define vec_splat_u16(a1) __builtin_altivec_vspltish (a1) +#define vec_splat_u16(a1) ((vector unsigned short) __builtin_altivec_vspltish ((a1))) -#define vec_splat_u32(a1) __builtin_altivec_vspltisw (a1) +#define vec_splat_u32(a1) ((vector unsigned int) __builtin_altivec_vspltisw ((a1))) #define vec_sr(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsrb ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsrh ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsrw ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), 
vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsrb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsrb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vsrh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsrh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vsrw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsrw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))) +#define vec_vsrw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vsrw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsrw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vsrh(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vsrh ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsrh ((vector signed short) (a1), (vector signed short) (a2))), \ 
+__altivec_link_error_invalid_argument ())) + +#define vec_vsrb(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsrb ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsrb ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())) + #define vec_sra(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsrab ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsrah ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsraw ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsrab ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsrab ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed 
short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vsrah ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsrah ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vsraw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsraw ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))) +#define vec_vsraw(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vsraw ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsraw ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vsrah(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vsrah ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsrah ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())) + +#define vec_vsrab(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsrab ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) 
__builtin_altivec_vsrab ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())) + #define vec_srl(a1, a2) \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned short, a2), \ - (vector unsigned int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned char, a2), \ - (vector unsigned int) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned int, a2), \ - (vector signed short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned char, a2), \ - (vector signed short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned int, a2), \ - (vector unsigned short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) 
__builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned char, a2), \ - (vector unsigned short) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned int, a2), \ - (vector signed char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned short, a2), \ - (vector signed char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned int, a2), \ - (vector unsigned char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned short, a2), \ - (vector unsigned char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsr ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector signed int) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned short, (a2)), \ + ((vector signed int) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned char, (a2)), \ + ((vector signed int) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsr ((vector signed int) (a1), (vector 
signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned short, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned char, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned int, (a2)), \ + ((vector signed short) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector signed short) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned char, (a2)), \ + ((vector signed short) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned int, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned int, (a2)), \ + ((vector signed char) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned short, (a2)), \ + ((vector signed char) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsr 
((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned int, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned short, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsr ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))))))))))))))) #define vec_sro(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector signed char, a2), \ - (vector float) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector unsigned char, a2), \ - (vector float) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed char, a2), \ - (vector unsigned int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned char, a2), \ - (vector unsigned int) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed char, a2), \ - (vector signed short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned char, a2), \ - (vector signed short) 
__builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed char, a2), \ - (vector unsigned short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned char, a2), \ - (vector unsigned short) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsro ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector float, (a1), vector signed char, (a2)), \ + ((vector float) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector unsigned char, (a2)), \ + ((vector float) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed char, (a2)), \ + ((vector signed int) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned char, (a2)), \ + ((vector signed int) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed char, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) 
(a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned char, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed char, (a2)), \ + ((vector signed short) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned char, (a2)), \ + ((vector signed short) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned char, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector signed char) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsro ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))))))))))) #define vec_st(a1, a2, a3) \ - __builtin_altivec_stvx ((vector signed int) a1, a2, a3) + __builtin_altivec_stvx ((vector signed int) (a1), (a2), (a3)) #define vec_stl(a1, a2, a3) \ - __builtin_altivec_stvxl ((vector signed int) a1, a2, a3) + __builtin_altivec_stvxl 
((vector signed int) (a1), (a2), (a3)) #define vec_ste(a, b, c) \ -__ch (__un_args_eq (vector unsigned char, a), \ - __builtin_altivec_stvebx ((vector signed char) a, b, c), \ -__ch (__un_args_eq (vector signed char, a), \ - __builtin_altivec_stvebx ((vector signed char) a, b, c), \ -__ch (__un_args_eq (vector unsigned short, a), \ - __builtin_altivec_stvehx ((vector signed short) a, b, c), \ -__ch (__un_args_eq (vector signed short, a), \ - __builtin_altivec_stvehx ((vector signed short) a, b, c), \ -__ch (__un_args_eq (vector unsigned int, a), \ - __builtin_altivec_stvewx ((vector signed int) a, b, c), \ -__ch (__un_args_eq (vector signed int, a), \ - __builtin_altivec_stvewx ((vector signed int) a, b, c), \ -__ch (__un_args_eq (vector float, a), \ - __builtin_altivec_stvewx ((vector signed int) a, b, c), \ +__ch (__un_args_eq (vector unsigned char, (a)), \ + __builtin_altivec_stvebx ((vector signed char) (a), (b), (c)), \ +__ch (__un_args_eq (vector signed char, (a)), \ + __builtin_altivec_stvebx ((vector signed char) (a), (b), (c)), \ +__ch (__un_args_eq (vector unsigned short, (a)), \ + __builtin_altivec_stvehx ((vector signed short) (a), (b), (c)), \ +__ch (__un_args_eq (vector signed short, (a)), \ + __builtin_altivec_stvehx ((vector signed short) (a), (b), (c)), \ +__ch (__un_args_eq (vector unsigned int, (a)), \ + __builtin_altivec_stvewx ((vector signed int) (a), (b), (c)), \ +__ch (__un_args_eq (vector signed int, (a)), \ + __builtin_altivec_stvewx ((vector signed int) (a), (b), (c)), \ +__ch (__un_args_eq (vector float, (a)), \ + __builtin_altivec_stvewx ((vector signed int) (a), (b), (c)), \ __altivec_link_error_invalid_argument ()))))))) +#define vec_stvewx(a, b, c) \ +__ch (__un_args_eq (vector unsigned int, (a)), \ + __builtin_altivec_stvewx ((vector signed int) (a), (b), (c)), \ +__ch (__un_args_eq (vector signed int, (a)), \ + __builtin_altivec_stvewx ((vector signed int) (a), (b), (c)), \ +__ch (__un_args_eq (vector float, (a)), \ + 
__builtin_altivec_stvewx ((vector signed int) (a), (b), (c)), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_stvehx(a, b, c) \ +__ch (__un_args_eq (vector unsigned short, (a)), \ + __builtin_altivec_stvehx ((vector signed short) (a), (b), (c)), \ +__ch (__un_args_eq (vector signed short, (a)), \ + __builtin_altivec_stvehx ((vector signed short) (a), (b), (c)), \ +__altivec_link_error_invalid_argument ())) + +#define vec_stvebx(a, b, c) \ +__ch (__un_args_eq (vector unsigned char, (a)), \ + __builtin_altivec_stvebx ((vector signed char) (a), (b), (c)), \ +__ch (__un_args_eq (vector signed char, (a)), \ + __builtin_altivec_stvebx ((vector signed char) (a), (b), (c)), \ +__altivec_link_error_invalid_argument ())) + #define vec_sub(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsububm ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) 
a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vsubfp ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed 
short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vsubfp ((vector float) (a1), (vector float) (a2))), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_subc(a1, a2) __builtin_altivec_vsubcuw (a1, a2) +#define vec_vsubfp(a1, a2) \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + ((vector float) __builtin_altivec_vsubfp ((vector float) (a1), (vector float) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vsubuwm(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsubuwm 
((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuwm ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ())))) + +#define vec_vsubuhm(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhm ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ())))) + +#define vec_vsububm(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, 
(a2)), \ + ((vector unsigned char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububm ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ())))) + +#define vec_subc(a1, a2) ((vector unsigned int) __builtin_altivec_vsubcuw ((vector unsigned int) (a1), (vector unsigned int) (a2))) #define vec_subs(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vsububs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vsubsbs ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vsubshs ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq 
(vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vsubsws ((vector signed int) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vsubsbs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) (a1), (vector 
signed short) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + ((vector signed short) __builtin_altivec_vsubshs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsubsws ((vector signed int) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ())))))))))))) +#define vec_vsubsws(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsubsws ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vsubuws(a1, a2) \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsubuws ((vector signed int) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vsubshs(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, 
(a2)), \ + ((vector signed short) __builtin_altivec_vsubshs ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vsubuhs(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + ((vector unsigned short) __builtin_altivec_vsubuhs ((vector signed short) (a1), (vector signed short) (a2))), \ +__altivec_link_error_invalid_argument ()))) + +#define vec_vsubsbs(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + ((vector signed char) __builtin_altivec_vsubsbs ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vsububs(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububs ((vector signed char) (a1), (vector signed char) (a2))), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + ((vector unsigned char) __builtin_altivec_vsububs ((vector signed char) (a1), (vector signed char) (a2))), \ +__altivec_link_error_invalid_argument ()))) + #define vec_sum4s(a1, a2) \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vsum4ubs ((vector signed char) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, 
vector signed int, a2), \ - (vector signed int) __builtin_altivec_vsum4sbs ((vector signed char) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vsum4shs ((vector signed short) a1, (vector signed int) a2), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsum4ubs ((vector signed char) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsum4sbs ((vector signed char) (a1), (vector signed int) (a2))), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsum4shs ((vector signed short) (a1), (vector signed int) (a2))), \ __altivec_link_error_invalid_argument ()))) -#define vec_sum2s(a1, a2) __builtin_altivec_vsum2sws (a1, a2) +#define vec_vsum4shs(a1, a2) \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsum4shs ((vector signed short) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vsum4sbs(a1, a2) \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed int, (a2)), \ + ((vector signed int) __builtin_altivec_vsum4sbs ((vector signed char) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) -#define vec_sums(a1, a2) __builtin_altivec_vsumsws (a1, a2) +#define vec_vsum4ubs(a1, a2) \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned int, (a2)), \ + ((vector unsigned int) __builtin_altivec_vsum4ubs ((vector signed char) (a1), (vector signed int) (a2))), \ +__altivec_link_error_invalid_argument ()) -#define vec_trunc(a1) __builtin_altivec_vrfiz (a1) +#define vec_sum2s(a1, a2) __builtin_altivec_vsum2sws ((a1), (a2)) + +#define vec_sums(a1, a2) __builtin_altivec_vsumsws ((a1), 
(a2)) + +#define vec_trunc(a1) __builtin_altivec_vrfiz ((a1)) #define vec_unpackh(a1) \ -__ch (__un_args_eq (vector signed char, a1), \ - (vector signed short) __builtin_altivec_vupkhsb ((vector signed char) a1), \ -__ch (__un_args_eq (vector signed short, a1), \ - (vector unsigned int) __builtin_altivec_vupkhpx ((vector signed short) a1), \ -__ch (__un_args_eq (vector signed short, a1), \ - (vector signed int) __builtin_altivec_vupkhsh ((vector signed short) a1), \ +__ch (__un_args_eq (vector signed char, (a1)), \ + ((vector signed short) __builtin_altivec_vupkhsb ((vector signed char) (a1))), \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector unsigned int) __builtin_altivec_vupkhpx ((vector signed short) (a1))), \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector signed int) __builtin_altivec_vupkhsh ((vector signed short) (a1))), \ __altivec_link_error_invalid_argument ()))) +#define vec_vupkhsh(a1) \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector signed int) __builtin_altivec_vupkhsh ((vector signed short) (a1))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vupkhpx(a1) \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector unsigned int) __builtin_altivec_vupkhpx ((vector signed short) (a1))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vupkhsb(a1) \ +__ch (__un_args_eq (vector signed char, (a1)), \ + ((vector signed short) __builtin_altivec_vupkhsb ((vector signed char) (a1))), \ +__altivec_link_error_invalid_argument ()) + #define vec_unpackl(a1) \ -__ch (__un_args_eq (vector signed char, a1), \ - (vector signed short) __builtin_altivec_vupklsb ((vector signed char) a1), \ -__ch (__un_args_eq (vector signed short, a1), \ - (vector unsigned int) __builtin_altivec_vupklpx ((vector signed short) a1), \ -__ch (__un_args_eq (vector signed short, a1), \ - (vector signed int) __builtin_altivec_vupklsh ((vector signed short) a1), \ +__ch (__un_args_eq (vector signed char, (a1)), \ + 
((vector signed short) __builtin_altivec_vupklsb ((vector signed char) (a1))), \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector unsigned int) __builtin_altivec_vupklpx ((vector signed short) (a1))), \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector signed int) __builtin_altivec_vupklsh ((vector signed short) (a1))), \ __altivec_link_error_invalid_argument ()))) +#define vec_vupklsh(a1) \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector signed int) __builtin_altivec_vupklsh ((vector signed short) (a1))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vupklpx(a1) \ +__ch (__un_args_eq (vector signed short, (a1)), \ + ((vector unsigned int) __builtin_altivec_vupklpx ((vector signed short) (a1))), \ +__altivec_link_error_invalid_argument ()) + +#define vec_vupklsb(a1) \ +__ch (__un_args_eq (vector signed char, (a1)), \ + ((vector signed short) __builtin_altivec_vupklsb ((vector signed char) (a1))), \ +__altivec_link_error_invalid_argument ()) + #define vec_xor(a1, a2) \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector float) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector signed int, a2), \ - (vector float) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector float, a2), \ - (vector float) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector unsigned int) __builtin_altivec_vxor ((vector signed int) a1, (vector 
signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector unsigned int) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector unsigned short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector unsigned short) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector unsigned char) __builtin_altivec_vxor ((vector signed int) a1, (vector signed int) a2), \ - __altivec_link_error_invalid_argument ()))))))))))))))) +__ch (__bin_args_eq (vector float, ((a1)), vector float, ((a2))), \ + ((vector float) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector float, ((a1)), vector unsigned int, ((a2))), \ + ((vector float) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) 
((a2)))), \ +__ch (__bin_args_eq (vector unsigned int, ((a1)), vector float, ((a2))), \ + ((vector float) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed int, ((a1)), vector signed int, ((a2))), \ + ((vector signed int) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned int, ((a1)), vector unsigned int, ((a2))), \ + ((vector unsigned int) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed int, ((a1)), vector unsigned int, ((a2))), \ + ((vector unsigned int) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned int, ((a1)), vector signed int, ((a2))), \ + ((vector unsigned int) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned int, ((a1)), vector unsigned int, ((a2))), \ + ((vector unsigned int) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), vector unsigned short, ((a2))), \ + ((vector unsigned short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed short, ((a1)), vector unsigned short, ((a2))), \ + ((vector signed short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), vector signed short, ((a2))), \ + ((vector signed short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), vector unsigned short, ((a2))), \ + ((vector signed short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed short, ((a1)), vector signed short, 
((a2))), \ + ((vector signed short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed short, ((a1)), vector unsigned short, ((a2))), \ + ((vector unsigned short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), vector signed short, ((a2))), \ + ((vector unsigned short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned short, ((a1)), vector unsigned short, ((a2))), \ + ((vector unsigned short) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned char, ((a1)), vector unsigned char, ((a2))), \ + ((vector unsigned char) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed char, ((a1)), vector unsigned char, ((a2))), \ + ((vector unsigned char) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed char, ((a1)), vector signed char, ((a2))), \ + ((vector signed char) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned char, ((a1)), vector unsigned char, ((a2))), \ + ((vector signed char) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector signed char, ((a1)), vector unsigned char, ((a2))), \ + ((vector signed char) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ +__ch (__bin_args_eq (vector unsigned char, ((a1)), vector signed char, ((a2))), \ + ((vector signed char) __builtin_altivec_vxor ((vector signed int) ((a1)), (vector signed int) ((a2)))), \ + __altivec_link_error_invalid_argument ())))))))))))))))))))))) + +/* Predicates. 
*/ #define vec_all_eq(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector 
signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector 
signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpeqfp_p (__CR6_LT, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_all_ge(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), 
\ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector 
signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgefp_p (__CR6_EQ, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_all_gt(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed 
char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + 
__builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgtfp_p (__CR6_LT, (vector float) 
(a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_all_in(a1, a2) __builtin_altivec_vcmpbfp_p (a1, a2) +#define vec_all_in(a1, a2) __builtin_altivec_vcmpbfp_p (__CR6_EQ, (a1), (a2)) #define vec_all_le(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) 
__builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ, (vector signed short) (a1), 
(vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgefp_p (__CR6_LT, (vector float) (a2), (vector float) (a1)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_all_lt(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p 
((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch 
(__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgtfp_p (__CR6_LT, (vector float) (a2), (vector float) (a1)), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_all_nan(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_all_nan(a1) __builtin_altivec_vcmpeqfp_p (__CR6_EQ, (a1), (a1)) #define vec_all_ne(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector 
signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector 
float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + 
__builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpeqfp_p (__CR6_EQ, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_all_nge(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_all_nge(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_EQ, (a1), (a2)) -#define vec_all_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_all_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_EQ, (a1), (a2)) -#define vec_all_nle(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_all_nle(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_EQ, (a2), (a1)) -#define vec_all_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_all_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_EQ, (a2), (a1)) -#define vec_all_numeric(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_all_numeric(a1) __builtin_altivec_vcmpeqfp_p (__CR6_EQ, (a1), (a1)) #define vec_any_eq(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed 
short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ 
+ __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_ge(a1, a2) \ -__ch 
(__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch 
(__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq 
(vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_gt(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, 
(vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch 
(__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_le(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector 
signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgefp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ 
+__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), 
\ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgefp_p (__CR6_EQ_REV, (vector float) (a2), (vector float) (a1)), \ __altivec_link_error_invalid_argument ()))))))))))))) #define vec_any_lt(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtub_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p 
((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtuw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpgtsw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpgtfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpgtub_p (__CR6_EQ_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpgtsb_p (__CR6_EQ_REV, (vector signed char) (a2), (vector signed char) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpgtuh_p (__CR6_EQ_REV, (vector signed short) (a2), (vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpgtsh_p (__CR6_EQ_REV, (vector signed short) (a2), 
(vector signed short) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpgtuw_p (__CR6_EQ_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, (vector signed int) (a2), (vector signed int) (a1)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpgtfp_p (__CR6_EQ_REV, (vector float) (a2), (vector float) (a1)), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_any_nan(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_any_nan(a1) __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, a1, a1) #define vec_any_ne(a1, a2) \ -__ch (__bin_args_eq (vector signed char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector signed char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector unsigned char, a1, vector unsigned char, a2), \ - (vector signed int) __builtin_altivec_vcmpequb_p ((vector signed char) a1, (vector signed char) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, 
(vector signed short) a2), \ -__ch (__bin_args_eq (vector signed short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector signed short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector unsigned short, a1, vector unsigned short, a2), \ - (vector signed int) __builtin_altivec_vcmpequh_p ((vector signed short) a1, (vector signed short) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector signed int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector signed int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector unsigned int, a1, vector unsigned int, a2), \ - (vector signed int) __builtin_altivec_vcmpequw_p ((vector signed int) a1, (vector signed int) a2), \ -__ch (__bin_args_eq (vector float, a1, vector float, a2), \ - (vector signed int) __builtin_altivec_vcmpeqfp_p ((vector float) a1, (vector float) a2), \ +__ch (__bin_args_eq (vector signed char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector unsigned char, (a1), vector signed char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch 
(__bin_args_eq (vector unsigned char, (a1), vector unsigned char, (a2)), \ + __builtin_altivec_vcmpequb_p (__CR6_LT_REV, (vector signed char) (a1), (vector signed char) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector signed short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector unsigned short, (a1), vector unsigned short, (a2)), \ + __builtin_altivec_vcmpequh_p (__CR6_LT_REV, (vector signed short) (a1), (vector signed short) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector signed int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector signed int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector unsigned int, (a1), vector unsigned int, (a2)), \ + __builtin_altivec_vcmpequw_p (__CR6_LT_REV, (vector signed int) (a1), (vector signed int) (a2)), \ +__ch (__bin_args_eq (vector float, (a1), vector float, (a2)), \ + __builtin_altivec_vcmpeqfp_p (__CR6_LT_REV, (vector float) (a1), (vector float) (a2)), \ __altivec_link_error_invalid_argument ()))))))))))))) -#define vec_any_nge(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_any_nge(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, (a1), (a2)) 
-#define vec_any_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_any_ngt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, (a1), (a2)) -#define vec_any_nle(a1, a2) __builtin_altivec_vcmpgefp_p (a1, a2) +#define vec_any_nle(a1, a2) __builtin_altivec_vcmpgefp_p (__CR6_LT_REV, (a2), (a1)) -#define vec_any_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (a1, a2) +#define vec_any_nlt(a1, a2) __builtin_altivec_vcmpgtfp_p (__CR6_LT_REV, (a2), (a1)) -#define vec_any_numeric(a1) __builtin_altivec_vcmpeqfp_p (a1) +#define vec_any_numeric(a1) __builtin_altivec_vcmpeqfp_p (__CR6_EQ_REV, (a1), (a1)) -#define vec_any_out(a1, a2) __builtin_altivec_vcmpbfp_p (a1, a2) +#define vec_any_out(a1, a2) __builtin_altivec_vcmpbfp_p (__CR6_EQ_REV, (a1), (a2)) #endif /* __cplusplus */ diff --git a/contrib/gcc/config/rs6000/crtsavres.asm b/contrib/gcc/config/rs6000/crtsavres.asm new file mode 100644 index 0000000..0c65182 --- /dev/null +++ b/contrib/gcc/config/rs6000/crtsavres.asm @@ -0,0 +1,407 @@ +/* + * Special support for eabi and SVR4 + * + * Copyright (C) 1995, 1996, 1998, 2000, 2001 Free Software Foundation, Inc. + * Written By Michael Meissner + * 64-bit support written by David Edelsohn + * + * This file is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the + * Free Software Foundation; either version 2, or (at your option) any + * later version. + * + * In addition to the permissions in the GNU General Public License, the + * Free Software Foundation gives you unlimited permission to link the + * compiled version of this file with other programs, and to distribute + * those programs without any restriction coming from the use of this + * file. (The General Public License restrictions do apply in other + * respects; for example, they cover modification of the file, and + * distribution when not linked into another program.) 
+ * + * This file is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; see the file COPYING. If not, write to + * the Free Software Foundation, 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + * + * As a special exception, if you link this library with files + * compiled with GCC to produce an executable, this does not cause + * the resulting executable to be covered by the GNU General Public License. + * This exception does not however invalidate any other reasons why + * the executable file might be covered by the GNU General Public License. + */ + +/* Do any initializations needed for the eabi environment */ + + .file "crtsavres.asm" + .section ".text" + #include "ppc-asm.h" + +#ifndef __powerpc64__ + +/* Routines for saving floating point registers, called by the compiler. */ +/* Called with r11 pointing to the stack header word of the caller of the */ +/* function, just beyond the end of the floating point save area. 
*/ + +FUNC_START(_savefpr_14) stfd 14,-144(11) /* save fp registers */ +FUNC_START(_savefpr_15) stfd 15,-136(11) +FUNC_START(_savefpr_16) stfd 16,-128(11) +FUNC_START(_savefpr_17) stfd 17,-120(11) +FUNC_START(_savefpr_18) stfd 18,-112(11) +FUNC_START(_savefpr_19) stfd 19,-104(11) +FUNC_START(_savefpr_20) stfd 20,-96(11) +FUNC_START(_savefpr_21) stfd 21,-88(11) +FUNC_START(_savefpr_22) stfd 22,-80(11) +FUNC_START(_savefpr_23) stfd 23,-72(11) +FUNC_START(_savefpr_24) stfd 24,-64(11) +FUNC_START(_savefpr_25) stfd 25,-56(11) +FUNC_START(_savefpr_26) stfd 26,-48(11) +FUNC_START(_savefpr_27) stfd 27,-40(11) +FUNC_START(_savefpr_28) stfd 28,-32(11) +FUNC_START(_savefpr_29) stfd 29,-24(11) +FUNC_START(_savefpr_30) stfd 30,-16(11) +FUNC_START(_savefpr_31) stfd 31,-8(11) + blr +FUNC_END(_savefpr_31) +FUNC_END(_savefpr_30) +FUNC_END(_savefpr_29) +FUNC_END(_savefpr_28) +FUNC_END(_savefpr_27) +FUNC_END(_savefpr_26) +FUNC_END(_savefpr_25) +FUNC_END(_savefpr_24) +FUNC_END(_savefpr_23) +FUNC_END(_savefpr_22) +FUNC_END(_savefpr_21) +FUNC_END(_savefpr_20) +FUNC_END(_savefpr_19) +FUNC_END(_savefpr_18) +FUNC_END(_savefpr_17) +FUNC_END(_savefpr_16) +FUNC_END(_savefpr_15) +FUNC_END(_savefpr_14) + +/* Routines for saving integer registers, called by the compiler. */ +/* Called with r11 pointing to the stack header word of the caller of the */ +/* function, just beyond the end of the integer save area. 
*/ + +FUNC_START(_savegpr_14) stw 14,-72(11) /* save gp registers */ +FUNC_START(_savegpr_15) stw 15,-68(11) +FUNC_START(_savegpr_16) stw 16,-64(11) +FUNC_START(_savegpr_17) stw 17,-60(11) +FUNC_START(_savegpr_18) stw 18,-56(11) +FUNC_START(_savegpr_19) stw 19,-52(11) +FUNC_START(_savegpr_20) stw 20,-48(11) +FUNC_START(_savegpr_21) stw 21,-44(11) +FUNC_START(_savegpr_22) stw 22,-40(11) +FUNC_START(_savegpr_23) stw 23,-36(11) +FUNC_START(_savegpr_24) stw 24,-32(11) +FUNC_START(_savegpr_25) stw 25,-28(11) +FUNC_START(_savegpr_26) stw 26,-24(11) +FUNC_START(_savegpr_27) stw 27,-20(11) +FUNC_START(_savegpr_28) stw 28,-16(11) +FUNC_START(_savegpr_29) stw 29,-12(11) +FUNC_START(_savegpr_30) stw 30,-8(11) +FUNC_START(_savegpr_31) stw 31,-4(11) + blr +FUNC_END(_savegpr_31) +FUNC_END(_savegpr_30) +FUNC_END(_savegpr_29) +FUNC_END(_savegpr_28) +FUNC_END(_savegpr_27) +FUNC_END(_savegpr_26) +FUNC_END(_savegpr_25) +FUNC_END(_savegpr_24) +FUNC_END(_savegpr_23) +FUNC_END(_savegpr_22) +FUNC_END(_savegpr_21) +FUNC_END(_savegpr_20) +FUNC_END(_savegpr_19) +FUNC_END(_savegpr_18) +FUNC_END(_savegpr_17) +FUNC_END(_savegpr_16) +FUNC_END(_savegpr_15) +FUNC_END(_savegpr_14) + +/* Routines for restoring floating point registers, called by the compiler. */ +/* Called with r11 pointing to the stack header word of the caller of the */ +/* function, just beyond the end of the floating point save area. 
*/ + +FUNC_START(_restfpr_14) lfd 14,-144(11) /* restore fp registers */ +FUNC_START(_restfpr_15) lfd 15,-136(11) +FUNC_START(_restfpr_16) lfd 16,-128(11) +FUNC_START(_restfpr_17) lfd 17,-120(11) +FUNC_START(_restfpr_18) lfd 18,-112(11) +FUNC_START(_restfpr_19) lfd 19,-104(11) +FUNC_START(_restfpr_20) lfd 20,-96(11) +FUNC_START(_restfpr_21) lfd 21,-88(11) +FUNC_START(_restfpr_22) lfd 22,-80(11) +FUNC_START(_restfpr_23) lfd 23,-72(11) +FUNC_START(_restfpr_24) lfd 24,-64(11) +FUNC_START(_restfpr_25) lfd 25,-56(11) +FUNC_START(_restfpr_26) lfd 26,-48(11) +FUNC_START(_restfpr_27) lfd 27,-40(11) +FUNC_START(_restfpr_28) lfd 28,-32(11) +FUNC_START(_restfpr_29) lfd 29,-24(11) +FUNC_START(_restfpr_30) lfd 30,-16(11) +FUNC_START(_restfpr_31) lfd 31,-8(11) + blr +FUNC_END(_restfpr_31) +FUNC_END(_restfpr_30) +FUNC_END(_restfpr_29) +FUNC_END(_restfpr_28) +FUNC_END(_restfpr_27) +FUNC_END(_restfpr_26) +FUNC_END(_restfpr_25) +FUNC_END(_restfpr_24) +FUNC_END(_restfpr_23) +FUNC_END(_restfpr_22) +FUNC_END(_restfpr_21) +FUNC_END(_restfpr_20) +FUNC_END(_restfpr_19) +FUNC_END(_restfpr_18) +FUNC_END(_restfpr_17) +FUNC_END(_restfpr_16) +FUNC_END(_restfpr_15) +FUNC_END(_restfpr_14) + +/* Routines for restoring integer registers, called by the compiler. */ +/* Called with r11 pointing to the stack header word of the caller of the */ +/* function, just beyond the end of the integer restore area. 
*/ + +FUNC_START(_restgpr_14) lwz 14,-72(11) /* restore gp registers */ +FUNC_START(_restgpr_15) lwz 15,-68(11) +FUNC_START(_restgpr_16) lwz 16,-64(11) +FUNC_START(_restgpr_17) lwz 17,-60(11) +FUNC_START(_restgpr_18) lwz 18,-56(11) +FUNC_START(_restgpr_19) lwz 19,-52(11) +FUNC_START(_restgpr_20) lwz 20,-48(11) +FUNC_START(_restgpr_21) lwz 21,-44(11) +FUNC_START(_restgpr_22) lwz 22,-40(11) +FUNC_START(_restgpr_23) lwz 23,-36(11) +FUNC_START(_restgpr_24) lwz 24,-32(11) +FUNC_START(_restgpr_25) lwz 25,-28(11) +FUNC_START(_restgpr_26) lwz 26,-24(11) +FUNC_START(_restgpr_27) lwz 27,-20(11) +FUNC_START(_restgpr_28) lwz 28,-16(11) +FUNC_START(_restgpr_29) lwz 29,-12(11) +FUNC_START(_restgpr_30) lwz 30,-8(11) +FUNC_START(_restgpr_31) lwz 31,-4(11) + blr +FUNC_END(_restgpr_31) +FUNC_END(_restgpr_30) +FUNC_END(_restgpr_29) +FUNC_END(_restgpr_28) +FUNC_END(_restgpr_27) +FUNC_END(_restgpr_26) +FUNC_END(_restgpr_25) +FUNC_END(_restgpr_24) +FUNC_END(_restgpr_23) +FUNC_END(_restgpr_22) +FUNC_END(_restgpr_21) +FUNC_END(_restgpr_20) +FUNC_END(_restgpr_19) +FUNC_END(_restgpr_18) +FUNC_END(_restgpr_17) +FUNC_END(_restgpr_16) +FUNC_END(_restgpr_15) +FUNC_END(_restgpr_14) + +/* Routines for restoring floating point registers, called by the compiler. */ +/* Called with r11 pointing to the stack header word of the caller of the */ +/* function, just beyond the end of the floating point save area. 
*/ +/* In addition to restoring the fp registers, it will return to the caller's */ +/* caller */ + +FUNC_START(_restfpr_14_x) lfd 14,-144(11) /* restore fp registers */ +FUNC_START(_restfpr_15_x) lfd 15,-136(11) +FUNC_START(_restfpr_16_x) lfd 16,-128(11) +FUNC_START(_restfpr_17_x) lfd 17,-120(11) +FUNC_START(_restfpr_18_x) lfd 18,-112(11) +FUNC_START(_restfpr_19_x) lfd 19,-104(11) +FUNC_START(_restfpr_20_x) lfd 20,-96(11) +FUNC_START(_restfpr_21_x) lfd 21,-88(11) +FUNC_START(_restfpr_22_x) lfd 22,-80(11) +FUNC_START(_restfpr_23_x) lfd 23,-72(11) +FUNC_START(_restfpr_24_x) lfd 24,-64(11) +FUNC_START(_restfpr_25_x) lfd 25,-56(11) +FUNC_START(_restfpr_26_x) lfd 26,-48(11) +FUNC_START(_restfpr_27_x) lfd 27,-40(11) +FUNC_START(_restfpr_28_x) lfd 28,-32(11) +FUNC_START(_restfpr_29_x) lfd 29,-24(11) +FUNC_START(_restfpr_30_x) lfd 30,-16(11) +FUNC_START(_restfpr_31_x) lwz 0,4(11) + lfd 31,-8(11) + mtlr 0 + mr 1,11 + blr +FUNC_END(_restfpr_31_x) +FUNC_END(_restfpr_30_x) +FUNC_END(_restfpr_29_x) +FUNC_END(_restfpr_28_x) +FUNC_END(_restfpr_27_x) +FUNC_END(_restfpr_26_x) +FUNC_END(_restfpr_25_x) +FUNC_END(_restfpr_24_x) +FUNC_END(_restfpr_23_x) +FUNC_END(_restfpr_22_x) +FUNC_END(_restfpr_21_x) +FUNC_END(_restfpr_20_x) +FUNC_END(_restfpr_19_x) +FUNC_END(_restfpr_18_x) +FUNC_END(_restfpr_17_x) +FUNC_END(_restfpr_16_x) +FUNC_END(_restfpr_15_x) +FUNC_END(_restfpr_14_x) + +/* Routines for restoring integer registers, called by the compiler. */ +/* Called with r11 pointing to the stack header word of the caller of the */ +/* function, just beyond the end of the integer restore area. 
*/ + +FUNC_START(_restgpr_14_x) lwz 14,-72(11) /* restore gp registers */ +FUNC_START(_restgpr_15_x) lwz 15,-68(11) +FUNC_START(_restgpr_16_x) lwz 16,-64(11) +FUNC_START(_restgpr_17_x) lwz 17,-60(11) +FUNC_START(_restgpr_18_x) lwz 18,-56(11) +FUNC_START(_restgpr_19_x) lwz 19,-52(11) +FUNC_START(_restgpr_20_x) lwz 20,-48(11) +FUNC_START(_restgpr_21_x) lwz 21,-44(11) +FUNC_START(_restgpr_22_x) lwz 22,-40(11) +FUNC_START(_restgpr_23_x) lwz 23,-36(11) +FUNC_START(_restgpr_24_x) lwz 24,-32(11) +FUNC_START(_restgpr_25_x) lwz 25,-28(11) +FUNC_START(_restgpr_26_x) lwz 26,-24(11) +FUNC_START(_restgpr_27_x) lwz 27,-20(11) +FUNC_START(_restgpr_28_x) lwz 28,-16(11) +FUNC_START(_restgpr_29_x) lwz 29,-12(11) +FUNC_START(_restgpr_30_x) lwz 30,-8(11) +FUNC_START(_restgpr_31_x) lwz 0,4(11) + lwz 31,-4(11) + mtlr 0 + mr 1,11 + blr +FUNC_END(_restgpr_31_x) +FUNC_END(_restgpr_30_x) +FUNC_END(_restgpr_29_x) +FUNC_END(_restgpr_28_x) +FUNC_END(_restgpr_27_x) +FUNC_END(_restgpr_26_x) +FUNC_END(_restgpr_25_x) +FUNC_END(_restgpr_24_x) +FUNC_END(_restgpr_23_x) +FUNC_END(_restgpr_22_x) +FUNC_END(_restgpr_21_x) +FUNC_END(_restgpr_20_x) +FUNC_END(_restgpr_19_x) +FUNC_END(_restgpr_18_x) +FUNC_END(_restgpr_17_x) +FUNC_END(_restgpr_16_x) +FUNC_END(_restgpr_15_x) +FUNC_END(_restgpr_14_x) + +#else /* __powerpc64__ */ + + .section ".text" + .align 2 + +/* Routines for saving floating point registers, called by the compiler. 
*/ + +.fsav: +FUNC_START(_savef14) stfd 14,-144(1) /* save fp registers */ +FUNC_START(_savef15) stfd 15,-136(1) +FUNC_START(_savef16) stfd 16,-128(1) +FUNC_START(_savef17) stfd 17,-120(1) +FUNC_START(_savef18) stfd 18,-112(1) +FUNC_START(_savef19) stfd 19,-104(1) +FUNC_START(_savef20) stfd 20,-96(1) +FUNC_START(_savef21) stfd 21,-88(1) +FUNC_START(_savef22) stfd 22,-80(1) +FUNC_START(_savef23) stfd 23,-72(1) +FUNC_START(_savef24) stfd 24,-64(1) +FUNC_START(_savef25) stfd 25,-56(1) +FUNC_START(_savef26) stfd 26,-48(1) +FUNC_START(_savef27) stfd 27,-40(1) +FUNC_START(_savef28) stfd 28,-32(1) +FUNC_START(_savef29) stfd 29,-24(1) +FUNC_START(_savef30) stfd 30,-16(1) +FUNC_START(_savef31) stfd 31,-8(1) + blr +.LTfsav: + .long 0 + .byte 0,12,0,0,0,0,0,0 + .long 0 + .long .LTfsav-.fsav + .short 4 + .ascii "fsav" +FUNC_END(_savef31) +FUNC_END(_savef30) +FUNC_END(_savef29) +FUNC_END(_savef28) +FUNC_END(_savef27) +FUNC_END(_savef26) +FUNC_END(_savef25) +FUNC_END(_savef24) +FUNC_END(_savef23) +FUNC_END(_savef22) +FUNC_END(_savef21) +FUNC_END(_savef20) +FUNC_END(_savef19) +FUNC_END(_savef18) +FUNC_END(_savef17) +FUNC_END(_savef16) +FUNC_END(_savef15) +FUNC_END(_savef14) + +/* Routines for restoring floating point registers, called by the compiler. 
*/ + +.fres: +FUNC_START(_restf14) lfd 14,-144(1) /* restore fp registers */ +FUNC_START(_restf15) lfd 15,-136(1) +FUNC_START(_restf16) lfd 16,-128(1) +FUNC_START(_restf17) lfd 17,-120(1) +FUNC_START(_restf18) lfd 18,-112(1) +FUNC_START(_restf19) lfd 19,-104(1) +FUNC_START(_restf20) lfd 20,-96(1) +FUNC_START(_restf21) lfd 21,-88(1) +FUNC_START(_restf22) lfd 22,-80(1) +FUNC_START(_restf23) lfd 23,-72(1) +FUNC_START(_restf24) lfd 24,-64(1) +FUNC_START(_restf25) lfd 25,-56(1) +FUNC_START(_restf26) lfd 26,-48(1) +FUNC_START(_restf27) lfd 27,-40(1) +FUNC_START(_restf28) lfd 28,-32(1) +FUNC_START(_restf29) lfd 29,-24(1) +FUNC_START(_restf30) lfd 30,-16(1) +FUNC_START(_restf31) lfd 31,-8(1) + blr +.LTfres: + .long 0 + .byte 0,12,0,0,0,0,0,0 + .long 0 + .long .LTfres-.fres + .short 4 + .ascii "fres" +FUNC_END(_restf31) +FUNC_END(_restf30) +FUNC_END(_restf29) +FUNC_END(_restf28) +FUNC_END(_restf27) +FUNC_END(_restf26) +FUNC_END(_restf25) +FUNC_END(_restf24) +FUNC_END(_restf23) +FUNC_END(_restf22) +FUNC_END(_restf21) +FUNC_END(_restf20) +FUNC_END(_restf19) +FUNC_END(_restf18) +FUNC_END(_restf17) +FUNC_END(_restf16) +FUNC_END(_restf15) +FUNC_END(_restf14) + +#endif diff --git a/contrib/gcc/config/rs6000/darwin.h b/contrib/gcc/config/rs6000/darwin.h index cb6b4b77..81c24e7 100644 --- a/contrib/gcc/config/rs6000/darwin.h +++ b/contrib/gcc/config/rs6000/darwin.h @@ -35,6 +35,9 @@ Boston, MA 02111-1307, USA. */ #define TARGET_TOC 0 #define TARGET_NO_TOC 1 +/* Handle #pragma weak and #pragma pack. */ +#define HANDLE_SYSV_PRAGMA + /* The Darwin ABI always includes AltiVec, can't be (validly) turned off. */ @@ -57,8 +60,8 @@ Boston, MA 02111-1307, USA. */ #undef FRAME_POINTER_REGNUM #define FRAME_POINTER_REGNUM 30 -#undef PIC_OFFSET_TABLE_REGNUM -#define PIC_OFFSET_TABLE_REGNUM 31 +#undef RS6000_PIC_OFFSET_TABLE_REGNUM +#define RS6000_PIC_OFFSET_TABLE_REGNUM 31 /* Pad the outgoing args area to 16 bytes instead of the usual 8. */ @@ -218,7 +221,10 @@ Boston, MA 02111-1307, USA. 
*/ && TYPE_FIELDS (STRUCT) != 0 \ && DECL_MODE (TYPE_FIELDS (STRUCT)) == DFmode \ ? MAX (MAX ((COMPUTED), (SPECIFIED)), 64) \ + : (TARGET_ALTIVEC && TREE_CODE (STRUCT) == VECTOR_TYPE) \ + ? MAX (MAX ((COMPUTED), (SPECIFIED)), 128) \ : MAX ((COMPUTED), (SPECIFIED))) + /* XXX: Darwin supports neither .quad, or .llong, but it also doesn't support 64 bit powerpc either, so this just keeps things happy. */ #define DOUBLE_INT_ASM_OP "\t.quad\t" @@ -227,3 +233,7 @@ Boston, MA 02111-1307, USA. */ space/speed. */ #undef MAX_LONG_TYPE_SIZE #define MAX_LONG_TYPE_SIZE 32 + +/* For binary compatibility with 2.95; Darwin C APIs use bool from + stdbool.h, which was an int-sized enum in 2.95. */ +#define BOOL_TYPE_SIZE INT_TYPE_SIZE diff --git a/contrib/gcc/config/rs6000/eabi.asm b/contrib/gcc/config/rs6000/eabi.asm index 85f2e1b..0808e9c 100644 --- a/contrib/gcc/config/rs6000/eabi.asm +++ b/contrib/gcc/config/rs6000/eabi.asm @@ -3,7 +3,6 @@ * * Copyright (C) 1995, 1996, 1998, 2000, 2001 Free Software Foundation, Inc. * Written By Michael Meissner - * 64-bit support written by David Edelsohn * * This file is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the @@ -297,365 +296,4 @@ FUNC_START(__eabi_uconvert) FUNC_END(__eabi_uconvert) -/* Routines for saving floating point registers, called by the compiler. */ -/* Called with r11 pointing to the stack header word of the caller of the */ -/* function, just beyond the end of the floating point save area. 
*/ - -FUNC_START(_savefpr_14) stfd 14,-144(11) /* save fp registers */ -FUNC_START(_savefpr_15) stfd 15,-136(11) -FUNC_START(_savefpr_16) stfd 16,-128(11) -FUNC_START(_savefpr_17) stfd 17,-120(11) -FUNC_START(_savefpr_18) stfd 18,-112(11) -FUNC_START(_savefpr_19) stfd 19,-104(11) -FUNC_START(_savefpr_20) stfd 20,-96(11) -FUNC_START(_savefpr_21) stfd 21,-88(11) -FUNC_START(_savefpr_22) stfd 22,-80(11) -FUNC_START(_savefpr_23) stfd 23,-72(11) -FUNC_START(_savefpr_24) stfd 24,-64(11) -FUNC_START(_savefpr_25) stfd 25,-56(11) -FUNC_START(_savefpr_26) stfd 26,-48(11) -FUNC_START(_savefpr_27) stfd 27,-40(11) -FUNC_START(_savefpr_28) stfd 28,-32(11) -FUNC_START(_savefpr_29) stfd 29,-24(11) -FUNC_START(_savefpr_30) stfd 30,-16(11) -FUNC_START(_savefpr_31) stfd 31,-8(11) - blr -FUNC_END(_savefpr_31) -FUNC_END(_savefpr_30) -FUNC_END(_savefpr_29) -FUNC_END(_savefpr_28) -FUNC_END(_savefpr_27) -FUNC_END(_savefpr_26) -FUNC_END(_savefpr_25) -FUNC_END(_savefpr_24) -FUNC_END(_savefpr_23) -FUNC_END(_savefpr_22) -FUNC_END(_savefpr_21) -FUNC_END(_savefpr_20) -FUNC_END(_savefpr_19) -FUNC_END(_savefpr_18) -FUNC_END(_savefpr_17) -FUNC_END(_savefpr_16) -FUNC_END(_savefpr_15) -FUNC_END(_savefpr_14) - -/* Routines for saving integer registers, called by the compiler. */ -/* Called with r11 pointing to the stack header word of the caller of the */ -/* function, just beyond the end of the integer save area. 
*/ - -FUNC_START(_savegpr_14) stw 14,-72(11) /* save gp registers */ -FUNC_START(_savegpr_15) stw 15,-68(11) -FUNC_START(_savegpr_16) stw 16,-64(11) -FUNC_START(_savegpr_17) stw 17,-60(11) -FUNC_START(_savegpr_18) stw 18,-56(11) -FUNC_START(_savegpr_19) stw 19,-52(11) -FUNC_START(_savegpr_20) stw 20,-48(11) -FUNC_START(_savegpr_21) stw 21,-44(11) -FUNC_START(_savegpr_22) stw 22,-40(11) -FUNC_START(_savegpr_23) stw 23,-36(11) -FUNC_START(_savegpr_24) stw 24,-32(11) -FUNC_START(_savegpr_25) stw 25,-28(11) -FUNC_START(_savegpr_26) stw 26,-24(11) -FUNC_START(_savegpr_27) stw 27,-20(11) -FUNC_START(_savegpr_28) stw 28,-16(11) -FUNC_START(_savegpr_29) stw 29,-12(11) -FUNC_START(_savegpr_30) stw 30,-8(11) -FUNC_START(_savegpr_31) stw 31,-4(11) - blr -FUNC_END(_savegpr_31) -FUNC_END(_savegpr_30) -FUNC_END(_savegpr_29) -FUNC_END(_savegpr_28) -FUNC_END(_savegpr_27) -FUNC_END(_savegpr_26) -FUNC_END(_savegpr_25) -FUNC_END(_savegpr_24) -FUNC_END(_savegpr_23) -FUNC_END(_savegpr_22) -FUNC_END(_savegpr_21) -FUNC_END(_savegpr_20) -FUNC_END(_savegpr_19) -FUNC_END(_savegpr_18) -FUNC_END(_savegpr_17) -FUNC_END(_savegpr_16) -FUNC_END(_savegpr_15) -FUNC_END(_savegpr_14) - -/* Routines for restoring floating point registers, called by the compiler. */ -/* Called with r11 pointing to the stack header word of the caller of the */ -/* function, just beyond the end of the floating point save area. 
*/ - -FUNC_START(_restfpr_14) lfd 14,-144(11) /* restore fp registers */ -FUNC_START(_restfpr_15) lfd 15,-136(11) -FUNC_START(_restfpr_16) lfd 16,-128(11) -FUNC_START(_restfpr_17) lfd 17,-120(11) -FUNC_START(_restfpr_18) lfd 18,-112(11) -FUNC_START(_restfpr_19) lfd 19,-104(11) -FUNC_START(_restfpr_20) lfd 20,-96(11) -FUNC_START(_restfpr_21) lfd 21,-88(11) -FUNC_START(_restfpr_22) lfd 22,-80(11) -FUNC_START(_restfpr_23) lfd 23,-72(11) -FUNC_START(_restfpr_24) lfd 24,-64(11) -FUNC_START(_restfpr_25) lfd 25,-56(11) -FUNC_START(_restfpr_26) lfd 26,-48(11) -FUNC_START(_restfpr_27) lfd 27,-40(11) -FUNC_START(_restfpr_28) lfd 28,-32(11) -FUNC_START(_restfpr_29) lfd 29,-24(11) -FUNC_START(_restfpr_30) lfd 30,-16(11) -FUNC_START(_restfpr_31) lfd 31,-8(11) - blr -FUNC_END(_restfpr_31) -FUNC_END(_restfpr_30) -FUNC_END(_restfpr_29) -FUNC_END(_restfpr_28) -FUNC_END(_restfpr_27) -FUNC_END(_restfpr_26) -FUNC_END(_restfpr_25) -FUNC_END(_restfpr_24) -FUNC_END(_restfpr_23) -FUNC_END(_restfpr_22) -FUNC_END(_restfpr_21) -FUNC_END(_restfpr_20) -FUNC_END(_restfpr_19) -FUNC_END(_restfpr_18) -FUNC_END(_restfpr_17) -FUNC_END(_restfpr_16) -FUNC_END(_restfpr_15) -FUNC_END(_restfpr_14) - -/* Routines for restoring integer registers, called by the compiler. */ -/* Called with r11 pointing to the stack header word of the caller of the */ -/* function, just beyond the end of the integer restore area. 
*/ - -FUNC_START(_restgpr_14) lwz 14,-72(11) /* restore gp registers */ -FUNC_START(_restgpr_15) lwz 15,-68(11) -FUNC_START(_restgpr_16) lwz 16,-64(11) -FUNC_START(_restgpr_17) lwz 17,-60(11) -FUNC_START(_restgpr_18) lwz 18,-56(11) -FUNC_START(_restgpr_19) lwz 19,-52(11) -FUNC_START(_restgpr_20) lwz 20,-48(11) -FUNC_START(_restgpr_21) lwz 21,-44(11) -FUNC_START(_restgpr_22) lwz 22,-40(11) -FUNC_START(_restgpr_23) lwz 23,-36(11) -FUNC_START(_restgpr_24) lwz 24,-32(11) -FUNC_START(_restgpr_25) lwz 25,-28(11) -FUNC_START(_restgpr_26) lwz 26,-24(11) -FUNC_START(_restgpr_27) lwz 27,-20(11) -FUNC_START(_restgpr_28) lwz 28,-16(11) -FUNC_START(_restgpr_29) lwz 29,-12(11) -FUNC_START(_restgpr_30) lwz 30,-8(11) -FUNC_START(_restgpr_31) lwz 31,-4(11) - blr -FUNC_END(_restgpr_31) -FUNC_END(_restgpr_30) -FUNC_END(_restgpr_29) -FUNC_END(_restgpr_28) -FUNC_END(_restgpr_27) -FUNC_END(_restgpr_26) -FUNC_END(_restgpr_25) -FUNC_END(_restgpr_24) -FUNC_END(_restgpr_23) -FUNC_END(_restgpr_22) -FUNC_END(_restgpr_21) -FUNC_END(_restgpr_20) -FUNC_END(_restgpr_19) -FUNC_END(_restgpr_18) -FUNC_END(_restgpr_17) -FUNC_END(_restgpr_16) -FUNC_END(_restgpr_15) -FUNC_END(_restgpr_14) - -/* Routines for restoring floating point registers, called by the compiler. */ -/* Called with r11 pointing to the stack header word of the caller of the */ -/* function, just beyond the end of the floating point save area. 
*/ -/* In addition to restoring the fp registers, it will return to the caller's */ -/* caller */ - -FUNC_START(_restfpr_14_x) lfd 14,-144(11) /* restore fp registers */ -FUNC_START(_restfpr_15_x) lfd 15,-136(11) -FUNC_START(_restfpr_16_x) lfd 16,-128(11) -FUNC_START(_restfpr_17_x) lfd 17,-120(11) -FUNC_START(_restfpr_18_x) lfd 18,-112(11) -FUNC_START(_restfpr_19_x) lfd 19,-104(11) -FUNC_START(_restfpr_20_x) lfd 20,-96(11) -FUNC_START(_restfpr_21_x) lfd 21,-88(11) -FUNC_START(_restfpr_22_x) lfd 22,-80(11) -FUNC_START(_restfpr_23_x) lfd 23,-72(11) -FUNC_START(_restfpr_24_x) lfd 24,-64(11) -FUNC_START(_restfpr_25_x) lfd 25,-56(11) -FUNC_START(_restfpr_26_x) lfd 26,-48(11) -FUNC_START(_restfpr_27_x) lfd 27,-40(11) -FUNC_START(_restfpr_28_x) lfd 28,-32(11) -FUNC_START(_restfpr_29_x) lfd 29,-24(11) -FUNC_START(_restfpr_30_x) lfd 30,-16(11) -FUNC_START(_restfpr_31_x) lwz 0,4(11) - lfd 31,-8(11) - mtlr 0 - mr 1,11 - blr -FUNC_END(_restfpr_31_x) -FUNC_END(_restfpr_30_x) -FUNC_END(_restfpr_29_x) -FUNC_END(_restfpr_28_x) -FUNC_END(_restfpr_27_x) -FUNC_END(_restfpr_26_x) -FUNC_END(_restfpr_25_x) -FUNC_END(_restfpr_24_x) -FUNC_END(_restfpr_23_x) -FUNC_END(_restfpr_22_x) -FUNC_END(_restfpr_21_x) -FUNC_END(_restfpr_20_x) -FUNC_END(_restfpr_19_x) -FUNC_END(_restfpr_18_x) -FUNC_END(_restfpr_17_x) -FUNC_END(_restfpr_16_x) -FUNC_END(_restfpr_15_x) -FUNC_END(_restfpr_14_x) - -/* Routines for restoring integer registers, called by the compiler. */ -/* Called with r11 pointing to the stack header word of the caller of the */ -/* function, just beyond the end of the integer restore area. 
*/ - -FUNC_START(_restgpr_14_x) lwz 14,-72(11) /* restore gp registers */ -FUNC_START(_restgpr_15_x) lwz 15,-68(11) -FUNC_START(_restgpr_16_x) lwz 16,-64(11) -FUNC_START(_restgpr_17_x) lwz 17,-60(11) -FUNC_START(_restgpr_18_x) lwz 18,-56(11) -FUNC_START(_restgpr_19_x) lwz 19,-52(11) -FUNC_START(_restgpr_20_x) lwz 20,-48(11) -FUNC_START(_restgpr_21_x) lwz 21,-44(11) -FUNC_START(_restgpr_22_x) lwz 22,-40(11) -FUNC_START(_restgpr_23_x) lwz 23,-36(11) -FUNC_START(_restgpr_24_x) lwz 24,-32(11) -FUNC_START(_restgpr_25_x) lwz 25,-28(11) -FUNC_START(_restgpr_26_x) lwz 26,-24(11) -FUNC_START(_restgpr_27_x) lwz 27,-20(11) -FUNC_START(_restgpr_28_x) lwz 28,-16(11) -FUNC_START(_restgpr_29_x) lwz 29,-12(11) -FUNC_START(_restgpr_30_x) lwz 30,-8(11) -FUNC_START(_restgpr_31_x) lwz 0,4(11) - lwz 31,-4(11) - mtlr 0 - mr 1,11 - blr -FUNC_END(_restgpr_31_x) -FUNC_END(_restgpr_30_x) -FUNC_END(_restgpr_29_x) -FUNC_END(_restgpr_28_x) -FUNC_END(_restgpr_27_x) -FUNC_END(_restgpr_26_x) -FUNC_END(_restgpr_25_x) -FUNC_END(_restgpr_24_x) -FUNC_END(_restgpr_23_x) -FUNC_END(_restgpr_22_x) -FUNC_END(_restgpr_21_x) -FUNC_END(_restgpr_20_x) -FUNC_END(_restgpr_19_x) -FUNC_END(_restgpr_18_x) -FUNC_END(_restgpr_17_x) -FUNC_END(_restgpr_16_x) -FUNC_END(_restgpr_15_x) -FUNC_END(_restgpr_14_x) - -#else /* __powerpc64__ */ - - .section ".text" - .align 2 - -/* Routines for saving floating point registers, called by the compiler. 
*/ - -.fsav: -FUNC_START(_savef14) stfd 14,-144(1) /* save fp registers */ -FUNC_START(_savef15) stfd 15,-136(1) -FUNC_START(_savef16) stfd 16,-128(1) -FUNC_START(_savef17) stfd 17,-120(1) -FUNC_START(_savef18) stfd 18,-112(1) -FUNC_START(_savef19) stfd 19,-104(1) -FUNC_START(_savef20) stfd 20,-96(1) -FUNC_START(_savef21) stfd 21,-88(1) -FUNC_START(_savef22) stfd 22,-80(1) -FUNC_START(_savef23) stfd 23,-72(1) -FUNC_START(_savef24) stfd 24,-64(1) -FUNC_START(_savef25) stfd 25,-56(1) -FUNC_START(_savef26) stfd 26,-48(1) -FUNC_START(_savef27) stfd 27,-40(1) -FUNC_START(_savef28) stfd 28,-32(1) -FUNC_START(_savef29) stfd 29,-24(1) -FUNC_START(_savef30) stfd 30,-16(1) -FUNC_START(_savef31) stfd 31,-8(1) - blr -.LTfsav: - .long 0 - .byte 0,12,0,0,0,0,0,0 - .long 0 - .long .LTfsav-.fsav - .short 4 - .ascii "fsav" -FUNC_END(_savef31) -FUNC_END(_savef30) -FUNC_END(_savef29) -FUNC_END(_savef28) -FUNC_END(_savef27) -FUNC_END(_savef26) -FUNC_END(_savef25) -FUNC_END(_savef24) -FUNC_END(_savef23) -FUNC_END(_savef22) -FUNC_END(_savef21) -FUNC_END(_savef20) -FUNC_END(_savef19) -FUNC_END(_savef18) -FUNC_END(_savef17) -FUNC_END(_savef16) -FUNC_END(_savef15) -FUNC_END(_savef14) - -/* Routines for restoring floating point registers, called by the compiler. 
*/ - -.fres: -FUNC_START(_restf14) lfd 14,-144(1) /* restore fp registers */ -FUNC_START(_restf15) lfd 15,-136(1) -FUNC_START(_restf16) lfd 16,-128(1) -FUNC_START(_restf17) lfd 17,-120(1) -FUNC_START(_restf18) lfd 18,-112(1) -FUNC_START(_restf19) lfd 19,-104(1) -FUNC_START(_restf20) lfd 20,-96(1) -FUNC_START(_restf21) lfd 21,-88(1) -FUNC_START(_restf22) lfd 22,-80(1) -FUNC_START(_restf23) lfd 23,-72(1) -FUNC_START(_restf24) lfd 24,-64(1) -FUNC_START(_restf25) lfd 25,-56(1) -FUNC_START(_restf26) lfd 26,-48(1) -FUNC_START(_restf27) lfd 27,-40(1) -FUNC_START(_restf28) lfd 28,-32(1) -FUNC_START(_restf29) lfd 29,-24(1) -FUNC_START(_restf30) lfd 30,-16(1) -FUNC_START(_restf31) lfd 31,-8(1) - blr -.LTfres: - .long 0 - .byte 0,12,0,0,0,0,0,0 - .long 0 - .long .LTfres-.fres - .short 4 - .ascii "fres" -FUNC_END(_restf31) -FUNC_END(_restf30) -FUNC_END(_restf29) -FUNC_END(_restf28) -FUNC_END(_restf27) -FUNC_END(_restf26) -FUNC_END(_restf25) -FUNC_END(_restf24) -FUNC_END(_restf23) -FUNC_END(_restf22) -FUNC_END(_restf21) -FUNC_END(_restf20) -FUNC_END(_restf19) -FUNC_END(_restf18) -FUNC_END(_restf17) -FUNC_END(_restf16) -FUNC_END(_restf15) -FUNC_END(_restf14) - #endif diff --git a/contrib/gcc/config/rs6000/gnu.h b/contrib/gcc/config/rs6000/gnu.h new file mode 100644 index 0000000..32bd906 --- /dev/null +++ b/contrib/gcc/config/rs6000/gnu.h @@ -0,0 +1,38 @@ +/* Definitions of target machine for GNU compiler, + for powerpc machines running GNU. + Copyright (C) 2001 Free Software Foundation, Inc. + +This file is part of GNU CC. + +GNU CC is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2, or (at your option) +any later version. + +GNU CC is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with GNU CC; see the file COPYING. If not, write to +the Free Software Foundation, 59 Temple Place - Suite 330, +Boston, MA 02111-1307, USA. */ + +#undef CPP_OS_DEFAULT_SPEC +#define CPP_OS_DEFAULT_SPEC "%(cpp_os_gnu)" + +#undef STARTFILE_DEFAULT_SPEC +#define STARTFILE_DEFAULT_SPEC "%(startfile_gnu)" + +#undef ENDFILE_DEFAULT_SPEC +#define ENDFILE_DEFAULT_SPEC "%(endfile_gnu)" + +#undef LINK_START_DEFAULT_SPEC +#define LINK_START_DEFAULT_SPEC "%(link_start_gnu)" + +#undef LINK_OS_DEFAULT_SPEC +#define LINK_OS_DEFAULT_SPEC "%(link_os_gnu)" + +#undef TARGET_VERSION +#define TARGET_VERSION fprintf (stderr, " (PowerPC GNU)"); diff --git a/contrib/gcc/config/rs6000/linux64.h b/contrib/gcc/config/rs6000/linux64.h index d014afe..40b64dc 100644 --- a/contrib/gcc/config/rs6000/linux64.h +++ b/contrib/gcc/config/rs6000/linux64.h @@ -1,6 +1,6 @@ /* Definitions of target machine for GNU compiler, for 64 bit powerpc linux. - Copyright (C) 2000, 2001 Free Software Foundation, Inc. + Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc. This file is part of GNU CC. @@ -87,10 +87,6 @@ Boston, MA 02111-1307, USA. */ #undef JUMP_TABLES_IN_TEXT_SECTION #define JUMP_TABLES_IN_TEXT_SECTION 1 -/* Define cutoff for using external functions to save floating point. */ -#undef FP_SAVE_INLINE -#define FP_SAVE_INLINE(FIRST_REG) ((FIRST_REG) == 62 || (FIRST_REG) == 63) - /* 64-bit PowerPC Linux always has GPR13 fixed. */ #define FIXED_R13 1 @@ -142,9 +138,29 @@ Boston, MA 02111-1307, USA. 
*/ #define LINK_OS_DEFAULT_SPEC "%(link_os_linux)" #undef LINK_OS_LINUX_SPEC +#ifndef CROSS_COMPILE +#define LINK_OS_LINUX_SPEC "-m elf64ppc %{!shared: %{!static: \ + %{rdynamic:-export-dynamic} \ + %{!dynamic-linker:-dynamic-linker /lib64/ld.so.1}}}" +#else #define LINK_OS_LINUX_SPEC "-m elf64ppc %{!shared: %{!static: \ %{rdynamic:-export-dynamic} \ - %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}}}" + %{!dynamic-linker:-dynamic-linker ld.so.1}}}" +#endif + +#ifndef CROSS_COMPILE +#undef STARTFILE_LINUX_SPEC +#define STARTFILE_LINUX_SPEC "\ +%{!shared: %{pg:/usr/lib64/gcrt1.o%s} %{!pg:%{p:/usr/lib64/gcrt1.o%s} \ + %{!p:/usr/lib64/crt1.o%s}}} /usr/lib64/crti.o%s \ +%{!shared:crtbegin.o%s} %{shared:crtbeginS.o%s}" +#endif + +#ifndef CROSS_COMPILE +#undef ENDFILE_LINUX_SPEC +#define ENDFILE_LINUX_SPEC "\ +%{!shared:crtend.o%s} %{shared:crtendS.o%s} /usr/lib64/crtn.o%s" +#endif #undef TOC_SECTION_ASM_OP #define TOC_SECTION_ASM_OP "\t.section\t\".toc\",\"aw\"" @@ -208,17 +224,6 @@ Boston, MA 02111-1307, USA. */ && ! DECL_WEAK (DECL)) \ SYMBOL_REF_FLAG (XEXP (DECL_RTL (DECL), 0)) = 1; -/* This macro gets just the user-specified name - out of the string in a SYMBOL_REF. Discard - a leading * or @. */ -#define STRIP_NAME_ENCODING(VAR,SYMBOL_NAME) \ -do { \ - const char *_name = (SYMBOL_NAME); \ - while (*_name == '*' || *_name == '@') \ - _name++; \ - (VAR) = _name; \ -} while (0) - /* This is how to output a reference to a user-level label named NAME. `assemble_name' uses this. 
*/ @@ -245,32 +250,39 @@ do { \ fputs (DOUBLE_INT_ASM_OP, (FILE)); \ putc ('.', (FILE)); \ assemble_name ((FILE), (NAME)); \ - putc ('\n', (FILE)); \ - fputs (DOUBLE_INT_ASM_OP, (FILE)); \ - fputs (".TOC.@tocbase, 0\n\t.previous\n", (FILE)); \ - \ - if (TREE_PUBLIC (DECL)) \ + fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", (FILE)); \ + assemble_name ((FILE), (NAME)); \ + fputs (",24\n\t.type\t.", (FILE)); \ + assemble_name ((FILE), (NAME)); \ + fputs (",@function\n", (FILE)); \ + if (TREE_PUBLIC (DECL) && ! DECL_WEAK (DECL)) \ { \ - if (DECL_WEAK (DECL)) \ - fputs ("\t.weak\t", (FILE)); \ - else \ - fputs ("\t.globl\t", (FILE)); \ - putc ('.', (FILE)); \ + fputs ("\t.globl\t.", (FILE)); \ assemble_name ((FILE), (NAME)); \ putc ('\n', (FILE)); \ } \ - fputs (TYPE_ASM_OP, (FILE)); \ - putc ('.', (FILE)); \ - assemble_name ((FILE), (NAME)); \ - putc (',', (FILE)); \ - fprintf ((FILE), TYPE_OPERAND_FMT, "function"); \ - putc ('\n', (FILE)); \ ASM_DECLARE_RESULT ((FILE), DECL_RESULT (DECL)); \ putc ('.', (FILE)); \ ASM_OUTPUT_LABEL ((FILE), (NAME)); \ } \ while (0) +/* This is how to declare the size of a function. */ +#undef ASM_DECLARE_FUNCTION_SIZE +#define ASM_DECLARE_FUNCTION_SIZE(FILE, FNAME, DECL) \ + do \ + { \ + if (!flag_inhibit_size_directive) \ + { \ + fputs ("\t.size\t.", (FILE)); \ + assemble_name ((FILE), (FNAME)); \ + fputs (",.-.", (FILE)); \ + assemble_name ((FILE), (FNAME)); \ + putc ('\n', (FILE)); \ + } \ + } \ + while (0) + /* Return non-zero if this entry is to be written into the constant pool in a special way. We do so if this is a SYMBOL_REF, LABEL_REF or a CONST containing one of them. If -mfp-in-toc (the default), diff --git a/contrib/gcc/config/rs6000/netbsd.h b/contrib/gcc/config/rs6000/netbsd.h index 0e58a45..95f6542 100644 --- a/contrib/gcc/config/rs6000/netbsd.h +++ b/contrib/gcc/config/rs6000/netbsd.h @@ -64,3 +64,8 @@ Boston, MA 02111-1307, USA. */ structure return convention. 
*/ #undef DRAFT_V4_STRUCT_RET #define DRAFT_V4_STRUCT_RET 1 + +/* Use STABS debugging information by default. DWARF2 makes a mess of + the 1.5.2 linker. */ +#undef PREFERRED_DEBUGGING_TYPE +#define PREFERRED_DEBUGGING_TYPE DBX_DEBUG diff --git a/contrib/gcc/config/rs6000/ppc-asm.h b/contrib/gcc/config/rs6000/ppc-asm.h index 3a6fb2a..27f3635 100644 --- a/contrib/gcc/config/rs6000/ppc-asm.h +++ b/contrib/gcc/config/rs6000/ppc-asm.h @@ -161,6 +161,7 @@ GLUE(.L,name): \ #elif defined (__powerpc64__) #define FUNC_NAME(name) GLUE(.,name) +#define JUMP_TARGET(name) FUNC_NAME(name) #define FUNC_START(name) \ .section ".opd","aw"; \ name: \ diff --git a/contrib/gcc/config/rs6000/rs6000-protos.h b/contrib/gcc/config/rs6000/rs6000-protos.h index c40689e..19aeb07 100644 --- a/contrib/gcc/config/rs6000/rs6000-protos.h +++ b/contrib/gcc/config/rs6000/rs6000-protos.h @@ -40,6 +40,7 @@ extern int cc_reg_operand PARAMS ((rtx, enum machine_mode)); extern int cc_reg_not_cr0_operand PARAMS ((rtx, enum machine_mode)); extern int reg_or_short_operand PARAMS ((rtx, enum machine_mode)); extern int reg_or_neg_short_operand PARAMS ((rtx, enum machine_mode)); +extern int reg_or_aligned_short_operand PARAMS ((rtx, enum machine_mode)); extern int reg_or_u_short_operand PARAMS ((rtx, enum machine_mode)); extern int reg_or_cint_operand PARAMS ((rtx, enum machine_mode)); extern int reg_or_arith_cint_operand PARAMS ((rtx, enum machine_mode)); @@ -51,6 +52,7 @@ extern int got_no_const_operand PARAMS ((rtx, enum machine_mode)); extern int num_insns_constant PARAMS ((rtx, enum machine_mode)); extern int easy_fp_constant PARAMS ((rtx, enum machine_mode)); extern int zero_fp_constant PARAMS ((rtx, enum machine_mode)); +extern int zero_constant PARAMS ((rtx, enum machine_mode)); extern int volatile_mem_operand PARAMS ((rtx, enum machine_mode)); extern int offsettable_mem_operand PARAMS ((rtx, enum machine_mode)); extern int mem_or_easy_const_operand PARAMS ((rtx, enum machine_mode)); diff --git 
a/contrib/gcc/config/rs6000/rs6000.c b/contrib/gcc/config/rs6000/rs6000.c index f8375a4..45d6fdf 100644 --- a/contrib/gcc/config/rs6000/rs6000.c +++ b/contrib/gcc/config/rs6000/rs6000.c @@ -163,6 +163,8 @@ static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int static rtx altivec_expand_builtin PARAMS ((tree, rtx)); static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx)); +static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx)); +static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx)); static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree)); static void rs6000_parse_abi_options PARAMS ((void)); @@ -172,6 +174,7 @@ static void is_altivec_return_reg PARAMS ((rtx, void *)); int vrsave_operation PARAMS ((rtx, enum machine_mode)); static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int)); static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT)); +static int easy_vector_constant PARAMS ((rtx)); /* Default register names. */ char rs6000_reg_names[][8] = @@ -214,7 +217,7 @@ static const char alt_reg_names[][8] = "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15", "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23", "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31", - "%vrsave" + "vrsave" }; #endif @@ -480,11 +483,13 @@ rs6000_override_options (default_cpu) } } - if (flag_pic && DEFAULT_ABI == ABI_AIX) + if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX) { - warning ("-f%s ignored (all code is position independent)", - (flag_pic > 1) ? "PIC" : "pic"); flag_pic = 0; + + if (extra_warnings) + warning ("-f%s ignored (all code is position independent)", + (flag_pic > 1) ? 
"PIC" : "pic"); } #ifdef XCOFF_DEBUGGING_INFO @@ -586,6 +591,8 @@ rs6000_parse_abi_options () return; else if (! strcmp (rs6000_abi_string, "altivec")) rs6000_altivec_abi = 1; + else if (! strcmp (rs6000_abi_string, "no-altivec")) + rs6000_altivec_abi = 0; else error ("unknown ABI specified: '%s'", rs6000_abi_string); } @@ -708,6 +715,19 @@ count_register_operand (op, mode) return 0; } +/* Returns 1 if op is an altivec register. */ +int +altivec_register_operand (op, mode) + rtx op; + enum machine_mode mode ATTRIBUTE_UNUSED; +{ + + return (register_operand (op, mode) + && (GET_CODE (op) != REG + || REGNO (op) > FIRST_PSEUDO_REGISTER + || ALTIVEC_REGNO_P (REGNO (op)))); +} + int xer_operand (op, mode) rtx op; @@ -753,7 +773,7 @@ u_short_cint_operand (op, mode) enum machine_mode mode ATTRIBUTE_UNUSED; { return (GET_CODE (op) == CONST_INT - && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')); + && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K')); } /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */ @@ -849,6 +869,24 @@ reg_or_neg_short_operand (op, mode) return gpc_reg_operand (op, mode); } +/* Returns 1 if OP is either a constant integer valid for a DS-field or + a non-special register. If a register, it must be in the proper + mode unless MODE is VOIDmode. */ + +int +reg_or_aligned_short_operand (op, mode) + rtx op; + enum machine_mode mode; +{ + if (gpc_reg_operand (op, mode)) + return 1; + else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3)) + return 1; + + return 0; +} + + /* Return 1 if the operand is either a register or an integer whose high-order 16 bits are zero. 
*/ @@ -1046,7 +1084,7 @@ num_insns_constant (op, mode) REAL_VALUE_FROM_CONST_DOUBLE (rv, op); REAL_VALUE_TO_TARGET_SINGLE (rv, l); - return num_insns_constant_wide ((HOST_WIDE_INT)l); + return num_insns_constant_wide ((HOST_WIDE_INT) l); } else if (GET_CODE (op) == CONST_DOUBLE) @@ -1076,10 +1114,10 @@ num_insns_constant (op, mode) else { - if (high == 0 && (low & 0x80000000) == 0) + if (high == 0 && low >= 0) return num_insns_constant_wide (low); - else if (high == -1 && (low & 0x80000000) != 0) + else if (high == -1 && low < 0) return num_insns_constant_wide (low); else if (mask64_operand (op, mode)) @@ -1161,6 +1199,60 @@ easy_fp_constant (op, mode) abort (); } +/* Return 1 if the operand is a CONST_INT and can be put into a + register with one instruction. */ + +static int +easy_vector_constant (op) + rtx op; +{ + rtx elt; + int units, i; + + if (GET_CODE (op) != CONST_VECTOR) + return 0; + + units = CONST_VECTOR_NUNITS (op); + + /* We can generate 0 easily. Look for that. */ + for (i = 0; i < units; ++i) + { + elt = CONST_VECTOR_ELT (op, i); + + /* We could probably simplify this by just checking for equality + with CONST0_RTX for the current mode, but let's be safe + instead. */ + + switch (GET_CODE (elt)) + { + case CONST_INT: + if (INTVAL (elt) != 0) + return 0; + break; + case CONST_DOUBLE: + if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0) + return 0; + break; + default: + return 0; + } + } + + /* We could probably generate a few other constants trivially, but + gcc doesn't generate them yet. FIXME later. */ + return 1; +} + +/* Return 1 if the operand is the constant 0. This works for scalars + as well as vectors. */ +int +zero_constant (op, mode) + rtx op; + enum machine_mode mode; +{ + return op == CONST0_RTX (mode); +} + /* Return 1 if the operand is 0.0. 
*/ int zero_fp_constant (op, mode) @@ -1230,8 +1322,8 @@ add_operand (op, mode) enum machine_mode mode; { if (GET_CODE (op) == CONST_INT) - return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I') - || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L')); + return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I') + || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L')); return gpc_reg_operand (op, mode); } @@ -1244,8 +1336,8 @@ non_add_cint_operand (op, mode) enum machine_mode mode ATTRIBUTE_UNUSED; { return (GET_CODE (op) == CONST_INT - && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000 - && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L')); + && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I') + && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L')); } /* Return 1 if the operand is a non-special register or a constant that @@ -1317,6 +1409,11 @@ mask_operand (op, mode) c = INTVAL (op); + /* Fail in 64-bit mode if the mask wraps around because the upper + 32-bits of the mask will all be 1s, contrary to GCC's internal view. */ + if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001) + return 0; + /* We don't change the number of transitions by inverting, so make sure we start with the LS bit zero. 
*/ if (c & 1) @@ -1629,8 +1726,8 @@ constant_pool_expr_1 (op, have_sym, have_toc) return 0; case PLUS: case MINUS: - return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) && - constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc); + return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) + && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc)); case CONST: return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc); case CONST_INT: @@ -1801,6 +1898,7 @@ rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win) *win = 1; return x; } + #if TARGET_MACHO if (DEFAULT_ABI == ABI_DARWIN && flag_pic && GET_CODE (x) == LO_SUM @@ -1826,7 +1924,8 @@ rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win) && GET_CODE (XEXP (x, 0)) == REG && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode) - && GET_CODE (XEXP (x, 1)) == CONST_INT) + && GET_CODE (XEXP (x, 1)) == CONST_INT + && !ALTIVEC_VECTOR_MODE (mode)) { HOST_WIDE_INT val = INTVAL (XEXP (x, 1)); HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000; @@ -1857,6 +1956,7 @@ rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win) #if TARGET_MACHO if (GET_CODE (x) == SYMBOL_REF && DEFAULT_ABI == ABI_DARWIN + && !ALTIVEC_VECTOR_MODE (mode) && flag_pic) { /* Darwin load of floating point constant. */ @@ -1875,8 +1975,8 @@ rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win) } #endif if (TARGET_TOC - && CONSTANT_POOL_EXPR_P (x) - && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode)) + && CONSTANT_POOL_EXPR_P (x) + && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode)) { (x) = create_TOC_reference (x); *win = 1; @@ -2151,9 +2251,10 @@ rs6000_emit_move (dest, source, mode) /* Handle the case where reload calls us with an invalid address; and the case of CONSTANT_P_RTX. */ - if (! general_operand (operands[1], mode) - || ! 
nonimmediate_operand (operands[0], mode) - || GET_CODE (operands[1]) == CONSTANT_P_RTX) + if (!VECTOR_MODE_P (mode) + && (! general_operand (operands[1], mode) + || ! nonimmediate_operand (operands[0], mode) + || GET_CODE (operands[1]) == CONSTANT_P_RTX)) { emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1])); return; @@ -2183,8 +2284,8 @@ rs6000_emit_move (dest, source, mode) case V8HImode: case V4SFmode: case V4SImode: - /* fixme: aldyh -- allow vector constants when they are implemented. */ - if (CONSTANT_P (operands[1])) + if (CONSTANT_P (operands[1]) + && !easy_vector_constant (operands[1])) operands[1] = force_const_mem (mode, operands[1]); break; @@ -3013,42 +3114,7 @@ rs6000_va_arg (valist, type) right-aligned, as if they were in fact integers. This only matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */ if (DEFAULT_ABI != ABI_V4) - { - HOST_WIDE_INT align, rounded_size; - enum machine_mode mode; - tree addr_tree; - - /* Compute the rounded size of the type. */ - align = PARM_BOUNDARY / BITS_PER_UNIT; - rounded_size = (((int_size_in_bytes (type) + align - 1) / align) - * align); - - addr_tree = valist; - - mode = TYPE_MODE (type); - if (mode != BLKmode) - { - HOST_WIDE_INT adj; - adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT; - if (rounded_size > align) - adj = rounded_size; - - addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree, - build_int_2 (rounded_size - adj, 0)); - } - - addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL); - addr_rtx = copy_to_reg (addr_rtx); - - /* Compute new value for AP. 
*/ - t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, - build (PLUS_EXPR, TREE_TYPE (valist), valist, - build_int_2 (rounded_size, 0))); - TREE_SIDE_EFFECTS (t) = 1; - expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); - - return addr_rtx; - } + return std_expand_builtin_va_arg (valist, type); f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node)); f_fpr = TREE_CHAIN (f_gpr); @@ -3072,7 +3138,8 @@ rs6000_va_arg (valist, type) n_reg = 1; sav_ofs = 0; sav_scale = 4; - size = rsize = UNITS_PER_WORD; + size = UNITS_PER_WORD; + rsize = 1; } else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT) { @@ -3099,50 +3166,62 @@ rs6000_va_arg (valist, type) lab_over = gen_label_rtx (); addr_rtx = gen_reg_rtx (Pmode); - emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL), - GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1, - lab_false); - - /* Long long is aligned in the registers. */ - if (n_reg > 1) + /* Vectors never go in registers. */ + if (TREE_CODE (type) != VECTOR_TYPE) { - u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg, - build_int_2 (n_reg - 1, 0)); - u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u); - u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u); - TREE_SIDE_EFFECTS (u) = 1; - expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL); - } + TREE_THIS_VOLATILE (reg) = 1; + emit_cmp_and_jump_insns + (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL), + GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1, + lab_false); - if (sav_ofs) - t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0)); - else - t = sav; + /* Long long is aligned in the registers. 
*/ + if (n_reg > 1) + { + u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg, + build_int_2 (n_reg - 1, 0)); + u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u); + u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u); + TREE_SIDE_EFFECTS (u) = 1; + expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL); + } - u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0)); - TREE_SIDE_EFFECTS (u) = 1; + if (sav_ofs) + t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0)); + else + t = sav; - u = build1 (CONVERT_EXPR, integer_type_node, u); - TREE_SIDE_EFFECTS (u) = 1; + u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, + build_int_2 (n_reg, 0)); + TREE_SIDE_EFFECTS (u) = 1; - u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0)); - TREE_SIDE_EFFECTS (u) = 1; + u = build1 (CONVERT_EXPR, integer_type_node, u); + TREE_SIDE_EFFECTS (u) = 1; - t = build (PLUS_EXPR, ptr_type_node, t, u); - TREE_SIDE_EFFECTS (t) = 1; + u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0)); + TREE_SIDE_EFFECTS (u) = 1; - r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL); - if (r != addr_rtx) - emit_move_insn (addr_rtx, r); + t = build (PLUS_EXPR, ptr_type_node, t, u); + TREE_SIDE_EFFECTS (t) = 1; + + r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL); + if (r != addr_rtx) + emit_move_insn (addr_rtx, r); + + emit_jump_insn (gen_jump (lab_over)); + emit_barrier (); + } - emit_jump_insn (gen_jump (lab_over)); - emit_barrier (); emit_label (lab_false); /* ... otherwise out of the overflow area. */ - /* Make sure we don't find reg 7 for the next int arg. */ - if (n_reg > 1) + /* Make sure we don't find reg 7 for the next int arg. + + All AltiVec vectors go in the overflow area. So in the AltiVec + case we need to get the vectors from the overflow area, but + remember where the GPRs and FPRs are. 
*/ + if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE) { t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0)); TREE_SIDE_EFFECTS (t) = 1; @@ -3154,8 +3233,16 @@ rs6000_va_arg (valist, type) t = ovf; else { - t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0)); - t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1)); + int align; + + /* Vectors are 16 byte aligned. */ + if (TREE_CODE (type) == VECTOR_TYPE) + align = 15; + else + align = 7; + + t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0)); + t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1)); } t = save_expr (t); @@ -3352,19 +3439,47 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS }, { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS }, { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp_p, "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp_p, "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb_p, "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh_p, "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw_p, "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp_p, "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp_p, "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb_p, "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh_p, "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P }, - { 
MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw_p, "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub_p, "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh_p, "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P }, - { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw_p, "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P }, +}; + +/* AltiVec predicates. */ + +struct builtin_description_predicates +{ + const unsigned int mask; + const enum insn_code icode; + const char *opcode; + const char *const name; + const enum rs6000_builtins code; +}; + +static const struct builtin_description_predicates bdesc_altivec_preds[] = +{ + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P 
}, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P }, + { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P } +}; + +/* ABS* opreations. */ + +static const struct builtin_description bdesc_abs[] = +{ + { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI }, + { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI }, + { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF }, + { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI }, + { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI }, + { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI }, + { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI } }; /* Simple unary operations: VECb = foo (unsigned literal) or VECb = @@ -3422,6 +3537,42 @@ altivec_expand_unop_builtin (icode, arglist, target) return target; } + +static rtx +altivec_expand_abs_builtin (icode, arglist, target) + enum insn_code icode; + tree arglist; + rtx target; +{ + rtx pat, scratch1, scratch2; + tree arg0 = TREE_VALUE (arglist); + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + enum machine_mode tmode = insn_data[icode].operand[0].mode; + enum machine_mode mode0 = insn_data[icode].operand[1].mode; + + /* If we have invalid arguments, bail out before generating bad rtl. */ + if (arg0 == error_mark_node) + return NULL_RTX; + + if (target == 0 + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! 
(*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = copy_to_mode_reg (mode0, op0); + + scratch1 = gen_reg_rtx (mode0); + scratch2 = gen_reg_rtx (mode0); + + pat = GEN_FCN (icode) (target, op0, scratch1, scratch2); + if (! pat) + return 0; + emit_insn (pat); + + return target; +} + static rtx altivec_expand_binop_builtin (icode, arglist, target) enum insn_code icode; @@ -3460,6 +3611,87 @@ altivec_expand_binop_builtin (icode, arglist, target) } static rtx +altivec_expand_predicate_builtin (icode, opcode, arglist, target) + enum insn_code icode; + const char *opcode; + tree arglist; + rtx target; +{ + rtx pat, scratch; + tree cr6_form = TREE_VALUE (arglist); + tree arg0 = TREE_VALUE (TREE_CHAIN (arglist)); + tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); + enum machine_mode tmode = SImode; + enum machine_mode mode0 = insn_data[icode].operand[1].mode; + enum machine_mode mode1 = insn_data[icode].operand[2].mode; + int cr6_form_int; + + if (TREE_CODE (cr6_form) != INTEGER_CST) + { + error ("argument 1 of __builtin_altivec_predicate must be a constant"); + return NULL_RTX; + } + else + cr6_form_int = TREE_INT_CST_LOW (cr6_form); + + if (mode0 != mode1) + abort (); + + /* If we have invalid arguments, bail out before generating bad rtl. */ + if (arg0 == error_mark_node || arg1 == error_mark_node) + return NULL_RTX; + + if (target == 0 + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! (*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = copy_to_mode_reg (mode0, op0); + if (! (*insn_data[icode].operand[2].predicate) (op1, mode1)) + op1 = copy_to_mode_reg (mode1, op1); + + scratch = gen_reg_rtx (mode0); + + pat = GEN_FCN (icode) (scratch, op0, op1, + gen_rtx (SYMBOL_REF, Pmode, opcode)); + if (! 
pat) + return 0; + emit_insn (pat); + + /* The vec_any* and vec_all* predicates use the same opcodes for two + different operations, but the bits in CR6 will be different + depending on what information we want. So we have to play tricks + with CR6 to get the right bits out. + + If you think this is disgusting, look at the specs for the + AltiVec predicates. */ + + switch (cr6_form_int) + { + case 0: + emit_insn (gen_cr6_test_for_zero (target)); + break; + case 1: + emit_insn (gen_cr6_test_for_zero_reverse (target)); + break; + case 2: + emit_insn (gen_cr6_test_for_lt (target)); + break; + case 3: + emit_insn (gen_cr6_test_for_lt_reverse (target)); + break; + default: + error ("argument 1 of __builtin_altivec_predicate is out of range"); + break; + } + + return target; +} + +static rtx altivec_expand_stv_builtin (icode, arglist) enum insn_code icode; tree arglist; @@ -3543,6 +3775,7 @@ altivec_expand_builtin (exp, target) rtx target; { struct builtin_description *d; + struct builtin_description_predicates *dp; size_t i; enum insn_code icode; tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); @@ -3818,6 +4051,12 @@ altivec_expand_builtin (exp, target) return NULL_RTX; } + /* Expand abs* operations. */ + d = (struct builtin_description *) bdesc_abs; + for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++) + if (d->code == fcode) + return altivec_expand_abs_builtin (d->icode, arglist, target); + /* Handle simple unary operations. */ d = (struct builtin_description *) bdesc_1arg; for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++) @@ -3830,6 +4069,12 @@ altivec_expand_builtin (exp, target) if (d->code == fcode) return altivec_expand_binop_builtin (d->icode, arglist, target); + /* Expand the AltiVec predicates. 
*/ + dp = (struct builtin_description_predicates *) bdesc_altivec_preds; + for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++) + if (dp->code == fcode) + return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target); + /* LV* are funky. We initialized them differently. */ switch (fcode) { @@ -3899,7 +4144,8 @@ rs6000_init_builtins () static void altivec_init_builtins (void) { - struct builtin_description * d; + struct builtin_description *d; + struct builtin_description_predicates *dp; size_t i; tree endlink = void_list_node; @@ -4055,15 +4301,11 @@ altivec_init_builtins (void) /* void foo (void). */ tree void_ftype_void - = build_function_type (void_type_node, - tree_cons (NULL_TREE, void_type_node, - endlink)); + = build_function_type (void_type_node, void_list_node); /* vshort foo (void). */ tree v8hi_ftype_void - = build_function_type (V8HI_type_node, - tree_cons (NULL_TREE, void_type_node, - endlink)); + = build_function_type (V8HI_type_node, void_list_node); tree v4si_ftype_v4si_v4si = build_function_type (V4SI_type_node, @@ -4201,6 +4443,18 @@ altivec_init_builtins (void) tree_cons (NULL_TREE, V4SF_type_node, endlink))); + tree v4si_ftype_v4si + = build_function_type (V4SI_type_node, + tree_cons (NULL_TREE, V4SI_type_node, endlink)); + + tree v8hi_ftype_v8hi + = build_function_type (V8HI_type_node, + tree_cons (NULL_TREE, V8HI_type_node, endlink)); + + tree v16qi_ftype_v16qi + = build_function_type (V16QI_type_node, + tree_cons (NULL_TREE, V16QI_type_node, endlink)); + tree v8hi_ftype_v16qi_v16qi = build_function_type (V8HI_type_node, tree_cons (NULL_TREE, V16QI_type_node, @@ -4265,6 +4519,38 @@ altivec_init_builtins (void) tree_cons (NULL_TREE, V16QI_type_node, endlink))); + tree int_ftype_int_v4si_v4si + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V4SI_type_node, + tree_cons (NULL_TREE, V4SI_type_node, + endlink)))); + + tree int_ftype_int_v4sf_v4sf + = 
build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V4SF_type_node, + tree_cons (NULL_TREE, V4SF_type_node, + endlink)))); + + tree int_ftype_int_v8hi_v8hi + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V8HI_type_node, + tree_cons (NULL_TREE, V8HI_type_node, + endlink)))); + + tree int_ftype_int_v16qi_v16qi + = build_function_type + (integer_type_node, + tree_cons (NULL_TREE, integer_type_node, + tree_cons (NULL_TREE, V16QI_type_node, + tree_cons (NULL_TREE, V16QI_type_node, + endlink)))); + tree v16qi_ftype_int_pvoid = build_function_type (V16QI_type_node, tree_cons (NULL_TREE, integer_type_node, @@ -4313,7 +4599,7 @@ altivec_init_builtins (void) def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX); def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX); def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL); - + /* Add the simple ternary operators. */ d = (struct builtin_description *) bdesc_3arg; for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++) @@ -4412,6 +4698,36 @@ altivec_init_builtins (void) for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++) def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code); + /* Initialize the predicates. 
*/ + dp = (struct builtin_description_predicates *) bdesc_altivec_preds; + for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++) + { + enum machine_mode mode1; + tree type; + + mode1 = insn_data[dp->icode].operand[1].mode; + + switch (mode1) + { + case V4SImode: + type = int_ftype_int_v4si_v4si; + break; + case V8HImode: + type = int_ftype_int_v8hi_v8hi; + break; + case V16QImode: + type = int_ftype_int_v16qi_v16qi; + break; + case V4SFmode: + type = int_ftype_int_v4sf_v4sf; + break; + default: + abort (); + } + + def_builtin (dp->mask, dp->name, type, dp->code); + } + /* Add the simple binary operators. */ d = (struct builtin_description *) bdesc_2arg; for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++) @@ -4530,6 +4846,36 @@ altivec_init_builtins (void) def_builtin (d->mask, d->name, type, d->code); } + /* Initialize the abs* operators. */ + d = (struct builtin_description *) bdesc_abs; + for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++) + { + enum machine_mode mode0; + tree type; + + mode0 = insn_data[d->icode].operand[0].mode; + + switch (mode0) + { + case V4SImode: + type = v4si_ftype_v4si; + break; + case V8HImode: + type = v8hi_ftype_v8hi; + break; + case V16QImode: + type = v16qi_ftype_v16qi; + break; + case V4SFmode: + type = v4sf_ftype_v4sf; + break; + default: + abort (); + } + + def_builtin (d->mask, d->name, type, d->code); + } + /* Add the simple unary operators. */ d = (struct builtin_description *) bdesc_1arg; for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++) @@ -5358,7 +5704,7 @@ min_max_operator (op, mode) /* Return 1 if ANDOP is a mask that has no bits on that are not in the mask required to convert the result of a rotate insn into a shift - left insn of SHIFTOP bits. Both are known to be CONST_INT. */ + left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. 
*/ int includes_lshift_p (shiftop, andop) @@ -5369,7 +5715,7 @@ includes_lshift_p (shiftop, andop) shift_mask <<= INTVAL (shiftop); - return (INTVAL (andop) & ~shift_mask) == 0; + return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0; } /* Similar, but for right shift. */ @@ -5383,7 +5729,7 @@ includes_rshift_p (shiftop, andop) shift_mask >>= INTVAL (shiftop); - return (INTVAL (andop) & ~shift_mask) == 0; + return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0; } /* Return 1 if ANDOP is a mask suitable for use with an rldic insn @@ -5769,8 +6115,8 @@ rs6000_got_register (value) /* The second flow pass currently (June 1999) can't update regs_ever_live without disturbing other parts of the compiler, so update it here to make the prolog/epilogue code happy. */ - if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM]) - regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1; + if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]) + regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1; current_function_uses_pic_offset_table = 1; @@ -6033,7 +6379,7 @@ print_operand (file, x, code) case 'm': /* MB value for a mask operand. */ - if (! mask_operand (x, VOIDmode)) + if (! mask_operand (x, SImode)) output_operand_lossage ("invalid %%m value"); val = INT_LOWPART (x); @@ -6068,7 +6414,7 @@ print_operand (file, x, code) case 'M': /* ME value for a mask operand. */ - if (! mask_operand (x, VOIDmode)) + if (! mask_operand (x, SImode)) output_operand_lossage ("invalid %%M value"); val = INT_LOWPART (x); @@ -6198,7 +6544,7 @@ print_operand (file, x, code) /* PowerPC64 mask position. All 0's and all 1's are excluded. CONST_INT 32-bit mask is considered sign-extended so any transition must occur within the CONST_INT, not on the boundary. */ - if (! mask64_operand (x, VOIDmode)) + if (! 
mask64_operand (x, DImode)) output_operand_lossage ("invalid %%S value"); val = INT_LOWPART (x); @@ -6898,9 +7244,18 @@ rs6000_emit_cmove (dest, op, true_cond, false_cond) rtx op0 = rs6000_compare_op0; rtx op1 = rs6000_compare_op1; REAL_VALUE_TYPE c1; - enum machine_mode mode = GET_MODE (op0); + enum machine_mode compare_mode = GET_MODE (op0); + enum machine_mode result_mode = GET_MODE (dest); rtx temp; + /* These modes should always match. */ + if (GET_MODE (op1) != compare_mode) + return 0; + if (GET_MODE (true_cond) != result_mode) + return 0; + if (GET_MODE (false_cond) != result_mode) + return 0; + /* First, work out if the hardware can do this at all, or if it's too slow... */ /* If the comparison is an integer one, since we only have fsel @@ -6943,11 +7298,11 @@ rs6000_emit_cmove (dest, op, true_cond, false_cond) /* At this point we know we can use fsel. */ /* Reduce the comparison to a comparison against zero. */ - temp = gen_reg_rtx (mode); + temp = gen_reg_rtx (compare_mode); emit_insn (gen_rtx_SET (VOIDmode, temp, - gen_rtx_MINUS (mode, op0, op1))); + gen_rtx_MINUS (compare_mode, op0, op1))); op0 = temp; - op1 = CONST0_RTX (mode); + op1 = CONST0_RTX (compare_mode); /* If we don't care about NaNs we can reduce some of the comparisons down to faster ones. 
*/ @@ -6977,52 +7332,52 @@ rs6000_emit_cmove (dest, op, true_cond, false_cond) break; case LE: - temp = gen_reg_rtx (mode); - emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0))); + temp = gen_reg_rtx (compare_mode); + emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0))); op0 = temp; break; case ORDERED: - temp = gen_reg_rtx (mode); - emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0))); + temp = gen_reg_rtx (compare_mode); + emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0))); op0 = temp; break; case EQ: - temp = gen_reg_rtx (mode); + temp = gen_reg_rtx (compare_mode); emit_insn (gen_rtx_SET (VOIDmode, temp, - gen_rtx_NEG (mode, - gen_rtx_ABS (mode, op0)))); + gen_rtx_NEG (compare_mode, + gen_rtx_ABS (compare_mode, op0)))); op0 = temp; break; case UNGE: - temp = gen_reg_rtx (mode); + temp = gen_reg_rtx (result_mode); emit_insn (gen_rtx_SET (VOIDmode, temp, - gen_rtx_IF_THEN_ELSE (mode, + gen_rtx_IF_THEN_ELSE (result_mode, gen_rtx_GE (VOIDmode, op0, op1), true_cond, false_cond))); false_cond = temp; true_cond = false_cond; - temp = gen_reg_rtx (mode); - emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0))); + temp = gen_reg_rtx (compare_mode); + emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0))); op0 = temp; break; case GT: - temp = gen_reg_rtx (mode); + temp = gen_reg_rtx (result_mode); emit_insn (gen_rtx_SET (VOIDmode, temp, - gen_rtx_IF_THEN_ELSE (mode, + gen_rtx_IF_THEN_ELSE (result_mode, gen_rtx_GE (VOIDmode, op0, op1), true_cond, false_cond))); true_cond = temp; false_cond = true_cond; - temp = gen_reg_rtx (mode); - emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0))); + temp = gen_reg_rtx (compare_mode); + emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0))); op0 = temp; break; @@ -7031,7 +7386,7 @@ rs6000_emit_cmove (dest, op, true_cond, false_cond) } emit_insn (gen_rtx_SET (VOIDmode, dest, - gen_rtx_IF_THEN_ELSE 
(GET_MODE (dest), + gen_rtx_IF_THEN_ELSE (result_mode, gen_rtx_GE (VOIDmode, op0, op1), true_cond, false_cond))); @@ -7074,7 +7429,7 @@ first_reg_to_save () for (first_reg = 13; first_reg <= 31; first_reg++) if (regs_ever_live[first_reg] && (! call_used_regs[first_reg] - || (first_reg == PIC_OFFSET_TABLE_REGNUM + || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1) || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))) break; @@ -7128,8 +7483,8 @@ first_reg_to_save () #if TARGET_MACHO if (flag_pic && current_function_uses_pic_offset_table && - (first_reg > PIC_OFFSET_TABLE_REGNUM)) - return PIC_OFFSET_TABLE_REGNUM; + (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)) + return RS6000_PIC_OFFSET_TABLE_REGNUM; #endif return first_reg; @@ -7341,13 +7696,13 @@ rs6000_stack_info () /* Calculate which registers need to be saved & save area size. */ info_ptr->first_gp_reg_save = first_reg_to_save (); - /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM, + /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM, even if it currently looks like we won't. 
*/ if (((TARGET_TOC && TARGET_MINIMAL_TOC) || (flag_pic == 1 && abi == ABI_V4) || (flag_pic && abi == ABI_DARWIN)) - && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM) - info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM); + && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM) + info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM); else info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save); @@ -7533,8 +7888,7 @@ rs6000_stack_info () info_ptr->push_p = 1; else if (abi == ABI_V4) - info_ptr->push_p = (total_raw_size > info_ptr->fixed_size - || info_ptr->calls_p); + info_ptr->push_p = total_raw_size > info_ptr->fixed_size; else info_ptr->push_p = (frame_pointer_needed @@ -7768,7 +8122,7 @@ rs6000_emit_load_toc_table (fromprolog) int fromprolog; { rtx dest; - dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); + dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM); if (TARGET_ELF && DEFAULT_ABI != ABI_AIX) { @@ -8390,7 +8744,10 @@ rs6000_emit_prologue () /* Get VRSAVE onto a GPR. */ reg = gen_rtx_REG (SImode, 12); vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO); - emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave)); + if (TARGET_MACHO) + emit_insn (gen_get_vrsave_internal (reg)); + else + emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave)); /* Save VRSAVE. */ offset = info->vrsave_save_offset + sp_offset; @@ -8506,7 +8863,7 @@ rs6000_emit_prologue () for (i = 0; i < 32 - info->first_gp_reg_save; i++) if ((regs_ever_live[info->first_gp_reg_save+i] && ! call_used_regs[info->first_gp_reg_save+i]) - || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM + || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1) || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))) { @@ -8608,10 +8965,10 @@ rs6000_emit_prologue () RTX_FRAME_RELATED_P (insn) = 1; } - /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. 
*/ + /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */ if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0) || (DEFAULT_ABI == ABI_V4 && flag_pic == 1 - && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])) + && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])) { /* If emit_load_toc_table will use the link register, we need to save it. We use R11 for this purpose because emit_load_toc_table @@ -8639,7 +8996,7 @@ rs6000_emit_prologue () rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest))); rs6000_maybe_dead ( - emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM), + emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM), gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM))); } } @@ -8680,13 +9037,24 @@ rs6000_output_function_prologue (file, size) if (! HAVE_prologue) { start_sequence (); - + /* A NOTE_INSN_DELETED is supposed to be at the start and end of the "toplevel" insn chain. */ emit_note (0, NOTE_INSN_DELETED); rs6000_emit_prologue (); emit_note (0, NOTE_INSN_DELETED); - + + /* Expand INSN_ADDRESSES so final() doesn't crash. */ + { + rtx insn; + unsigned addr = 0; + for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn)) + { + INSN_ADDRESSES_NEW (insn, addr); + addr += 4; + } + } + if (TARGET_DEBUG_STACK) debug_rtx_list (get_insns (), 100); final (get_insns (), file, FALSE, FALSE); @@ -8881,7 +9249,7 @@ rs6000_emit_epilogue (sibcall) for (i = 0; i < 32 - info->first_gp_reg_save; i++) if ((regs_ever_live[info->first_gp_reg_save+i] && ! call_used_regs[info->first_gp_reg_save+i]) - || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM + || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1) || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))) { @@ -9074,6 +9442,17 @@ rs6000_output_function_epilogue (file, size) rs6000_emit_epilogue (FALSE); emit_note (0, NOTE_INSN_DELETED); + /* Expand INSN_ADDRESSES so final() doesn't crash. 
*/ + { + rtx insn; + unsigned addr = 0; + for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn)) + { + INSN_ADDRESSES_NEW (insn, addr); + addr += 4; + } + } + if (TARGET_DEBUG_STACK) debug_rtx_list (get_insns (), 100); final (get_insns (), file, FALSE, FALSE); @@ -9291,6 +9670,8 @@ rs6000_output_function_epilogue (file, size) Only emit this if the alloca bit was set above. */ if (frame_pointer_needed) fputs ("\t.byte 31\n", file); + + fputs ("\t.align 2\n", file); } return; } @@ -9863,7 +10244,7 @@ output_toc (file, x, labelno, mode) if (TARGET_MINIMAL_TOC) fputs (DOUBLE_INT_ASM_OP, file); else - fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low); + fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low); fprintf (file, "0x%lx%08lx\n", (long) high, (long) low); return; } @@ -9875,7 +10256,7 @@ output_toc (file, x, labelno, mode) fputs ("\t.long ", file); else fprintf (file, "\t.tc ID_%lx_%lx[TC],", - (long)high, (long)low); + (long) high, (long) low); fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low); } else @@ -10079,8 +10460,6 @@ output_profile_hook (labelno) const char *label_name; rtx fun; - labelno += 1; - ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno); STRIP_NAME_ENCODING (label_name, ggc_strdup (buf)); fun = gen_rtx_SYMBOL_REF (Pmode, label_name); @@ -10463,6 +10842,11 @@ rs6000_select_rtx_section (mode, x) { if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode)) toc_section (); + else if (flag_pic + && (GET_CODE (x) == SYMBOL_REF + || GET_CODE (x) == LABEL_REF + || GET_CODE (x) == CONST)) + data_section (); else const_section (); } diff --git a/contrib/gcc/config/rs6000/rs6000.h b/contrib/gcc/config/rs6000/rs6000.h index 2deaf75..3e4c5f2 100644 --- a/contrib/gcc/config/rs6000/rs6000.h +++ b/contrib/gcc/config/rs6000/rs6000.h @@ -191,7 +191,7 @@ extern int target_flags; function, and one less allocable register. */ #define MASK_MINIMAL_TOC 0x00000200 -/* Nonzero for the 64bit model: ints, longs, and pointers are 64 bits. 
*/ +/* Nonzero for the 64bit model: longs and pointers are 64 bits. */ #define MASK_64BIT 0x00000400 /* Disable use of FPRs. */ @@ -604,6 +604,9 @@ extern int rs6000_altivec_abi; #define LIBGCC2_LONG_DOUBLE_TYPE_SIZE 64 #endif +/* Work around rs6000_long_double_type_size dependency in ada/targtyps.c. */ +#define WIDEST_HARDWARE_FP_SIZE 64 + /* Width in bits of a pointer. See also the macro `Pmode' defined below. */ #define POINTER_SIZE (TARGET_32BIT ? 32 : 64) @@ -626,9 +629,6 @@ extern int rs6000_altivec_abi; #define LOCAL_ALIGNMENT(TYPE, ALIGN) \ ((TARGET_ALTIVEC && TREE_CODE (TYPE) == VECTOR_TYPE) ? 128 : ALIGN) -/* Handle #pragma pack. */ -#define HANDLE_PRAGMA_PACK 1 - /* Alignment of field after `int : 0' in a structure. */ #define EMPTY_FIELD_BOUNDARY 32 @@ -638,10 +638,13 @@ extern int rs6000_altivec_abi; /* A bitfield declared as `int' forces `int' alignment for the struct. */ #define PCC_BITFIELD_TYPE_MATTERS 1 -/* Make strings word-aligned so strcpy from constants will be faster. */ -#define CONSTANT_ALIGNMENT(EXP, ALIGN) \ - (TREE_CODE (EXP) == STRING_CST \ - && (ALIGN) < BITS_PER_WORD ? BITS_PER_WORD : (ALIGN)) +/* Make strings word-aligned so strcpy from constants will be faster. + Make vector constants quadword aligned. */ +#define CONSTANT_ALIGNMENT(EXP, ALIGN) \ + (TREE_CODE (EXP) == STRING_CST \ + && (ALIGN) < BITS_PER_WORD \ + ? BITS_PER_WORD \ + : (ALIGN)) /* Make arrays of chars word-aligned for the same reasons. Align vectors to 128 bits. */ @@ -759,7 +762,7 @@ extern int rs6000_altivec_abi; #define XER_REGNO 76 #define FIRST_ALTIVEC_REGNO 77 #define LAST_ALTIVEC_REGNO 108 -#define TOTAL_ALTIVEC_REGS (LAST_ALTIVEC_REGNO - FIRST_ALTIVEC_REGNO) +#define TOTAL_ALTIVEC_REGS (LAST_ALTIVEC_REGNO - FIRST_ALTIVEC_REGNO + 1) #define VRSAVE_REGNO 109 /* List the order in which to allocate registers. 
Each register must be @@ -957,18 +960,24 @@ extern int rs6000_altivec_abi; for (i = 32; i < 64; i++) \ fixed_regs[i] = call_used_regs[i] \ = call_really_used_regs[i] = 1; \ - if (DEFAULT_ABI == ABI_V4 && flag_pic == 1) \ - fixed_regs[PIC_OFFSET_TABLE_REGNUM] \ - = call_used_regs[PIC_OFFSET_TABLE_REGNUM] \ - = call_really_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1; \ - if (DEFAULT_ABI == ABI_DARWIN && flag_pic) \ - global_regs[PIC_OFFSET_TABLE_REGNUM] \ - = fixed_regs[PIC_OFFSET_TABLE_REGNUM] \ - = call_used_regs[PIC_OFFSET_TABLE_REGNUM] \ - = call_really_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1; \ + if (DEFAULT_ABI == ABI_V4 \ + && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM \ + && flag_pic == 1) \ + fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] \ + = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] \ + = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1; \ + if (DEFAULT_ABI == ABI_DARWIN \ + && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) \ + global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] \ + = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] \ + = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] \ + = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1; \ if (! TARGET_ALTIVEC) \ - for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i) \ - fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1; \ + { \ + for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i) \ + fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1; \ + call_really_used_regs[VRSAVE_REGNO] = 1; \ + } \ if (TARGET_ALTIVEC_ABI) \ for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i) \ call_used_regs[i] = call_really_used_regs[i] = 1; \ @@ -1199,14 +1208,14 @@ enum reg_class 'Q' means that is a memory operand that is just an offset from a reg. 'R' is for AIX TOC entries. 
'S' is a constant that can be placed into a 64-bit mask operand - 'T' is a consatnt that can be placed into a 32-bit mask operand + 'T' is a constant that can be placed into a 32-bit mask operand 'U' is for V.4 small data references. */ #define EXTRA_CONSTRAINT(OP, C) \ ((C) == 'Q' ? GET_CODE (OP) == MEM && GET_CODE (XEXP (OP, 0)) == REG \ : (C) == 'R' ? LEGITIMATE_CONSTANT_POOL_ADDRESS_P (OP) \ - : (C) == 'S' ? mask64_operand (OP, VOIDmode) \ - : (C) == 'T' ? mask_operand (OP, VOIDmode) \ + : (C) == 'S' ? mask64_operand (OP, DImode) \ + : (C) == 'T' ? mask_operand (OP, SImode) \ : (C) == 'U' ? (DEFAULT_ABI == ABI_V4 \ && small_data_operand (OP, GET_MODE (OP))) \ : 0) @@ -1539,7 +1548,7 @@ typedef struct rs6000_stack { On RS/6000, these are r3-r10 and fp1-fp13. On AltiVec, v2 - v13 are used for passing vectors. */ #define FUNCTION_ARG_REGNO_P(N) \ - ((unsigned)(((N) - GP_ARG_MIN_REG) < (unsigned)(GP_ARG_NUM_REG)) \ + (((unsigned)((N) - GP_ARG_MIN_REG) < (unsigned)(GP_ARG_NUM_REG)) \ || (TARGET_ALTIVEC && \ (unsigned)((N) - ALTIVEC_ARG_MIN_REG) < (unsigned)(ALTIVEC_ARG_NUM_REG)) \ || ((unsigned)((N) - FP_ARG_MIN_REG) < (unsigned)(FP_ARG_NUM_REG))) @@ -1591,8 +1600,7 @@ typedef struct rs6000_args #define RS6000_ARG_SIZE(MODE, TYPE) \ ((MODE) != BLKmode \ ? (GET_MODE_SIZE (MODE) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD \ - : ((unsigned HOST_WIDE_INT) int_size_in_bytes (TYPE) \ - + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) + : (int_size_in_bytes (TYPE) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a function whose data type is FNTYPE. @@ -1711,6 +1719,14 @@ typedef struct rs6000_args #define EXPAND_BUILTIN_VA_ARG(valist, type) \ rs6000_va_arg (valist, type) +/* For AIX, the rule is that structures are passed left-aligned in + their stack slot. However, GCC does not presently do this: + structures which are the same size as integer types are passed + right-aligned, as if they were in fact integers. 
This only + matters for structures of size 1 or 2, or 4 when TARGET_64BIT. + ABI_V4 does not use std_expand_builtin_va_arg. */ +#define PAD_VARARGS_DOWN (TYPE_MODE (type) != BLKmode) + /* Define this macro to be a nonzero value if the location where a function argument is passed depends on whether or not it is a named argument. */ #define STRICT_ARGUMENT_NAMING 1 @@ -1736,7 +1752,7 @@ typedef struct rs6000_args #define EPILOGUE_USES(REGNO) \ ((reload_completed && (REGNO) == LINK_REGISTER_REGNUM) \ - || (REGNO) == VRSAVE_REGNO \ + || (TARGET_ALTIVEC && (REGNO) == VRSAVE_REGNO) \ || (current_function_calls_eh_return \ && TARGET_AIX \ && (REGNO) == TOC_REGISTER)) @@ -1968,7 +1984,8 @@ typedef struct rs6000_args && GET_CODE (XEXP (X, 0)) == REG \ && INT_REG_OK_FOR_BASE_P (XEXP (X, 0), (STRICT)) \ && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (X, 1), 0) \ - && (! ALTIVEC_VECTOR_MODE (MODE) || INTVAL (X) == 0) \ + && (! ALTIVEC_VECTOR_MODE (MODE) \ + || (GET_CODE (XEXP (X,1)) == CONST_INT && INTVAL (XEXP (X,1)) == 0)) \ && (((MODE) != DFmode && (MODE) != DImode) \ || (TARGET_32BIT \ ? LEGITIMATE_ADDRESS_INTEGER_P (XEXP (X, 1), 4) \ @@ -2086,7 +2103,8 @@ do { \ this macro is not defined, it is up to the machine-dependent files to allocate such a register (if necessary). */ -#define PIC_OFFSET_TABLE_REGNUM 30 +#define RS6000_PIC_OFFSET_TABLE_REGNUM 30 +#define PIC_OFFSET_TABLE_REGNUM (flag_pic ? RS6000_PIC_OFFSET_TABLE_REGNUM : INVALID_REGNUM) #define TOC_REGISTER (TARGET_MINIMAL_TOC ? 30 : 2) @@ -2415,43 +2433,71 @@ extern int toc_initialized; #define RS6000_WEAK 0 #endif -/* This implementes the `alias' attribute. 
*/ -#define ASM_OUTPUT_DEF_FROM_DECLS(FILE,decl,target) \ -do { \ - const char * alias = XSTR (XEXP (DECL_RTL (decl), 0), 0); \ - char * name = IDENTIFIER_POINTER (target); \ - if (TREE_CODE (decl) == FUNCTION_DECL \ - && DEFAULT_ABI == ABI_AIX) \ - { \ - if (TREE_PUBLIC (decl)) \ - { \ - if (RS6000_WEAK && DECL_WEAK (decl)) \ - { \ - fputs ("\t.weak .", FILE); \ - assemble_name (FILE, alias); \ - putc ('\n', FILE); \ - } \ - else \ - { \ - fputs ("\t.globl .", FILE); \ - assemble_name (FILE, alias); \ - putc ('\n', FILE); \ - } \ - } \ - else \ - { \ - fputs ("\t.lglobl .", FILE); \ - assemble_name (FILE, alias); \ - putc ('\n', FILE); \ - } \ - fputs ("\t.set .", FILE); \ - assemble_name (FILE, alias); \ - fputs (",.", FILE); \ - assemble_name (FILE, name); \ - fputc ('\n', FILE); \ - } \ - ASM_OUTPUT_DEF (FILE, alias, name); \ -} while (0) +#if RS6000_WEAK +/* Used in lieu of ASM_WEAKEN_LABEL. */ +#define ASM_WEAKEN_DECL(FILE, DECL, NAME, VAL) \ + do \ + { \ + fputs ("\t.weak\t", (FILE)); \ + assemble_name ((FILE), (NAME)); \ + if ((DECL) && TREE_CODE (DECL) == FUNCTION_DECL \ + && DEFAULT_ABI == ABI_AIX) \ + { \ + fputs ("\n\t.weak\t.", (FILE)); \ + assemble_name ((FILE), (NAME)); \ + } \ + fputc ('\n', (FILE)); \ + if (VAL) \ + { \ + ASM_OUTPUT_DEF ((FILE), (NAME), (VAL)); \ + if ((DECL) && TREE_CODE (DECL) == FUNCTION_DECL \ + && DEFAULT_ABI == ABI_AIX) \ + { \ + fputs ("\t.set\t.", (FILE)); \ + assemble_name ((FILE), (NAME)); \ + fputs (",.", (FILE)); \ + assemble_name ((FILE), (VAL)); \ + fputc ('\n', (FILE)); \ + } \ + } \ + } \ + while (0) +#endif + +/* This implements the `alias' attribute. 
*/ +#undef ASM_OUTPUT_DEF_FROM_DECLS +#define ASM_OUTPUT_DEF_FROM_DECLS(FILE, DECL, TARGET) \ + do \ + { \ + const char *alias = XSTR (XEXP (DECL_RTL (DECL), 0), 0); \ + const char *name = IDENTIFIER_POINTER (TARGET); \ + if (TREE_CODE (DECL) == FUNCTION_DECL \ + && DEFAULT_ABI == ABI_AIX) \ + { \ + if (TREE_PUBLIC (DECL)) \ + { \ + if (!RS6000_WEAK || !DECL_WEAK (DECL)) \ + { \ + fputs ("\t.globl\t.", FILE); \ + assemble_name (FILE, alias); \ + putc ('\n', FILE); \ + } \ + } \ + else if (TARGET_XCOFF) \ + { \ + fputs ("\t.lglobl\t.", FILE); \ + assemble_name (FILE, alias); \ + putc ('\n', FILE); \ + } \ + fputs ("\t.set\t.", FILE); \ + assemble_name (FILE, alias); \ + fputs (",.", FILE); \ + assemble_name (FILE, name); \ + fputc ('\n', FILE); \ + } \ + ASM_OUTPUT_DEF (FILE, alias, name); \ + } \ + while (0) /* Output to assembler file text saying following lines may contain character constants, extra white space, comments, etc. */ @@ -2706,6 +2752,10 @@ extern char rs6000_reg_names[][8]; /* register names (0 vs. %r0). */ /* Define the codes that are matched by predicates in rs6000.c. */ #define PREDICATE_CODES \ + {"any_operand", {CONST_INT, CONST_DOUBLE, CONST, SYMBOL_REF, \ + LABEL_REF, SUBREG, REG, MEM, PARALLEL}}, \ + {"zero_constant", {CONST_INT, CONST_DOUBLE, CONST, SYMBOL_REF, \ + LABEL_REF, SUBREG, REG, MEM}}, \ {"short_cint_operand", {CONST_INT}}, \ {"u_short_cint_operand", {CONST_INT}}, \ {"non_short_cint_operand", {CONST_INT}}, \ @@ -2715,6 +2765,7 @@ extern char rs6000_reg_names[][8]; /* register names (0 vs. %r0). 
*/ {"cc_reg_not_cr0_operand", {SUBREG, REG}}, \ {"reg_or_short_operand", {SUBREG, REG, CONST_INT}}, \ {"reg_or_neg_short_operand", {SUBREG, REG, CONST_INT}}, \ + {"reg_or_aligned_short_operand", {SUBREG, REG, CONST_INT}}, \ {"reg_or_u_short_operand", {SUBREG, REG, CONST_INT}}, \ {"reg_or_cint_operand", {SUBREG, REG, CONST_INT}}, \ {"reg_or_arith_cint_operand", {SUBREG, REG, CONST_INT}}, \ @@ -2761,6 +2812,7 @@ extern char rs6000_reg_names[][8]; /* register names (0 vs. %r0). */ GT, LEU, LTU, GEU, GTU}}, \ {"boolean_operator", {AND, IOR, XOR}}, \ {"boolean_or_operator", {IOR, XOR}}, \ + {"altivec_register_operand", {REG}}, \ {"min_max_operator", {SMIN, SMAX, UMIN, UMAX}}, /* uncomment for disabling the corresponding default options */ @@ -2938,19 +2990,6 @@ enum rs6000_builtins ALTIVEC_BUILTIN_VUPKLSB, ALTIVEC_BUILTIN_VUPKLPX, ALTIVEC_BUILTIN_VUPKLSH, - ALTIVEC_BUILTIN_VCMPBFP_P, - ALTIVEC_BUILTIN_VCMPEQFP_P, - ALTIVEC_BUILTIN_VCMPEQUB_P, - ALTIVEC_BUILTIN_VCMPEQUH_P, - ALTIVEC_BUILTIN_VCMPEQUW_P, - ALTIVEC_BUILTIN_VCMPGEFP_P, - ALTIVEC_BUILTIN_VCMPGTFP_P, - ALTIVEC_BUILTIN_VCMPGTSB_P, - ALTIVEC_BUILTIN_VCMPGTSH_P, - ALTIVEC_BUILTIN_VCMPGTSW_P, - ALTIVEC_BUILTIN_VCMPGTUB_P, - ALTIVEC_BUILTIN_VCMPGTUH_P, - ALTIVEC_BUILTIN_VCMPGTUW_P, ALTIVEC_BUILTIN_MTVSCR, ALTIVEC_BUILTIN_MFVSCR, ALTIVEC_BUILTIN_DSSALL, @@ -2970,5 +3009,25 @@ enum rs6000_builtins ALTIVEC_BUILTIN_STVEBX, ALTIVEC_BUILTIN_STVEHX, ALTIVEC_BUILTIN_STVEWX, - ALTIVEC_BUILTIN_STVXL + ALTIVEC_BUILTIN_STVXL, + ALTIVEC_BUILTIN_VCMPBFP_P, + ALTIVEC_BUILTIN_VCMPEQFP_P, + ALTIVEC_BUILTIN_VCMPEQUB_P, + ALTIVEC_BUILTIN_VCMPEQUH_P, + ALTIVEC_BUILTIN_VCMPEQUW_P, + ALTIVEC_BUILTIN_VCMPGEFP_P, + ALTIVEC_BUILTIN_VCMPGTFP_P, + ALTIVEC_BUILTIN_VCMPGTSB_P, + ALTIVEC_BUILTIN_VCMPGTSH_P, + ALTIVEC_BUILTIN_VCMPGTSW_P, + ALTIVEC_BUILTIN_VCMPGTUB_P, + ALTIVEC_BUILTIN_VCMPGTUH_P, + ALTIVEC_BUILTIN_VCMPGTUW_P, + ALTIVEC_BUILTIN_ABSS_V4SI, + ALTIVEC_BUILTIN_ABSS_V8HI, + ALTIVEC_BUILTIN_ABSS_V16QI, + ALTIVEC_BUILTIN_ABS_V4SI, + 
ALTIVEC_BUILTIN_ABS_V4SF, + ALTIVEC_BUILTIN_ABS_V8HI, + ALTIVEC_BUILTIN_ABS_V16QI }; diff --git a/contrib/gcc/config/rs6000/rs6000.md b/contrib/gcc/config/rs6000/rs6000.md index 8fb45ff..93c6fc3 100644 --- a/contrib/gcc/config/rs6000/rs6000.md +++ b/contrib/gcc/config/rs6000/rs6000.md @@ -2982,7 +2982,7 @@ (set_attr "length" "4,8")]) (define_split - [(set (match_operand:CC 3 "cc_reg_operand" "") + [(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "") (compare:CC (match_operator:SI 4 "boolean_operator" [(match_operand:SI 1 "gpc_reg_operand" "") (match_operand:SI 2 "gpc_reg_operand" "")]) @@ -3070,7 +3070,7 @@ (set_attr "length" "4,8")]) (define_split - [(set (match_operand:CC 3 "cc_reg_operand" "") + [(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "") (compare:CC (match_operator:SI 4 "boolean_operator" [(not:SI (match_operand:SI 1 "gpc_reg_operand" "")) (match_operand:SI 2 "gpc_reg_operand" "")]) @@ -3136,7 +3136,7 @@ (set_attr "length" "4,8")]) (define_split - [(set (match_operand:CC 3 "cc_reg_operand" "") + [(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "") (compare:CC (match_operator:SI 4 "boolean_operator" [(not:SI (match_operand:SI 1 "gpc_reg_operand" "")) (not:SI (match_operand:SI 2 "gpc_reg_operand" ""))]) @@ -7295,7 +7295,7 @@ (set_attr "length" "4,8")]) (define_split - [(set (match_operand:CC 3 "cc_reg_operand" "") + [(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "") (compare:CC (match_operator:DI 4 "boolean_operator" [(match_operand:DI 1 "gpc_reg_operand" "") (match_operand:DI 2 "gpc_reg_operand" "")]) @@ -7395,7 +7395,7 @@ (set_attr "length" "4,8")]) (define_split - [(set (match_operand:CC 3 "cc_reg_operand" "") + [(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "") (compare:CC (match_operator:DI 4 "boolean_operator" [(not:DI (match_operand:DI 1 "gpc_reg_operand" "")) (match_operand:DI 2 "gpc_reg_operand" "")]) @@ -7461,7 +7461,7 @@ (set_attr "length" "4,8")]) (define_split - [(set (match_operand:CC 3 "cc_reg_operand" "") + [(set 
(match_operand:CC 3 "cc_reg_not_cr0_operand" "") (compare:CC (match_operator:DI 4 "boolean_operator" [(not:DI (match_operand:DI 1 "gpc_reg_operand" "")) (not:DI (match_operand:DI 2 "gpc_reg_operand" ""))]) @@ -7606,7 +7606,7 @@ operands2[0] = operands[0]; operands2[1] = operands[1]; operands2[2] = operands[2]; - operands2[3] = gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM); + operands2[3] = gen_rtx_REG (SImode, RS6000_PIC_OFFSET_TABLE_REGNUM); output_asm_insn (\"{l|lwz} %0,lo16(%2)(%1)\", operands); /* We cannot rely on ha16(low half)==ha16(high half), alas, although in practice it almost always is. */ @@ -9173,7 +9173,7 @@ (define_insn "*movdi_update1" [(set (match_operand:DI 3 "gpc_reg_operand" "=r,r") (mem:DI (plus:DI (match_operand:DI 1 "gpc_reg_operand" "0,0") - (match_operand:DI 2 "reg_or_short_operand" "r,I")))) + (match_operand:DI 2 "reg_or_aligned_short_operand" "r,I")))) (set (match_operand:DI 0 "gpc_reg_operand" "=b,b") (plus:DI (match_dup 1) (match_dup 2)))] "TARGET_POWERPC64 && TARGET_UPDATE" @@ -9195,7 +9195,7 @@ (define_insn "movdi_update" [(set (mem:DI (plus:DI (match_operand:DI 1 "gpc_reg_operand" "0,0") - (match_operand:DI 2 "reg_or_short_operand" "r,I"))) + (match_operand:DI 2 "reg_or_aligned_short_operand" "r,I"))) (match_operand:DI 3 "gpc_reg_operand" "r,r")) (set (match_operand:DI 0 "gpc_reg_operand" "=b,b") (plus:DI (match_dup 1) (match_dup 2)))] @@ -9544,7 +9544,7 @@ ;; Code to initialize the TOC register... 
(define_insn "load_toc_aix_si" - [(parallel [(set (match_operand:SI 0 "register_operand" "=r") + [(parallel [(set (match_operand:SI 0 "gpc_reg_operand" "=r") (unspec:SI [(const_int 0)] 7)) (use (reg:SI 2))])] "DEFAULT_ABI == ABI_AIX && TARGET_32BIT" @@ -9559,14 +9559,19 @@ [(set_attr "type" "load")]) (define_insn "load_toc_aix_di" - [(parallel [(set (match_operand:DI 0 "register_operand" "=r") + [(parallel [(set (match_operand:DI 0 "gpc_reg_operand" "=r") (unspec:DI [(const_int 0)] 7)) (use (reg:DI 2))])] "DEFAULT_ABI == ABI_AIX && TARGET_64BIT" "* { char buf[30]; +#ifdef TARGET_RELOCATABLE + ASM_GENERATE_INTERNAL_LABEL (buf, \"LCTOC\", + !TARGET_MINIMAL_TOC || TARGET_RELOCATABLE); +#else ASM_GENERATE_INTERNAL_LABEL (buf, \"LCTOC\", 1); +#endif if (TARGET_ELF) strcat (buf, \"@toc\"); operands[1] = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)); @@ -10841,18 +10846,17 @@ (match_dup 4)))]) (define_insn "" - [(set (match_operand:SI 0 "gpc_reg_operand" "=r,r,r,r,r") + [(set (match_operand:SI 0 "gpc_reg_operand" "=&r,&r,&r,&r,&r") (plus:SI (eq:SI (match_operand:SI 1 "gpc_reg_operand" "%r,r,r,r,r") (match_operand:SI 2 "reg_or_cint_operand" "r,O,K,L,I")) - (match_operand:SI 3 "gpc_reg_operand" "r,r,r,r,r"))) - (clobber (match_scratch:SI 4 "=&r,&r,&r,&r,&r"))] + (match_operand:SI 3 "gpc_reg_operand" "r,r,r,r,r")))] "! 
TARGET_POWERPC64" "@ - xor %4,%1,%2\;{sfi|subfic} %4,%4,0\;{aze|addze} %0,%3 - {sfi|subfic} %4,%1,0\;{aze|addze} %0,%3 - {xoril|xori} %4,%1,%b2\;{sfi|subfic} %4,%4,0\;{aze|addze} %0,%3 - {xoriu|xoris} %4,%1,%u2\;{sfi|subfic} %4,%4,0\;{aze|addze} %0,%3 - {sfi|subfic} %4,%1,%2\;{sfi|subfic} %4,%4,0\;{aze|addze} %0,%3" + xor %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3 + {sfi|subfic} %0,%1,0\;{aze|addze} %0,%3 + {xoril|xori} %0,%1,%b2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3 + {xoriu|xoris} %0,%1,%u2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3 + {sfi|subfic} %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze|addze} %0,%3" [(set_attr "length" "12,8,12,12,12")]) (define_insn "" @@ -10899,23 +10903,22 @@ "") (define_insn "" - [(set (match_operand:CC 5 "cc_reg_operand" "=x,x,x,x,x,?y,?y,?y,?y,?y") + [(set (match_operand:CC 4 "cc_reg_operand" "=x,x,x,x,x,?y,?y,?y,?y,?y") (compare:CC (plus:SI (eq:SI (match_operand:SI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r,r,r") (match_operand:SI 2 "reg_or_cint_operand" "r,O,K,L,I,r,O,K,L,I")) (match_operand:SI 3 "gpc_reg_operand" "r,r,r,r,r,r,r,r,r,r")) (const_int 0))) - (set (match_operand:SI 0 "gpc_reg_operand" "=r,r,r,r,r,r,r,r,r,r") - (plus:SI (eq:SI (match_dup 1) (match_dup 2)) (match_dup 3))) - (clobber (match_scratch:SI 4 "=&r,&r,&r,&r,&r,&r,&r,&r,&r,&r"))] + (set (match_operand:SI 0 "gpc_reg_operand" "=&r,&r,&r,&r,&r,&r,&r,&r,&r,&r") + (plus:SI (eq:SI (match_dup 1) (match_dup 2)) (match_dup 3)))] "! 
TARGET_POWERPC64" "@ - xor %4,%1,%2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %0,%3 - {sfi|subfic} %4,%1,0\;{aze.|addze.} %0,%3 - {xoril|xori} %4,%1,%b2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %0,%3 - {xoriu|xoris} %4,%1,%u2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %0,%3 - {sfi|subfic} %4,%1,%2\;{sfi|subfic} %4,%4,0\;{aze.|addze.} %0,%3 + xor %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3 + {sfi|subfic} %0,%1,0\;{aze.|addze.} %0,%3 + {xoril|xori} %0,%1,%b2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3 + {xoriu|xoris} %0,%1,%u2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3 + {sfi|subfic} %0,%1,%2\;{sfi|subfic} %0,%0,0\;{aze.|addze.} %0,%3 # # # @@ -10925,7 +10928,7 @@ (set_attr "length" "12,8,12,12,12,16,12,16,16,16")]) (define_split - [(set (match_operand:CC 5 "cc_reg_not_cr0_operand" "") + [(set (match_operand:CC 4 "cc_reg_not_cr0_operand" "") (compare:CC (plus:SI (eq:SI (match_operand:SI 1 "gpc_reg_operand" "") @@ -10933,13 +10936,11 @@ (match_operand:SI 3 "gpc_reg_operand" "")) (const_int 0))) (set (match_operand:SI 0 "gpc_reg_operand" "") - (plus:SI (eq:SI (match_dup 1) (match_dup 2)) (match_dup 3))) - (clobber (match_scratch:SI 4 ""))] + (plus:SI (eq:SI (match_dup 1) (match_dup 2)) (match_dup 3)))] "! TARGET_POWERPC64 && reload_completed" - [(parallel [(set (match_dup 0) + [(set (match_dup 0) (plus:SI (eq:SI (match_dup 1) (match_dup 2)) (match_dup 3))) - (clobber (match_dup 4))]) - (set (match_dup 5) + (set (match_dup 4) (compare:CC (match_dup 0) (const_int 0)))] "") @@ -11008,7 +11009,8 @@ (const_int 31)) (match_operand:SI 2 "gpc_reg_operand" "r,r")) (const_int 0))) - (clobber (match_scratch:SI 3 "=&r,&r"))] + (clobber (match_scratch:SI 3 "=&r,&r")) + (clobber (match_scratch:SI 4 "=X,&r"))] "! TARGET_POWERPC64" "@ {ai|addic} %3,%1,-1\;{aze.|addze.} %3,%2 @@ -11024,12 +11026,14 @@ (const_int 31)) (match_operand:SI 2 "gpc_reg_operand" "")) (const_int 0))) - (clobber (match_scratch:SI 3 ""))] + (clobber (match_scratch:SI 3 "")) + (clobber (match_scratch:SI 4 ""))] "! 
TARGET_POWERPC64 && reload_completed" - [(set (match_dup 3) - (plus:SI (lshiftrt:SI (neg:SI (abs:SI (match_dup 1))) - (const_int 31)) - (match_dup 2))) + [(parallel [(set (match_dup 3) + (plus:SI (lshiftrt:SI (neg:SI (abs:SI (match_dup 1))) + (const_int 31)) + (match_dup 2))) + (clobber (match_dup 4))]) (set (match_dup 0) (compare:CC (match_dup 3) (const_int 0)))] @@ -11387,13 +11391,12 @@ (set_attr "length" "12,16")]) (define_insn "" - [(set (match_operand:SI 0 "gpc_reg_operand" "=r") + [(set (match_operand:SI 0 "gpc_reg_operand" "=&r") (plus:SI (leu:SI (match_operand:SI 1 "gpc_reg_operand" "r") (match_operand:SI 2 "reg_or_short_operand" "rI")) - (match_operand:SI 3 "gpc_reg_operand" "r"))) - (clobber (match_scratch:SI 4 "=&r"))] + (match_operand:SI 3 "gpc_reg_operand" "r")))] "! TARGET_POWERPC64" - "{sf%I2|subf%I2c} %4,%1,%2\;{aze|addze} %0,%3" + "{sf%I2|subf%I2c} %0,%1,%2\;{aze|addze} %0,%3" [(set_attr "length" "8")]) (define_insn "" @@ -11725,15 +11728,14 @@ "") (define_insn "" - [(set (match_operand:SI 0 "gpc_reg_operand" "=r,r") + [(set (match_operand:SI 0 "gpc_reg_operand" "=&r,&r") (plus:SI (ltu:SI (match_operand:SI 1 "gpc_reg_operand" "r,r") (match_operand:SI 2 "reg_or_neg_short_operand" "r,P")) - (match_operand:SI 3 "reg_or_short_operand" "rI,rI"))) - (clobber (match_scratch:SI 4 "=&r,&r"))] + (match_operand:SI 3 "reg_or_short_operand" "rI,rI")))] "! 
TARGET_POWERPC64" "@ - {sf|subfc} %4,%2,%1\;{sfe|subfe} %4,%4,%4\;{sf%I3|subf%I3c} %0,%4,%3 - {ai|addic} %4,%1,%n2\;{sfe|subfe} %4,%4,%4\;{sf%I3|subf%I3c} %0,%4,%3" + {sf|subfc} %0,%2,%1\;{sfe|subfe} %0,%0,%0\;{sf%I3|subf%I3c} %0,%0,%3 + {ai|addic} %0,%1,%n2\;{sfe|subfe} %0,%0,%0\;{sf%I3|subf%I3c} %0,%0,%3" [(set_attr "length" "12")]) (define_insn "" @@ -12034,15 +12036,14 @@ "") (define_insn "" - [(set (match_operand:SI 0 "gpc_reg_operand" "=r,r") + [(set (match_operand:SI 0 "gpc_reg_operand" "=&r,&r") (plus:SI (geu:SI (match_operand:SI 1 "gpc_reg_operand" "r,r") (match_operand:SI 2 "reg_or_neg_short_operand" "r,P")) - (match_operand:SI 3 "gpc_reg_operand" "r,r"))) - (clobber (match_scratch:SI 4 "=&r,&r"))] + (match_operand:SI 3 "gpc_reg_operand" "r,r")))] "! TARGET_POWERPC64" "@ - {sf|subfc} %4,%2,%1\;{aze|addze} %0,%3 - {ai|addic} %4,%1,%n2\;{aze|addze} %0,%3" + {sf|subfc} %0,%2,%1\;{aze|addze} %0,%3 + {ai|addic} %0,%1,%n2\;{aze|addze} %0,%3" [(set_attr "length" "8")]) (define_insn "" @@ -12335,13 +12336,12 @@ "") (define_insn "" - [(set (match_operand:SI 0 "gpc_reg_operand" "=r") + [(set (match_operand:SI 0 "gpc_reg_operand" "=&r") (plus:SI (gt:SI (match_operand:SI 1 "gpc_reg_operand" "r") (const_int 0)) - (match_operand:SI 2 "gpc_reg_operand" "r"))) - (clobber (match_scratch:SI 3 "=&r"))] + (match_operand:SI 2 "gpc_reg_operand" "r")))] "! TARGET_POWERPC64" - "{a|addc} %3,%1,%1\;{sfe|subfe} %3,%1,%3\;{aze|addze} %0,%2" + "{a|addc} %0,%1,%1\;{sfe|subfe} %0,%1,%0\;{aze|addze} %0,%2" [(set_attr "length" "12")]) (define_insn "" @@ -12671,15 +12671,14 @@ "") (define_insn "" - [(set (match_operand:SI 0 "gpc_reg_operand" "=r,r") + [(set (match_operand:SI 0 "gpc_reg_operand" "=&r,&r") (plus:SI (gtu:SI (match_operand:SI 1 "gpc_reg_operand" "r,r") (match_operand:SI 2 "reg_or_short_operand" "I,rI")) - (match_operand:SI 3 "reg_or_short_operand" "r,rI"))) - (clobber (match_scratch:SI 4 "=&r,&r"))] + (match_operand:SI 3 "reg_or_short_operand" "r,rI")))] "! 
TARGET_POWERPC64" "@ - {ai|addic} %4,%1,%k2\;{aze|addze} %0,%3 - {sf%I2|subf%I2c} %4,%1,%2\;{sfe|subfe} %4,%4,%4\;{sf%I3|subf%I3c} %0,%4,%3" + {ai|addic} %0,%1,%k2\;{aze|addze} %0,%3 + {sf%I2|subf%I2c} %0,%1,%2\;{sfe|subfe} %0,%0,%0\;{sf%I3|subf%I3c} %0,%0,%3" [(set_attr "length" "8,12")]) (define_insn "" @@ -13256,15 +13255,15 @@ (define_insn "*ctrdi_internal1" [(set (pc) - (if_then_else (ne (match_operand:DI 1 "register_operand" "c,*r,*r") + (if_then_else (ne (match_operand:DI 1 "register_operand" "c,*r,*r,!*f") (const_int 1)) (label_ref (match_operand 0 "" "")) (pc))) - (set (match_operand:DI 2 "register_operand" "=1,*r,m*q*c*l") + (set (match_operand:DI 2 "register_operand" "=1,*r,m*c*l,!*f") (plus:DI (match_dup 1) (const_int -1))) - (clobber (match_scratch:CC 3 "=X,&x,&x")) - (clobber (match_scratch:DI 4 "=X,X,r"))] + (clobber (match_scratch:CC 3 "=X,&x,&x,&x")) + (clobber (match_scratch:DI 4 "=X,X,r,r"))] "TARGET_POWERPC64" "* { @@ -13276,19 +13275,19 @@ return \"bdz $+8\;b %l0\"; }" [(set_attr "type" "branch") - (set_attr "length" "*,12,16")]) + (set_attr "length" "*,12,16,24")]) (define_insn "*ctrdi_internal2" [(set (pc) - (if_then_else (ne (match_operand:DI 1 "register_operand" "c,*r,*r") + (if_then_else (ne (match_operand:DI 1 "register_operand" "c,*r,*r,!*f") (const_int 1)) (pc) (label_ref (match_operand 0 "" "")))) - (set (match_operand:DI 2 "register_operand" "=1,*r,m*q*c*l") + (set (match_operand:DI 2 "register_operand" "=1,*r,m*c*l,!*f") (plus:DI (match_dup 1) (const_int -1))) - (clobber (match_scratch:CC 3 "=X,&x,&x")) - (clobber (match_scratch:DI 4 "=X,X,r"))] + (clobber (match_scratch:CC 3 "=X,&x,&x,&x")) + (clobber (match_scratch:DI 4 "=X,X,r,r"))] "TARGET_POWERPC64" "* { @@ -13300,7 +13299,7 @@ return \"{bdn|bdnz} $+8\;b %l0\"; }" [(set_attr "type" "branch") - (set_attr "length" "*,12,16")]) + (set_attr "length" "*,12,16,24")]) ;; Similar, but we can use GE since we have a REG_NONNEG. 
@@ -13354,15 +13353,15 @@ (define_insn "*ctrdi_internal3" [(set (pc) - (if_then_else (ge (match_operand:DI 1 "register_operand" "c,*r,*r") + (if_then_else (ge (match_operand:DI 1 "register_operand" "c,*r,*r,!*f") (const_int 0)) (label_ref (match_operand 0 "" "")) (pc))) - (set (match_operand:DI 2 "register_operand" "=1,*r,m*q*c*l") + (set (match_operand:DI 2 "register_operand" "=1,*r,m*c*l,!*f") (plus:DI (match_dup 1) (const_int -1))) - (clobber (match_scratch:CC 3 "=X,&x,&X")) - (clobber (match_scratch:DI 4 "=X,X,r"))] + (clobber (match_scratch:CC 3 "=X,&x,&x,&x")) + (clobber (match_scratch:DI 4 "=X,X,r,r"))] "TARGET_POWERPC64 && find_reg_note (insn, REG_NONNEG, 0)" "* { @@ -13374,19 +13373,19 @@ return \"bdz $+8\;b %l0\"; }" [(set_attr "type" "branch") - (set_attr "length" "*,12,16")]) + (set_attr "length" "*,12,16,24")]) (define_insn "*ctrdi_internal4" [(set (pc) - (if_then_else (ge (match_operand:DI 1 "register_operand" "c,*r,*r") + (if_then_else (ge (match_operand:DI 1 "register_operand" "c,*r,*r,!*f") (const_int 0)) (pc) (label_ref (match_operand 0 "" "")))) - (set (match_operand:DI 2 "register_operand" "=1,*r,m*q*c*l") + (set (match_operand:DI 2 "register_operand" "=1,*r,m*c*l,!*f") (plus:DI (match_dup 1) (const_int -1))) - (clobber (match_scratch:CC 3 "=X,&x,&X")) - (clobber (match_scratch:DI 4 "=X,X,r"))] + (clobber (match_scratch:CC 3 "=X,&x,&x,&x")) + (clobber (match_scratch:DI 4 "=X,X,r,r"))] "TARGET_POWERPC64 && find_reg_note (insn, REG_NONNEG, 0)" "* { @@ -13398,7 +13397,7 @@ return \"{bdn|bdnz} $+8\;b %l0\"; }" [(set_attr "type" "branch") - (set_attr "length" "*,12,16")]) + (set_attr "length" "*,12,16,24")]) ;; Similar but use EQ @@ -13452,15 +13451,15 @@ (define_insn "*ctrdi_internal5" [(set (pc) - (if_then_else (eq (match_operand:DI 1 "register_operand" "c,*r,*r") + (if_then_else (eq (match_operand:DI 1 "register_operand" "c,*r,*r,!*f") (const_int 1)) (label_ref (match_operand 0 "" "")) (pc))) - (set (match_operand:DI 2 "register_operand" 
"=1,*r,m*q*c*l") + (set (match_operand:DI 2 "register_operand" "=1,*r,m*c*l,!*f") (plus:DI (match_dup 1) (const_int -1))) - (clobber (match_scratch:CC 3 "=X,&x,&x")) - (clobber (match_scratch:DI 4 "=X,X,r"))] + (clobber (match_scratch:CC 3 "=X,&x,&x,&x")) + (clobber (match_scratch:DI 4 "=X,X,r,r"))] "TARGET_POWERPC64" "* { @@ -13472,19 +13471,19 @@ return \"{bdn|bdnz} $+8\;b %l0\"; }" [(set_attr "type" "branch") - (set_attr "length" "*,12,16")]) + (set_attr "length" "*,12,16,24")]) (define_insn "*ctrdi_internal6" [(set (pc) - (if_then_else (eq (match_operand:DI 1 "register_operand" "c,*r,*r") + (if_then_else (eq (match_operand:DI 1 "register_operand" "c,*r,*r,!*f") (const_int 1)) (pc) (label_ref (match_operand 0 "" "")))) - (set (match_operand:DI 2 "register_operand" "=1,*r,m*q*c*l") + (set (match_operand:DI 2 "register_operand" "=1,*r,m*c*l,!*f") (plus:DI (match_dup 1) (const_int -1))) - (clobber (match_scratch:CC 3 "=X,&x,&x")) - (clobber (match_scratch:DI 4 "=X,X,r"))] + (clobber (match_scratch:CC 3 "=X,&x,&x,&x")) + (clobber (match_scratch:DI 4 "=X,X,r,r"))] "TARGET_POWERPC64" "* { @@ -13496,7 +13495,7 @@ return \"bdz $+8\;b %l0\"; }" [(set_attr "type" "branch") - (set_attr "length" "*,12,16")]) + (set_attr "length" "*,12,16,24")]) ;; Now the splitters if we could not allocate the CTR register @@ -13567,7 +13566,7 @@ (const_int -1))) (clobber (match_scratch:CC 3 "")) (clobber (match_scratch:DI 4 ""))] - "TARGET_POWERPC64 && reload_completed" + "TARGET_POWERPC64 && reload_completed && INT_REGNO_P (REGNO (operands[0]))" [(parallel [(set (match_dup 3) (compare:CC (plus:DI (match_dup 1) (const_int -1)) @@ -13610,6 +13609,45 @@ " { operands[7] = gen_rtx (GET_CODE (operands[2]), VOIDmode, operands[3], const0_rtx); }") + +(define_split + [(set (pc) + (if_then_else (match_operator 2 "comparison_operator" + [(match_operand:DI 1 "gpc_reg_operand" "") + (const_int 1)]) + (match_operand 5 "" "") + (match_operand 6 "" ""))) + (set (match_operand:DI 0 "gpc_reg_operand" "") + 
(plus:DI (match_dup 1) + (const_int -1))) + (clobber (match_scratch:CC 3 "")) + (clobber (match_scratch:DI 4 ""))] + "TARGET_POWERPC64 && reload_completed && FP_REGNO_P (REGNO (operands[0]))" + [(set (match_dup 8) + (match_dup 1)) + (set (match_dup 4) + (match_dup 8)) + (parallel [(set (match_dup 3) + (compare:CC (plus:DI (match_dup 4) + (const_int -1)) + (const_int 0))) + (set (match_dup 4) + (plus:DI (match_dup 4) + (const_int -1)))]) + (set (match_dup 8) + (match_dup 4)) + (set (match_dup 0) + (match_dup 8)) + (set (pc) (if_then_else (match_dup 7) + (match_dup 5) + (match_dup 6)))] + " +{ + operands[7] = gen_rtx (GET_CODE (operands[2]), VOIDmode, operands[3], + const0_rtx); + operands[8] = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0); +}") + (define_insn "trap" [(trap_if (const_int 1) (const_int 0))] @@ -13871,28 +13909,28 @@ ;; Generic LVX load instruction. (define_insn "altivec_lvx_4si" - [(set (match_operand:V4SI 0 "register_operand" "=v") + [(set (match_operand:V4SI 0 "altivec_register_operand" "=v") (match_operand:V4SI 1 "memory_operand" "m"))] "TARGET_ALTIVEC" "lvx %0,%y1" [(set_attr "type" "vecload")]) (define_insn "altivec_lvx_8hi" - [(set (match_operand:V8HI 0 "register_operand" "=v") + [(set (match_operand:V8HI 0 "altivec_register_operand" "=v") (match_operand:V8HI 1 "memory_operand" "m"))] "TARGET_ALTIVEC" "lvx %0,%y1" [(set_attr "type" "vecload")]) (define_insn "altivec_lvx_16qi" - [(set (match_operand:V16QI 0 "register_operand" "=v") + [(set (match_operand:V16QI 0 "altivec_register_operand" "=v") (match_operand:V16QI 1 "memory_operand" "m"))] "TARGET_ALTIVEC" "lvx %0,%y1" [(set_attr "type" "vecload")]) (define_insn "altivec_lvx_4sf" - [(set (match_operand:V4SF 0 "register_operand" "=v") + [(set (match_operand:V4SF 0 "altivec_register_operand" "=v") (match_operand:V4SF 1 "memory_operand" "m"))] "TARGET_ALTIVEC" "lvx %0,%y1" @@ -13901,28 +13939,28 @@ ;; Generic STVX store instruction. 
(define_insn "altivec_stvx_4si" [(set (match_operand:V4SI 0 "memory_operand" "=m") - (match_operand:V4SI 1 "register_operand" "v"))] + (match_operand:V4SI 1 "altivec_register_operand" "v"))] "TARGET_ALTIVEC" "stvx %1,%y0" [(set_attr "type" "vecstore")]) (define_insn "altivec_stvx_8hi" [(set (match_operand:V8HI 0 "memory_operand" "=m") - (match_operand:V8HI 1 "register_operand" "v"))] + (match_operand:V8HI 1 "altivec_register_operand" "v"))] "TARGET_ALTIVEC" "stvx %1,%y0" [(set_attr "type" "vecstore")]) (define_insn "altivec_stvx_16qi" [(set (match_operand:V16QI 0 "memory_operand" "=m") - (match_operand:V16QI 1 "register_operand" "v"))] + (match_operand:V16QI 1 "altivec_register_operand" "v"))] "TARGET_ALTIVEC" "stvx %1,%y0" [(set_attr "type" "vecstore")]) (define_insn "altivec_stvx_4sf" [(set (match_operand:V4SF 0 "memory_operand" "=m") - (match_operand:V4SF 1 "register_operand" "v"))] + (match_operand:V4SF 1 "altivec_register_operand" "v"))] "TARGET_ALTIVEC" "stvx %1,%y0" [(set_attr "type" "vecstore")]) @@ -13935,14 +13973,18 @@ "{ rs6000_emit_move (operands[0], operands[1], V4SImode); DONE; }") (define_insn "*movv4si_internal" - [(set (match_operand:V4SI 0 "nonimmediate_operand" "=m,v,v") - (match_operand:V4SI 1 "input_operand" "v,m,v"))] + [(set (match_operand:V4SI 0 "nonimmediate_operand" "=m,v,v,o,r,r") + (match_operand:V4SI 1 "input_operand" "v,m,v,r,o,r"))] "TARGET_ALTIVEC" "@ stvx %1,%y0 lvx %0,%y1 - vor %0,%1,%1" - [(set_attr "type" "altivec")]) + vor %0,%1,%1 + stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0 + lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1 + mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1" + [(set_attr "type" "altivec") + (set_attr "length" "*,*,*,16,16,16")]) (define_expand "movv8hi" [(set (match_operand:V8HI 0 "nonimmediate_operand" "") @@ -13951,14 +13993,18 @@ "{ rs6000_emit_move (operands[0], operands[1], V8HImode); DONE; }") (define_insn "*movv8hi_internal1" - [(set (match_operand:V8HI 0 "nonimmediate_operand" "=m,v,v") - 
(match_operand:V8HI 1 "input_operand" "v,m,v"))] + [(set (match_operand:V8HI 0 "nonimmediate_operand" "=m,v,v,o,r,r") + (match_operand:V8HI 1 "input_operand" "v,m,v,r,o,r"))] "TARGET_ALTIVEC" "@ stvx %1,%y0 lvx %0,%y1 - vor %0,%1,%1" - [(set_attr "type" "altivec")]) + vor %0,%1,%1 + stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0 + lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1 + mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1" + [(set_attr "type" "altivec") + (set_attr "length" "*,*,*,16,16,16")]) (define_expand "movv16qi" [(set (match_operand:V16QI 0 "nonimmediate_operand" "") @@ -13967,14 +14013,18 @@ "{ rs6000_emit_move (operands[0], operands[1], V16QImode); DONE; }") (define_insn "*movv16qi_internal1" - [(set (match_operand:V16QI 0 "nonimmediate_operand" "=m,v,v") - (match_operand:V16QI 1 "input_operand" "v,m,v"))] + [(set (match_operand:V16QI 0 "nonimmediate_operand" "=m,v,v,o,r,r") + (match_operand:V16QI 1 "input_operand" "v,m,v,r,o,r"))] "TARGET_ALTIVEC" "@ stvx %1,%y0 lvx %0,%y1 - vor %0,%1,%1" - [(set_attr "type" "altivec")]) + vor %0,%1,%1 + stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0 + lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1 + mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1" + [(set_attr "type" "altivec") + (set_attr "length" "*,*,*,16,16,16")]) (define_expand "movv4sf" [(set (match_operand:V4SF 0 "nonimmediate_operand" "") @@ -13983,13 +14033,30 @@ "{ rs6000_emit_move (operands[0], operands[1], V4SFmode); DONE; }") (define_insn "*movv4sf_internal1" - [(set (match_operand:V4SF 0 "nonimmediate_operand" "=m,v,v") - (match_operand:V4SF 1 "input_operand" "v,m,v"))] + [(set (match_operand:V4SF 0 "nonimmediate_operand" "=m,v,v,o,r,r") + (match_operand:V4SF 1 "input_operand" "v,m,v,r,o,r"))] "TARGET_ALTIVEC" "@ stvx %1,%y0 lvx %0,%y1 - vor %0,%1,%1" + vor %0,%1,%1 + stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0 + lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1 + mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1" + [(set_attr "type" 
"altivec") + (set_attr "length" "*,*,*,16,16,16")]) + +(define_insn "get_vrsave_internal" + [(set (match_operand:SI 0 "register_operand" "=r") + (unspec:SI [(reg:SI 109)] 214))] + "TARGET_ALTIVEC" + "* +{ + if (TARGET_MACHO) + return \"mtspr 256,%0\"; + else + return \"mtvrsave %0\"; +}" [(set_attr "type" "altivec")]) (define_insn "*set_vrsave_internal" @@ -13998,9 +14065,45 @@ (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r") (reg:SI 109)] 30))])] "TARGET_ALTIVEC" - "mtvrsave %1" + "* +{ + if (TARGET_MACHO) + return \"mfspr %1,256\"; + else + return \"mtvrsave %1\"; +}" [(set_attr "type" "altivec")]) +;; Vector clears +(define_insn "*movv4si_const0" + [(set (match_operand:V4SI 0 "altivec_register_operand" "=v") + (match_operand:V4SI 1 "zero_constant" ""))] + "TARGET_ALTIVEC" + "vxor %0,%0,%0" + [(set_attr "type" "vecsimple")]) + +(define_insn "*movv4sf_const0" + [(set (match_operand:V4SF 0 "altivec_register_operand" "=v") + (match_operand:V4SF 1 "zero_constant" ""))] + + "TARGET_ALTIVEC" + "vxor %0,%0,%0" + [(set_attr "type" "vecsimple")]) + +(define_insn "*movv8hi_const0" + [(set (match_operand:V8HI 0 "altivec_register_operand" "=v") + (match_operand:V8HI 1 "zero_constant" ""))] + "TARGET_ALTIVEC" + "vxor %0,%0,%0" + [(set_attr "type" "vecsimple")]) + +(define_insn "*movv16qi_const0" + [(set (match_operand:V16QI 0 "altivec_register_operand" "=v") + (match_operand:V16QI 1 "zero_constant" ""))] + "TARGET_ALTIVEC" + "vxor %0,%0,%0" + [(set_attr "type" "vecsimple")]) + ;; Simple binary operations. (define_insn "addv16qi3" @@ -15033,6 +15136,7 @@ "TARGET_ALTIVEC" "vspltb %0,%1,%2" [(set_attr "type" "vecperm")]) + (define_insn "altivec_vsplth" [(set (match_operand:V8HI 0 "register_operand" "=v") (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") @@ -15318,110 +15422,79 @@ ;; AltiVec predicates. 
-(define_insn "altivec_vcmpequb_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 173))] - "TARGET_ALTIVEC" - "vcmpequb. %0,%1,%2" -[(set_attr "type" "veccmp")]) - -(define_insn "altivec_vcmpequh_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 174))] +(define_expand "cr6_test_for_zero" + [(set (match_operand:SI 0 "register_operand" "=r") + (eq:SI (reg:CC 74) + (const_int 0)))] "TARGET_ALTIVEC" - "vcmpequh. %0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpequw_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 175))] +(define_expand "cr6_test_for_zero_reverse" + [(set (match_operand:SI 0 "register_operand" "=r") + (eq:SI (reg:CC 74) + (const_int 0))) + (set (match_dup 0) (minus:SI (const_int 1) (match_dup 0)))] "TARGET_ALTIVEC" - "vcmpequw. %0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpeqfp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 176))] +(define_expand "cr6_test_for_lt" + [(set (match_operand:SI 0 "register_operand" "=r") + (lt:SI (reg:CC 74) + (const_int 0)))] "TARGET_ALTIVEC" - "vcmpeqfp. 
%0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpgtub_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 177))] +(define_expand "cr6_test_for_lt_reverse" + [(set (match_operand:SI 0 "register_operand" "=r") + (lt:SI (reg:CC 74) + (const_int 0))) + (set (match_dup 0) (minus:SI (const_int 1) (match_dup 0)))] "TARGET_ALTIVEC" - "vcmpgtub. %0,%1,%2" -[(set_attr "type" "veccmp")]) + "") -(define_insn "altivec_vcmpgtsb_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 178))] - "TARGET_ALTIVEC" - "vcmpgtsb. %0,%1,%2" -[(set_attr "type" "veccmp")]) +;; We can get away with generating the opcode on the fly (%3 below) +;; because all the predicates have the same scheduling parameters. -(define_insn "altivec_vcmpgtuw_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 179))] +(define_insn "altivec_predicate_v4si" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 173)) + (clobber (match_scratch:V4SI 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgtuw. %0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpgtsw_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 180))] +(define_insn "altivec_predicate_v4sf" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V4SF 1 "register_operand" "v") + (match_operand:V4SF 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 174)) + (clobber (match_scratch:V4SF 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgtsw. 
%0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpgefp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 181))] +(define_insn "altivec_predicate_v8hi" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 175)) + (clobber (match_scratch:V8HI 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgefp. %0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpgtfp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 182))] +(define_insn "altivec_predicate_v16qi" + [(set (reg:CC 74) + (unspec:CC [(match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v") + (match_operand 3 "any_operand" "")] 175)) + (clobber (match_scratch:V16QI 0 "=v"))] "TARGET_ALTIVEC" - "vcmpgtfp. %0,%1,%2" + "%3 %0,%1,%2" [(set_attr "type" "veccmp")]) -(define_insn "altivec_vcmpbfp_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 183))] - "TARGET_ALTIVEC" - "vcmpbfp. %0,%1,%2" -[(set_attr "type" "veccmp")]) - -(define_insn "altivec_vcmpgtuh_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 184))] - "TARGET_ALTIVEC" - "vcmpgtuh. %0,%1,%2" -[(set_attr "type" "veccmp")]) - -(define_insn "altivec_vcmpgtsh_p" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 185))] - "TARGET_ALTIVEC" - "vcmpgtsh. 
%0,%1,%2" - [(set_attr "type" "veccmp")]) - (define_insn "altivec_mtvscr" [(unspec [(match_operand:V4SI 0 "register_operand" "v")] 186)] "TARGET_ALTIVEC" @@ -15495,49 +15568,61 @@ "lvsr %0,%1,%2" [(set_attr "type" "vecload")]) +;; Parallel some of the LVE* and STV*'s with unspecs because some have +;; identical rtl but different instructions-- and gcc gets confused. + (define_insn "altivec_lvebx" - [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:SI 1 "register_operand" "b") - (match_operand:SI 2 "register_operand" "r")] 196))] + [(parallel + [(set (match_operand:V16QI 0 "register_operand" "=v") + (mem:V16QI (plus:SI (match_operand:SI 1 "register_operand" "b") + (match_operand:SI 2 "register_operand" "r")))) + (unspec [(const_int 0)] 196)])] "TARGET_ALTIVEC" "lvebx %0,%1,%2" [(set_attr "type" "vecload")]) (define_insn "altivec_lvehx" - [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI [(match_operand:SI 1 "register_operand" "b") - (match_operand:SI 2 "register_operand" "r")] 197))] + [(parallel + [(set (match_operand:V8HI 0 "register_operand" "=v") + (mem:V8HI + (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b") + (match_operand:SI 2 "register_operand" "r")) + (const_int -2)))) + (unspec [(const_int 0)] 197)])] "TARGET_ALTIVEC" "lvehx %0,%1,%2" [(set_attr "type" "vecload")]) (define_insn "altivec_lvewx" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:SI 1 "register_operand" "b") - (match_operand:SI 2 "register_operand" "r")] 198))] + [(parallel + [(set (match_operand:V4SI 0 "register_operand" "=v") + (mem:V4SI + (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b") + (match_operand:SI 2 "register_operand" "r")) + (const_int -4)))) + (unspec [(const_int 0)] 198)])] "TARGET_ALTIVEC" "lvewx %0,%1,%2" [(set_attr "type" "vecload")]) (define_insn "altivec_lvxl" - [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:SI 1 
"register_operand" "b") - (match_operand:SI 2 "register_operand" "r")] 199))] + [(parallel + [(set (match_operand:V4SI 0 "register_operand" "=v") + (mem:V4SI (plus:SI (match_operand:SI 1 "register_operand" "b") + (match_operand:SI 2 "register_operand" "r")))) + (unspec [(const_int 0)] 213)])] "TARGET_ALTIVEC" "lvxl %0,%1,%2" [(set_attr "type" "vecload")]) (define_insn "altivec_lvx" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:SI 1 "register_operand" "b") - (match_operand:SI 2 "register_operand" "r")] 200))] + (mem:V4SI (plus:SI (match_operand:SI 1 "register_operand" "b") + (match_operand:SI 2 "register_operand" "r"))))] "TARGET_ALTIVEC" "lvx %0,%1,%2" [(set_attr "type" "vecload")]) -;; Parallel the STV*'s with unspecs because some of them have -;; identical rtl but are different instructions-- and gcc gets confused. - (define_insn "altivec_stvx" [(parallel [(set (mem:V4SI @@ -15596,3 +15681,73 @@ "TARGET_ALTIVEC" "stvewx %2,%0,%1" [(set_attr "type" "vecstore")]) + +(define_insn "absv16qi2" + [(set (match_operand:V16QI 0 "register_operand" "=v") + (abs:V16QI (match_operand:V16QI 1 "register_operand" "v"))) + (clobber (match_scratch:V16QI 2 "=v")) + (clobber (match_scratch:V16QI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsububm %3,%2,%1\;vmaxsb %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "absv8hi2" + [(set (match_operand:V8HI 0 "register_operand" "=v") + (abs:V8HI (match_operand:V8HI 1 "register_operand" "v"))) + (clobber (match_scratch:V8HI 2 "=v")) + (clobber (match_scratch:V8HI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubuhm %3,%2,%1\;vmaxsh %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "absv4si2" + [(set (match_operand:V4SI 0 "register_operand" "=v") + (abs:V4SI (match_operand:V4SI 1 "register_operand" "v"))) + (clobber (match_scratch:V4SI 2 "=v")) + (clobber (match_scratch:V4SI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubuwm 
%3,%2,%1\;vmaxsw %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "absv4sf2" + [(set (match_operand:V4SF 0 "register_operand" "=v") + (abs:V4SF (match_operand:V4SF 1 "register_operand" "v"))) + (clobber (match_scratch:V4SF 2 "=v")) + (clobber (match_scratch:V4SF 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisw %2, -1\;vslw %3,%2,%2\;vandc %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "altivec_abss_v16qi" + [(set (match_operand:V16QI 0 "register_operand" "=v") + (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")] 210)) + (clobber (match_scratch:V16QI 2 "=v")) + (clobber (match_scratch:V16QI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubsbs %3,%2,%1\;vmaxsb %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "altivec_abss_v8hi" + [(set (match_operand:V8HI 0 "register_operand" "=v") + (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")] 211)) + (clobber (match_scratch:V8HI 2 "=v")) + (clobber (match_scratch:V8HI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubshs %3,%2,%1\;vmaxsh %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "altivec_abss_v4si" + [(set (match_operand:V4SI 0 "register_operand" "=v") + (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")] 212)) + (clobber (match_scratch:V4SI 2 "=v")) + (clobber (match_scratch:V4SI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubsws %3,%2,%1\;vmaxsw %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) diff --git a/contrib/gcc/config/rs6000/rtems.h b/contrib/gcc/config/rs6000/rtems.h index 6810d38..aa68130 100644 --- a/contrib/gcc/config/rs6000/rtems.h +++ b/contrib/gcc/config/rs6000/rtems.h @@ -1,5 +1,5 @@ /* Definitions for rtems targeting a PowerPC using elf. - Copyright (C) 1996, 1997, 2000, 2001 Free Software Foundation, Inc. + Copyright (C) 1996, 1997, 2000, 2001, 2002 Free Software Foundation, Inc. 
Contributed by Joel Sherrill (joel@OARcorp.com). This file is part of GNU CC. @@ -22,13 +22,5 @@ Boston, MA 02111-1307, USA. */ /* Specify predefined symbols in preprocessor. */ #undef CPP_PREDEFINES -#define CPP_PREDEFINES "-DPPC -Drtems -D__rtems__ \ +#define CPP_PREDEFINES "-DPPC -D__rtems__ \ -Asystem=rtems -Acpu=powerpc -Amachine=powerpc" - -/* Generate calls to memcpy, memcmp and memset. */ -#ifndef TARGET_MEM_FUNCTIONS -#define TARGET_MEM_FUNCTIONS -#endif - -#undef STARTFILE_DEFAULT_SPEC -#define STARTFILE_DEFAULT_SPEC "crt0.o%s" diff --git a/contrib/gcc/config/rs6000/sysv4.h b/contrib/gcc/config/rs6000/sysv4.h index c20eaa0..b807bb7 100644 --- a/contrib/gcc/config/rs6000/sysv4.h +++ b/contrib/gcc/config/rs6000/sysv4.h @@ -197,6 +197,8 @@ do { \ rs6000_current_abi = ABI_V4; \ else if (!strcmp (rs6000_abi_name, "linux")) \ rs6000_current_abi = ABI_V4; \ + else if (!strcmp (rs6000_abi_name, "gnu")) \ + rs6000_current_abi = ABI_V4; \ else if (!strcmp (rs6000_abi_name, "netbsd")) \ rs6000_current_abi = ABI_V4; \ else if (!strcmp (rs6000_abi_name, "i960-old")) \ @@ -383,7 +385,7 @@ do { \ /* No data type wants to be aligned rounder than this. */ #undef BIGGEST_ALIGNMENT -#define BIGGEST_ALIGNMENT (TARGET_EABI ? 64 : 128) +#define BIGGEST_ALIGNMENT ((TARGET_EABI && !TARGET_ALTIVEC) ? 64 : 128) /* An expression for the alignment of a structure field FIELD if the alignment computed in the usual way is COMPUTED. */ @@ -397,7 +399,8 @@ do { \ SPECIFIED. */ #define ROUND_TYPE_ALIGN(TYPE, COMPUTED, SPECIFIED) \ ((TARGET_ALTIVEC && TREE_CODE (TYPE) == VECTOR_TYPE) \ - ? 128 : MAX (COMPUTED, SPECIFIED)) + ? 
MAX (MAX ((COMPUTED), (SPECIFIED)), 128) \ + : MAX (COMPUTED, SPECIFIED)) #undef BIGGEST_FIELD_ALIGNMENT #undef ADJUST_FIELD_ALIGN @@ -877,6 +880,7 @@ do { \ %{mcall-freebsd: -mbig} \ %{mcall-i960-old: -mlittle} \ %{mcall-linux: -mbig} \ + %{mcall-gnu: -mbig} \ %{mcall-netbsd: -mbig} \ }}}}" @@ -889,7 +893,7 @@ do { \ } \ }}" -#define CC1_ENDIAN_DEFAULT_SPEC "%(cc1_endian_big_spec)" +#define CC1_ENDIAN_DEFAULT_SPEC "%(cc1_endian_big)" /* Pass -G xxx to the compiler and set correct endian mode. */ #define CC1_SPEC "%{G*} \ @@ -900,10 +904,11 @@ do { \ %{mcall-freebsd: -mbig %(cc1_endian_big) } \ %{mcall-i960-old: -mlittle %(cc1_endian_little) } \ %{mcall-linux: -mbig %(cc1_endian_big) } \ + %{mcall-gnu: -mbig %(cc1_endian_big) } \ %{mcall-netbsd: -mbig %(cc1_endian_big) } \ - %{!mcall-aixdesc: %{!mcall-freebsd: %{!mcall-i960-old: %{!mcall-linux: %{!mcall-netbsd: \ + %{!mcall-aixdesc: %{!mcall-freebsd: %{!mcall-i960-old: %{!mcall-linux: %{!mcall-gnu: %{!mcall-netbsd: \ %(cc1_endian_default) \ - }}}}} \ + }}}}}} \ }}}} \ %{mno-sdata: -msdata=none } \ %{meabi: %{!mcall-*: -mcall-sysv }} \ @@ -912,6 +917,7 @@ do { \ %{mcall-freebsd: -mno-eabi } \ %{mcall-i960-old: -meabi } \ %{mcall-linux: -mno-eabi } \ + %{mcall-gnu: -mno-eabi } \ %{mcall-netbsd: -mno-eabi }}} \ %{msdata: -msdata=default} \ %{mno-sdata: -msdata=none} \ @@ -942,9 +948,10 @@ do { \ %{msim: %(link_start_sim) } \ %{mcall-freebsd: %(link_start_freebsd) } \ %{mcall-linux: %(link_start_linux) } \ +%{mcall-gnu: %(link_start_gnu) } \ %{mcall-netbsd: %(link_start_netbsd) } \ -%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-linux: \ - %{!mcall-netbsd: %{!mcall-freebsd: %(link_start_default) }}}}}}}" +%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-linux: %{!mcall-gnu: \ + %{!mcall-netbsd: %{!mcall-freebsd: %(link_start_default) }}}}}}}}" #define LINK_START_DEFAULT_SPEC "" @@ -998,8 +1005,9 @@ do { \ %{msim: %(link_os_sim) } \ %{mcall-freebsd: %(link_os_freebsd) } \ %{mcall-linux: %(link_os_linux) } \ 
+%{mcall-gnu: %(link_os_gnu) } \ %{mcall-netbsd: %(link_os_netbsd) } \ -%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-netbsd: %(link_os_default) }}}}}}}" +%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-gnu: %{!mcall-netbsd: %(link_os_default) }}}}}}}}" #define LINK_OS_DEFAULT_SPEC "" @@ -1044,10 +1052,11 @@ do { \ %{!mlittle: %{!mlittle-endian: %{!mbig: %{!mbig-endian: \ %{mcall-freebsd: %(cpp_endian_big) } \ %{mcall-linux: %(cpp_endian_big) } \ + %{mcall-gnu: %(cpp_endian_big) } \ %{mcall-netbsd: %(cpp_endian_big) } \ %{mcall-i960-old: %(cpp_endian_little) } \ %{mcall-aixdesc: %(cpp_endian_big) } \ - %{!mcall-linux: %{!mcall-freebsd: %{!mcall-netbsd: %{!mcall-aixdesc: %(cpp_endian_default) }}}}}}}}" + %{!mcall-linux: %{!mcall-gnu: %{!mcall-freebsd: %{!mcall-netbsd: %{!mcall-aixdesc: %(cpp_endian_default) }}}}}}}}}" #define CPP_ENDIAN_DEFAULT_SPEC "%(cpp_endian_big)" @@ -1060,8 +1069,9 @@ do { \ %{msim: %(cpp_os_sim) } \ %{mcall-freebsd: %(cpp_os_freebsd) } \ %{mcall-linux: %(cpp_os_linux) } \ +%{mcall-gnu: %(cpp_os_gnu) } \ %{mcall-netbsd: %(cpp_os_netbsd) } \ -%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-netbsd: %(cpp_os_default) }}}}}}}" +%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-gnu: %{!mcall-netbsd: %(cpp_os_default) }}}}}}}}" #define CPP_OS_DEFAULT_SPEC "" @@ -1074,8 +1084,9 @@ do { \ %{msim: %(startfile_sim) } \ %{mcall-freebsd: %(startfile_freebsd) } \ %{mcall-linux: %(startfile_linux) } \ +%{mcall-gnu: %(startfile_gnu) } \ %{mcall-netbsd: %(startfile_netbsd) } \ -%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-netbsd: %(startfile_default) }}}}}}}" +%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-gnu: %{!mcall-netbsd: %(startfile_default) }}}}}}}}" #define STARTFILE_DEFAULT_SPEC "" @@ -1088,23 +1099,25 @@ do { 
\ %{msim: %(lib_sim) } \ %{mcall-freebsd: %(lib_freebsd) } \ %{mcall-linux: %(lib_linux) } \ +%{mcall-gnu: %(lib_gnu) } \ %{mcall-netbsd: %(lib_netbsd) } \ -%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-netbsd: %(lib_default) }}}}}}}" +%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-gnu: %{!mcall-netbsd: %(lib_default) }}}}}}}}" #define LIB_DEFAULT_SPEC "" /* Override svr4.h definition. */ #undef ENDFILE_SPEC -#define ENDFILE_SPEC "\ +#define ENDFILE_SPEC "crtsavres.o%s \ %{mads: %(endfile_ads)} \ %{myellowknife: %(endfile_yellowknife)} \ %{mmvme: %(endfile_mvme)} \ %{msim: %(endfile_sim)} \ %{mcall-freebsd: %(endfile_freebsd) } \ %{mcall-linux: %(endfile_linux) } \ +%{mcall-gnu: %(endfile_gnu) } \ %{mcall-netbsd: %(endfile_netbsd) } \ %{mvxworks: %(endfile_vxworks) } \ -%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-netbsd: %{!mvxworks: %(endfile_default) }}}}}}}}" +%{!mads: %{!myellowknife: %{!mmvme: %{!msim: %{!mcall-freebsd: %{!mcall-linux: %{!mcall-gnu: %{!mcall-netbsd: %{!mvxworks: %(endfile_default) }}}}}}}}}" #define ENDFILE_DEFAULT_SPEC "" @@ -1184,10 +1197,18 @@ do { \ %{profile:-lc_p} %{!profile:-lc}}}" #endif +#ifdef USE_GNULIBC_1 #define STARTFILE_LINUX_SPEC "\ %{!shared: %{pg:gcrt1.o%s} %{!pg:%{p:gcrt1.o%s} %{!p:crt1.o%s}}} \ %{mnewlib: ecrti.o%s} %{!mnewlib: crti.o%s} \ %{!shared:crtbegin.o%s} %{shared:crtbeginS.o%s}" +#else +#define STARTFILE_LINUX_SPEC "\ +%{!shared: %{pg:gcrt1.o%s} %{!pg:%{p:gcrt1.o%s} %{!p:crt1.o%s}}} \ +%{mnewlib: ecrti.o%s} %{!mnewlib: crti.o%s} \ +%{static:crtbeginT.o%s} \ +%{!static:%{!shared:crtbegin.o%s} %{shared:crtbeginS.o%s}}" +#endif #define ENDFILE_LINUX_SPEC "%{!shared:crtend.o%s} %{shared:crtendS.o%s} \ %{mnewlib: ecrtn.o%s} %{!mnewlib: crtn.o%s}" @@ -1198,22 +1219,51 @@ do { \ %{rdynamic:-export-dynamic} \ %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}}}" +#if !defined(USE_GNULIBC_1) && 
defined(HAVE_LD_EH_FRAME_HDR) +# define LINK_EH_SPEC "%{!static:--eh-frame-hdr} " +#endif + #ifdef USE_GNULIBC_1 -#define CPP_OS_LINUX_SPEC "-D__unix__ -D__linux__ \ -%{!undef: \ - %{!ansi: \ - %{!std=*:-Dunix -D__unix -Dlinux -D__linux} \ - %{std=gnu*:-Dunix -D__unix -Dlinux -D__linux}}} \ +#define CPP_OS_LINUX_SPEC "-D__unix__ -D__gnu_linux__ -D__linux__ \ +%{!undef: \ + %{!ansi: \ + %{!std=*:-Dunix -D__unix -Dlinux -D__linux} \ + %{std=gnu*:-Dunix -D__unix -Dlinux -D__linux}}} \ -Asystem=unix -Asystem=posix" #else -#define CPP_OS_LINUX_SPEC "-D__unix__ -D__linux__ \ -%{!undef: \ - %{!ansi: \ - %{!std=*:-Dunix -D__unix -Dlinux -D__linux} \ - %{std=gnu*:-Dunix -D__unix -Dlinux -D__linux}}} \ +#define CPP_OS_LINUX_SPEC "-D__unix__ -D__gnu_linux__ -D__linux__ \ +%{!undef: \ + %{!ansi: \ + %{!std=*:-Dunix -D__unix -Dlinux -D__linux} \ + %{std=gnu*:-Dunix -D__unix -Dlinux -D__linux}}} \ -Asystem=unix -Asystem=posix %{pthread:-D_REENTRANT}" #endif +/* GNU/Hurd support. */ +#define LIB_GNU_SPEC "%{mnewlib: --start-group -lgnu -lc --end-group } \ +%{!mnewlib: %{shared:-lc} %{!shared: %{pthread:-lpthread } \ +%{profile:-lc_p} %{!profile:-lc}}}" + +#define STARTFILE_GNU_SPEC "\ +%{!shared: %{!static: %{pg:gcrt1.o%s} %{!pg:%{p:gcrt1.o%s} %{!p:crt1.o%s}}}} \ +%{static: %{pg:gcrt0.o%s} %{!pg:%{p:gcrt0.o%s} %{!p:crt0.o%s}}} \ +%{mnewlib: ecrti.o%s} %{!mnewlib: crti.o%s} \ +%{!shared:crtbegin.o%s} %{shared:crtbeginS.o%s}" + +#define ENDFILE_GNU_SPEC "%{!shared:crtend.o%s} %{shared:crtendS.o%s} \ +%{mnewlib: ecrtn.o%s} %{!mnewlib: crtn.o%s}" + +#define LINK_START_GNU_SPEC "" + +#define LINK_OS_GNU_SPEC "-m elf32ppclinux %{!shared: %{!static: \ + %{rdynamic:-export-dynamic} \ + %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}}}" + +#define CPP_OS_GNU_SPEC "-D__unix__ -D__gnu_hurd__ -D__GNU__ \ +%{!undef: \ + %{!ansi: -Dunix -D__unix}} \ +-Asystem=gnu -Asystem=unix -Asystem=posix %{pthread:-D_REENTRANT}" + /* NetBSD support. 
*/ #define LIB_NETBSD_SPEC "\ %{profile:-lgmon -lc_p} %{!profile:-lc}" @@ -1292,6 +1342,7 @@ ncrtn.o%s" { "lib_mvme", LIB_MVME_SPEC }, \ { "lib_sim", LIB_SIM_SPEC }, \ { "lib_freebsd", LIB_FREEBSD_SPEC }, \ + { "lib_gnu", LIB_GNU_SPEC }, \ { "lib_linux", LIB_LINUX_SPEC }, \ { "lib_netbsd", LIB_NETBSD_SPEC }, \ { "lib_vxworks", LIB_VXWORKS_SPEC }, \ @@ -1301,6 +1352,7 @@ ncrtn.o%s" { "startfile_mvme", STARTFILE_MVME_SPEC }, \ { "startfile_sim", STARTFILE_SIM_SPEC }, \ { "startfile_freebsd", STARTFILE_FREEBSD_SPEC }, \ + { "startfile_gnu", STARTFILE_GNU_SPEC }, \ { "startfile_linux", STARTFILE_LINUX_SPEC }, \ { "startfile_netbsd", STARTFILE_NETBSD_SPEC }, \ { "startfile_vxworks", STARTFILE_VXWORKS_SPEC }, \ @@ -1310,6 +1362,7 @@ ncrtn.o%s" { "endfile_mvme", ENDFILE_MVME_SPEC }, \ { "endfile_sim", ENDFILE_SIM_SPEC }, \ { "endfile_freebsd", ENDFILE_FREEBSD_SPEC }, \ + { "endfile_gnu", ENDFILE_GNU_SPEC }, \ { "endfile_linux", ENDFILE_LINUX_SPEC }, \ { "endfile_netbsd", ENDFILE_NETBSD_SPEC }, \ { "endfile_vxworks", ENDFILE_VXWORKS_SPEC }, \ @@ -1323,6 +1376,7 @@ ncrtn.o%s" { "link_start_mvme", LINK_START_MVME_SPEC }, \ { "link_start_sim", LINK_START_SIM_SPEC }, \ { "link_start_freebsd", LINK_START_FREEBSD_SPEC }, \ + { "link_start_gnu", LINK_START_GNU_SPEC }, \ { "link_start_linux", LINK_START_LINUX_SPEC }, \ { "link_start_netbsd", LINK_START_NETBSD_SPEC }, \ { "link_start_vxworks", LINK_START_VXWORKS_SPEC }, \ @@ -1334,6 +1388,7 @@ ncrtn.o%s" { "link_os_sim", LINK_OS_SIM_SPEC }, \ { "link_os_freebsd", LINK_OS_FREEBSD_SPEC }, \ { "link_os_linux", LINK_OS_LINUX_SPEC }, \ + { "link_os_gnu", LINK_OS_GNU_SPEC }, \ { "link_os_netbsd", LINK_OS_NETBSD_SPEC }, \ { "link_os_vxworks", LINK_OS_VXWORKS_SPEC }, \ { "link_os_default", LINK_OS_DEFAULT_SPEC }, \ @@ -1349,6 +1404,7 @@ ncrtn.o%s" { "cpp_os_mvme", CPP_OS_MVME_SPEC }, \ { "cpp_os_sim", CPP_OS_SIM_SPEC }, \ { "cpp_os_freebsd", CPP_OS_FREEBSD_SPEC }, \ + { "cpp_os_gnu", CPP_OS_GNU_SPEC }, \ { "cpp_os_linux", CPP_OS_LINUX_SPEC 
}, \ { "cpp_os_netbsd", CPP_OS_NETBSD_SPEC }, \ { "cpp_os_vxworks", CPP_OS_VXWORKS_SPEC }, \ diff --git a/contrib/gcc/config/rs6000/t-aix43 b/contrib/gcc/config/rs6000/t-aix43 index 209a763..7be8ebc 100644 --- a/contrib/gcc/config/rs6000/t-aix43 +++ b/contrib/gcc/config/rs6000/t-aix43 @@ -62,7 +62,7 @@ SHLIB_INSTALL = $(INSTALL_DATA) @shlib_base_name@.a $$(slibdir)/ SHLIB_LIBS = -lc `case @shlib_base_name@ in *pthread*) echo -lpthread ;; esac` SHLIB_MKMAP = $(srcdir)/mkmap-flat.awk SHLIB_MAPFILES = $(srcdir)/libgcc-std.ver -SHLIB_NM_FLAGS = -Bpg +SHLIB_NM_FLAGS = -Bpg -X32_64 # Either 32-bit and 64-bit objects in archives. AR_FLAGS_FOR_TARGET = -X32_64 diff --git a/contrib/gcc/config/rs6000/t-linux64 b/contrib/gcc/config/rs6000/t-linux64 new file mode 100644 index 0000000..48b6150 --- /dev/null +++ b/contrib/gcc/config/rs6000/t-linux64 @@ -0,0 +1,16 @@ +# Override t-linux. We don't want -fPIC. +CRTSTUFF_T_CFLAGS_S = +TARGET_LIBGCC2_CFLAGS = + +EXTRA_MULTILIB_PARTS=crtbegin.o crtend.o crtbeginS.o crtendS.o crtbeginT.o \ + crtsavres.o + +# ld provides these functions as needed. 
+crtsavres.S: + echo >crtsavres.S + +$(T)crtsavres.o: crtsavres.S + $(GCC_FOR_TARGET) $(GCC_CFLAGS) $(INCLUDES) $(MULTILIB_CFLAGS) -c crtsavres.S -o $(T)crtsavres.o + +# Modify the shared lib version file +SHLIB_MKMAP_OPTS = -v dotsyms=1 diff --git a/contrib/gcc/config/rs6000/t-ppccomm b/contrib/gcc/config/rs6000/t-ppccomm index e264586..7570af4 100644 --- a/contrib/gcc/config/rs6000/t-ppccomm +++ b/contrib/gcc/config/rs6000/t-ppccomm @@ -38,9 +38,10 @@ MULTILIB_MATCHES_SYSV = mcall-sysv=mcall-sysv-eabi mcall-sysv=mcall-sysv-noeabi LIBGCC = stmp-multilib INSTALL_LIBGCC = install-multilib EXTRA_MULTILIB_PARTS = crtbegin$(objext) crtend$(objext) \ - crtbeginS$(objext) crtendS$(objext) \ + crtbeginS$(objext) crtendS$(objext) crtbeginT$(objext) \ ecrti$(objext) ecrtn$(objext) \ - ncrti$(objext) ncrtn$(objext) + ncrti$(objext) ncrtn$(objext) \ + crtsavres$(objext) # We build {e,n}crti.o and {e,n}crtn.o, which serve to add begin and # end labels to all of the special sections used when we link using gcc. @@ -58,6 +59,9 @@ ncrti.S: $(srcdir)/config/rs6000/sol-ci.asm ncrtn.S: $(srcdir)/config/rs6000/sol-cn.asm cat $(srcdir)/config/rs6000/sol-cn.asm >ncrtn.S +crtsavres.S: $(srcdir)/config/rs6000/crtsavres.asm + cat $(srcdir)/config/rs6000/crtsavres.asm >crtsavres.S + # Build multiple copies of ?crt{i,n}.o, one for each target switch. $(T)ecrti$(objext): ecrti.S $(GCC_FOR_TARGET) $(GCC_CFLAGS) $(INCLUDES) $(MULTILIB_CFLAGS) -c ecrti.S -o $(T)ecrti$(objext) @@ -71,6 +75,9 @@ $(T)ncrti$(objext): ncrti.S $(T)ncrtn$(objext): ncrtn.S $(GCC_FOR_TARGET) $(GCC_CFLAGS) $(INCLUDES) $(MULTILIB_CFLAGS) -c ncrtn.S -o $(T)ncrtn$(objext) +$(T)crtsavres$(objext): crtsavres.S + $(GCC_FOR_TARGET) $(GCC_CFLAGS) $(INCLUDES) $(MULTILIB_CFLAGS) -c crtsavres.S -o $(T)crtsavres$(objext) + # It is important that crtbegin.o, etc., aren't surprised by stuff in .sdata. 
CRTSTUFF_T_CFLAGS = -msdata=none # Make sure crt*.o are built with -fPIC even if configured with diff --git a/contrib/gcc/config/rs6000/xcoff.h b/contrib/gcc/config/rs6000/xcoff.h index e60f3fe..4a51b04 100644 --- a/contrib/gcc/config/rs6000/xcoff.h +++ b/contrib/gcc/config/rs6000/xcoff.h @@ -1,6 +1,6 @@ /* Definitions of target machine for GNU compiler, for some generic XCOFF file format - Copyright (C) 2001 Free Software Foundation, Inc. + Copyright (C) 2001, 2002 Free Software Foundation, Inc. This file is part of GNU CC. @@ -22,10 +22,6 @@ Boston, MA 02111-1307, USA. */ #define TARGET_OBJECT_FORMAT OBJECT_XCOFF -/* The AIX linker will discard static constructors in object files before - collect has a chance to see them, so scan the object files directly. */ -#define COLLECT_EXPORT_LIST - /* The RS/6000 uses the XCOFF format. */ #define XCOFF_DEBUGGING_INFO @@ -345,13 +341,7 @@ toc_section () \ SYMBOL_REF_FLAG (sym_ref) = 1; \ if (TREE_PUBLIC (DECL)) \ { \ - if (RS6000_WEAK && DECL_WEAK (decl)) \ - { \ - fputs ("\t.weak .", FILE); \ - RS6000_OUTPUT_BASENAME (FILE, NAME); \ - putc ('\n', FILE); \ - } \ - else \ + if (!RS6000_WEAK || !DECL_WEAK (decl)) \ { \ fputs ("\t.globl .", FILE); \ RS6000_OUTPUT_BASENAME (FILE, NAME); \ @@ -464,20 +454,6 @@ toc_section () \ xcoff_bss_section_name); \ } while (0) -/* Output a weak symbol, if weak support present. */ -#ifdef HAVE_GAS_WEAK -#define HANDLE_PRAGMA_WEAK 1 - -#define ASM_WEAKEN_LABEL(FILE, NAME) \ - do \ - { \ - fputs ("\t.weak ", (FILE)); \ - assemble_name ((FILE), (NAME)); \ - fputc ('\n', (FILE)); \ - } \ - while (0) -#endif /* HAVE_GAS_WEAK */ - /* This is how we tell the assembler that two symbols have the same value. */ #define SET_ASM_OP "\t.set " |