public inbox for gcc-cvs@sourceware.org
help / color / mirror / Atom feed
* [gcc(refs/users/wschmidt/heads/builtins4)] rs6000: Add remaining overloads
@ 2020-11-24 16:44 William Schmidt
0 siblings, 0 replies; 4+ messages in thread
From: William Schmidt @ 2020-11-24 16:44 UTC (permalink / raw)
To: gcc-cvs
https://gcc.gnu.org/g:0e6a7cfbe15993702819414eae7053b673f5941f
commit 0e6a7cfbe15993702819414eae7053b673f5941f
Author: Bill Schmidt <wschmidt@linux.ibm.com>
Date: Mon Nov 2 09:55:43 2020 -0500
rs6000: Add remaining overloads
2020-11-02 Bill Schmidt <wschmidt@linux.ibm.com>
* config/rs6000/rs6000-overload.def: Add remaining overloads.
Diff:
---
gcc/config/rs6000/rs6000-overload.def | 3628 +++++++++++++++++++++++++++++++++
1 file changed, 3628 insertions(+)
diff --git a/gcc/config/rs6000/rs6000-overload.def b/gcc/config/rs6000/rs6000-overload.def
index 7c28cdcb84c..82037644157 100644
--- a/gcc/config/rs6000/rs6000-overload.def
+++ b/gcc/config/rs6000/rs6000-overload.def
@@ -74,8 +74,3636 @@
; a semicolon are also treated as blank lines.
+[CRYPTO_PERMXOR, SKIP, __builtin_crypto_vpermxor]
+ vuc __builtin_crypto_vpermxor (vuc, vuc, vuc);
+ VPERMXOR_V16QI
+ vus __builtin_crypto_vpermxor (vus, vus, vus);
+ VPERMXOR_V8HI
+ vui __builtin_crypto_vpermxor (vui, vui, vui);
+ VPERMXOR_V4SI
+ vull __builtin_crypto_vpermxor (vull, vull, vull);
+ VPERMXOR_V2DI
+
+[CRYPTO_PMSUM, SKIP, __builtin_crypto_vpmsum]
+ vuc __builtin_crypto_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_C
+ vus __builtin_crypto_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_C
+ vui __builtin_crypto_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_C
+ vull __builtin_crypto_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_C
+
+[SCAL_CMPB, SKIP, __builtin_cmpb]
+ unsigned int __builtin_cmpb (unsigned int, unsigned int);
+ CMPB_32
+ unsigned long long __builtin_cmpb (unsigned long long, unsigned long long);
+ CMPB
+
[VEC_ABS, vec_abs, __builtin_vec_abs]
vsc __builtin_vec_abs (vsc);
ABS_V16QI
vss __builtin_vec_abs (vss);
ABS_V8HI
+ vsi __builtin_vec_abs (vsi);
+ ABS_V4SI
+ vsll __builtin_vec_abs (vsll);
+ ABS_V2DI
+ vf __builtin_vec_abs (vf);
+ ABS_V4SF
+ vd __builtin_vec_abs (vd);
+ XVABSDP
+
+[VEC_ABSD, vec_absd, __builtin_vec_vadu, _ARCH_PWR9]
+ vuc __builtin_vec_vadu (vuc, vuc);
+ VADUB
+ vus __builtin_vec_vadu (vus, vus);
+ VADUH
+ vui __builtin_vec_vadu (vui, vui);
+ VADUW
+
+[VEC_ABSS, vec_abss, __builtin_vec_abss]
+ vsc __builtin_vec_abss (vsc);
+ ABSS_V16QI
+ vss __builtin_vec_abss (vss);
+ ABSS_V8HI
+ vsi __builtin_vec_abss (vsi);
+ ABSS_V4SI
+
+[VEC_ADD, vec_add, __builtin_vec_add]
+ vsc __builtin_vec_add (vsc, vsc);
+ VADDUBM VADDUBM_VSC
+ vuc __builtin_vec_add (vuc, vuc);
+ VADDUBM VADDUBM_VUC
+ vss __builtin_vec_add (vss, vss);
+ VADDUHM VADDUHM_VSS
+ vus __builtin_vec_add (vus, vus);
+ VADDUHM VADDUHM_VUS
+ vsi __builtin_vec_add (vsi, vsi);
+ VADDUWM VADDUWM_VSI
+ vui __builtin_vec_add (vui, vui);
+ VADDUWM VADDUWM_VUI
+ vsll __builtin_vec_add (vsll, vsll);
+ VADDUDM VADDUDM_VSLL
+ vull __builtin_vec_add (vull, vull);
+ VADDUDM VADDUDM_VULL
+ vsq __builtin_vec_add (vsq, vsq);
+ VADDUQM VADDUQM_VSQ
+ vuq __builtin_vec_add (vuq, vuq);
+ VADDUQM VADDUQM_VUQ
+ vf __builtin_vec_add (vf, vf);
+ XVADDSP
+ vd __builtin_vec_add (vd, vd);
+ XVADDDP
+
+[VEC_ADDC, vec_addc, __builtin_vec_addc]
+ vsi __builtin_vec_addc (vsi, vsi);
+ VADDCUW VADDCUW_VSI
+ vui __builtin_vec_addc (vui, vui);
+ VADDCUW VADDCUW_VUI
+ vsq __builtin_vec_addc (vsq, vsq);
+ VADDCUQ VADDCUQ_VSQ
+ vuq __builtin_vec_addc (vuq, vuq);
+ VADDCUQ VADDCUQ_VUQ
+
+; TODO: Note that the entry for VEC_ADDEC currently gets ignored in
+; altivec_resolve_overloaded_builtin. There are also forms for
+; vsi and vui arguments, but rather than building a define_expand
+; for the instruction sequence generated for those, we do some RTL
+; hackery. Revisit whether we can remove that. For now, keep this
+; much of the entry here to generate the #define, at least.
+[VEC_ADDEC, vec_addec, __builtin_vec_addec]
+ vsq __builtin_vec_addec (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ
+ vuq __builtin_vec_addec (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ
+
+[VEC_ADDS, vec_adds, __builtin_vec_adds]
+ vuc __builtin_vec_adds (vuc, vuc);
+ VADDUBS
+ vsc __builtin_vec_adds (vsc, vsc);
+ VADDSBS
+ vus __builtin_vec_adds (vus, vus);
+ VADDUHS
+ vss __builtin_vec_adds (vss, vss);
+ VADDSHS
+ vui __builtin_vec_adds (vui, vui);
+ VADDUWS
+ vsi __builtin_vec_adds (vsi, vsi);
+ VADDSWS
+
+[VEC_ANDC, vec_andc, __builtin_vec_andc]
+ vbc __builtin_vec_andc (vbc, vbc);
+ VANDC_V16QI_UNS VANDC_VBC
+ vsc __builtin_vec_andc (vsc, vsc);
+ VANDC_V16QI
+ vuc __builtin_vec_andc (vuc, vuc);
+ VANDC_V16QI_UNS VANDC_VUC
+ vbs __builtin_vec_andc (vbs, vbs);
+ VANDC_V8HI_UNS VANDC_VBS
+ vss __builtin_vec_andc (vss, vss);
+ VANDC_V8HI
+ vus __builtin_vec_andc (vus, vus);
+ VANDC_V8HI_UNS VANDC_VUS
+ vbi __builtin_vec_andc (vbi, vbi);
+ VANDC_V4SI_UNS VANDC_VBI
+ vsi __builtin_vec_andc (vsi, vsi);
+ VANDC_V4SI
+ vui __builtin_vec_andc (vui, vui);
+ VANDC_V4SI_UNS VANDC_VUI
+ vbll __builtin_vec_andc (vbll, vbll);
+ VANDC_V4SI_UNS VANDC_VBLL
+ vsll __builtin_vec_andc (vsll, vsll);
+ VANDC_V2DI
+ vull __builtin_vec_andc (vull, vull);
+ VANDC_V2DI_UNS VANDC_VULL
+ vf __builtin_vec_andc (vf, vf);
+ VANDC_V4SF
+ vd __builtin_vec_andc (vd, vd);
+ VANDC_V2DF
+
+[VEC_AVG, vec_avg, __builtin_vec_avg]
+ vsc __builtin_vec_avg (vsc, vsc);
+ VAVGSB
+ vuc __builtin_vec_avg (vuc, vuc);
+ VAVGUB
+ vss __builtin_vec_avg (vss, vss);
+ VAVGSH
+ vus __builtin_vec_avg (vus, vus);
+ VAVGUH
+ vsi __builtin_vec_avg (vsi, vsi);
+ VAVGSW
+ vui __builtin_vec_avg (vui, vui);
+ VAVGUW
+
+[VEC_BLENDV, vec_blendv, __builtin_vec_xxblend, _ARCH_PWR10]
+ vsc __builtin_vec_xxblend (vsc, vsc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VSC
+ vuc __builtin_vec_xxblend (vuc, vuc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VUC
+ vss __builtin_vec_xxblend (vss, vss, vus);
+ VXXBLEND_V8HI VXXBLEND_VSS
+ vus __builtin_vec_xxblend (vus, vus, vus);
+ VXXBLEND_V8HI VXXBLEND_VUS
+ vsi __builtin_vec_xxblend (vsi, vsi, vui);
+ VXXBLEND_V4SI VXXBLEND_VSI
+ vui __builtin_vec_xxblend (vui, vui, vui);
+ VXXBLEND_V4SI VXXBLEND_VUI
+ vsll __builtin_vec_xxblend (vsll, vsll, vull);
+ VXXBLEND_V2DI VXXBLEND_VSLL
+ vull __builtin_vec_xxblend (vull, vull, vull);
+ VXXBLEND_V2DI VXXBLEND_VULL
+ vf __builtin_vec_xxblend (vf, vf, vui);
+ VXXBLEND_V4SF
+ vd __builtin_vec_xxblend (vd, vd, vull);
+ VXXBLEND_V2DF
+
+[VEC_BPERM, vec_bperm, __builtin_vec_vbperm_api, _ARCH_PWR8]
+ vull __builtin_vec_vbperm_api (vull, vuc);
+ VBPERMD VBPERMD_VULL
+ vull __builtin_vec_vbperm_api (vuq, vuc);
+ VBPERMD VBPERMD_VUQ
+ vuc __builtin_vec_vbperm_api (vuc, vuc);
+ VBPERMQ2
+
+[VEC_CEIL, vec_ceil, __builtin_vec_ceil]
+ vf __builtin_vec_ceil (vf);
+ XVRSPIP
+ vd __builtin_vec_ceil (vd);
+ XVRDPIP
+
+[VEC_CFUGE, vec_cfuge, __builtin_vec_cfuge, _ARCH_PWR10]
+ vull __builtin_vec_cfuge (vull, vull);
+ VCFUGED
+
+[VEC_CIPHER_BE, vec_cipher_be, __builtin_vec_vcipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipher_be (vuc, vuc);
+ VCIPHER_BE
+
+[VEC_CIPHERLAST_BE, vec_cipherlast_be, __builtin_vec_vcipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipherlast_be (vuc, vuc);
+ VCIPHERLAST_BE
+
+[VEC_CLRL, vec_clrl, __builtin_vec_clrl, _ARCH_PWR10]
+ vsc __builtin_vec_clrl (vsc, unsigned int);
+ VCLRLB VCLRLB_S
+ vuc __builtin_vec_clrl (vuc, unsigned int);
+ VCLRLB VCLRLB_U
+
+[VEC_CLRR, vec_clrr, __builtin_vec_clrr, _ARCH_PWR10]
+ vsc __builtin_vec_clrr (vsc, unsigned int);
+ VCLRRB VCLRRB_S
+ vuc __builtin_vec_clrr (vuc, unsigned int);
+ VCLRRB VCLRRB_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPAE_P, SKIP, __builtin_vec_vcmpae_p]
+ signed int __builtin_vec_vcmpae_p (vsc, vsc);
+ VCMPAEB_P VCMPAEB_VSC_P
+ signed int __builtin_vec_vcmpae_p (vuc, vuc);
+ VCMPAEB_P VCMPAEB_VUC_P
+ signed int __builtin_vec_vcmpae_p (vbc, vbc);
+ VCMPAEB_P VCMPAEB_VBC_P
+ signed int __builtin_vec_vcmpae_p (vss, vss);
+ VCMPAEH_P VCMPAEH_VSS_P
+ signed int __builtin_vec_vcmpae_p (vus, vus);
+ VCMPAEH_P VCMPAEH_VUS_P
+ signed int __builtin_vec_vcmpae_p (vbs, vbs);
+ VCMPAEH_P VCMPAEH_VBS_P
+ signed int __builtin_vec_vcmpae_p (vp, vp);
+ VCMPAEH_P VCMPAEH_VP_P
+ signed int __builtin_vec_vcmpae_p (vsi, vsi);
+ VCMPAEW_P VCMPAEW_VSI_P
+ signed int __builtin_vec_vcmpae_p (vui, vui);
+ VCMPAEW_P VCMPAEW_VUI_P
+ signed int __builtin_vec_vcmpae_p (vbi, vbi);
+ VCMPAEW_P VCMPAEW_VBI_P
+ signed int __builtin_vec_vcmpae_p (vsll, vsll);
+ VCMPAED_P VCMPAED_VSLL_P
+ signed int __builtin_vec_vcmpae_p (vull, vull);
+ VCMPAED_P VCMPAED_VULL_P
+ signed int __builtin_vec_vcmpae_p (vbll, vbll);
+ VCMPAED_P VCMPAED_VBLL_P
+ signed int __builtin_vec_vcmpae_p (vf, vf);
+ VCMPAEFP_P
+ signed int __builtin_vec_vcmpae_p (vd, vd);
+ VCMPAEDP_P
+
+[VEC_CMPB, vec_cmpb, __builtin_vec_cmpb]
+ vsi __builtin_vec_cmpb (vf, vf);
+ VCMPBFP
+
+[VEC_CMPEQ, vec_cmpeq, __builtin_vec_cmpeq]
+ vbc __builtin_vec_cmpeq (vsc, vsc);
+ VCMPEQUB VCMPEQUB_VSC
+ vbc __builtin_vec_cmpeq (vuc, vuc);
+ VCMPEQUB VCMPEQUB_VUC
+ vbc __builtin_vec_cmpeq (vbc, vbc);
+ VCMPEQUB VCMPEQUB_VBC
+ vbs __builtin_vec_cmpeq (vss, vss);
+ VCMPEQUH VCMPEQUH_VSS
+ vbs __builtin_vec_cmpeq (vus, vus);
+ VCMPEQUH VCMPEQUH_VUS
+ vbs __builtin_vec_cmpeq (vbs, vbs);
+ VCMPEQUH VCMPEQUH_VBS
+ vbi __builtin_vec_cmpeq (vsi, vsi);
+ VCMPEQUW VCMPEQUW_VSI
+ vbi __builtin_vec_cmpeq (vui, vui);
+ VCMPEQUW VCMPEQUW_VUI
+ vbi __builtin_vec_cmpeq (vbi, vbi);
+ VCMPEQUW VCMPEQUW_VBI
+ vbll __builtin_vec_cmpeq (vsll, vsll);
+ VCMPEQUD VCMPEQUD_VSLL
+ vbll __builtin_vec_cmpeq (vull, vull);
+ VCMPEQUD VCMPEQUD_VULL
+ vbll __builtin_vec_cmpeq (vbll, vbll);
+ VCMPEQUD VCMPEQUD_VBLL
+ vbi __builtin_vec_cmpeq (vf, vf);
+ XVCMPEQSP
+ vbll __builtin_vec_cmpeq (vd, vd);
+ XVCMPEQDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPEQ_P, SKIP, __builtin_vec_vcmpeq_p]
+ signed int __builtin_vec_vcmpeq_p (signed int, vuc, vuc);
+ VCMPEQUB_P VCMPEQUB_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsc, vsc);
+ VCMPEQUB_P VCMPEQUB_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbc, vbc);
+ VCMPEQUB_P VCMPEQUB_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vus, vus);
+ VCMPEQUH_P VCMPEQUH_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vss, vss);
+ VCMPEQUH_P VCMPEQUH_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbs, vbs);
+ VCMPEQUH_P VCMPEQUH_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vp, vp);
+ VCMPEQUH_P VCMPEQUH_PP
+ signed int __builtin_vec_vcmpeq_p (signed int, vui, vui);
+ VCMPEQUW_P VCMPEQUW_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsi, vsi);
+ VCMPEQUW_P VCMPEQUW_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbi, vbi);
+ VCMPEQUW_P VCMPEQUW_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vull, vull);
+ VCMPEQUD_P VCMPEQUD_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsll, vsll);
+ VCMPEQUD_P VCMPEQUD_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbll, vbll);
+ VCMPEQUD_P VCMPEQUD_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vf, vf);
+ XVCMPEQSP_P
+ signed int __builtin_vec_vcmpeq_p (signed int, vd, vd);
+ XVCMPEQDP_P
+
+[VEC_CMPEQB, SKIP, __builtin_byte_in_set]
+ signed int __builtin_byte_in_set (unsigned char, unsigned long long);
+ CMPEQB
+
+[VEC_CMPGE, vec_cmpge, __builtin_vec_cmpge]
+ vbc __builtin_vec_cmpge (vsc, vsc);
+ CMPGE_16QI CMPGE_16QI_VSC
+ vbc __builtin_vec_cmpge (vuc, vuc);
+ CMPGE_16QI CMPGE_16QI_VUC
+ vbs __builtin_vec_cmpge (vss, vss);
+ CMPGE_8HI CMPGE_8HI_VSS
+ vbs __builtin_vec_cmpge (vus, vus);
+ CMPGE_8HI CMPGE_8HI_VUS
+ vbi __builtin_vec_cmpge (vsi, vsi);
+ CMPGE_4SI CMPGE_4SI_VSI
+ vbi __builtin_vec_cmpge (vui, vui);
+ CMPGE_4SI CMPGE_4SI_VUI
+ vbll __builtin_vec_cmpge (vsll, vsll);
+ CMPGE_2DI CMPGE_2DI_VSLL
+ vbll __builtin_vec_cmpge (vull, vull);
+ CMPGE_2DI CMPGE_2DI_VULL
+ vbi __builtin_vec_cmpge (vf, vf);
+ XVCMPGESP
+ vbll __builtin_vec_cmpge (vd, vd);
+ XVCMPGEDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; Also, cmpge is the same as cmpgt for all cases except floating point.
+; There is further code to deal with this special case in
+; altivec_build_resolved_builtin. TODO: Make sure this is still true.
+[VEC_CMPGE_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P VCMPGTUB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P VCMPGTSB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P VCMPGTUH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P VCMPGTSH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P VCMPGTUW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P VCMPGTSW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P VCMPGTUD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P VCMPGTSD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGESP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGEDP_P
+
+[VEC_CMPGT, vec_cmpgt, __builtin_vec_cmpgt]
+ vbc __builtin_vec_cmpgt (vsc, vsc);
+ VCMPGTSB
+ vbc __builtin_vec_cmpgt (vuc, vuc);
+ VCMPGTUB
+ vbs __builtin_vec_cmpgt (vss, vss);
+ VCMPGTSH
+ vbs __builtin_vec_cmpgt (vus, vus);
+ VCMPGTUH
+ vbi __builtin_vec_cmpgt (vsi, vsi);
+ VCMPGTSW
+ vbi __builtin_vec_cmpgt (vui, vui);
+ VCMPGTUW
+ vbll __builtin_vec_cmpgt (vsll, vsll);
+ VCMPGTSD
+ vbll __builtin_vec_cmpgt (vull, vull);
+ VCMPGTUD
+ vbi __builtin_vec_cmpgt (vf, vf);
+ XVCMPGTSP
+ vbll __builtin_vec_cmpgt (vd, vd);
+ XVCMPGTDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPGT_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGTSP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGTDP_P
+
+; Note that there is no entry for VEC_CMPLE. VEC_CMPLE is implemented
+; using VEC_CMPGE with reversed arguments in altivec.h.
+
+; Note that there is no entry for VEC_CMPLT. VEC_CMPLT is implemented
+; using VEC_CMPGT with reversed arguments in altivec.h.
+
+[VEC_CMPNE, vec_cmpne, __builtin_vec_cmpne]
+ vbc __builtin_vec_cmpne (vbc, vbc);
+ VCMPNEB VCMPNEB_VBC
+ vbc __builtin_vec_cmpne (vsc, vsc);
+ VCMPNEB VCMPNEB_VSC
+ vbc __builtin_vec_cmpne (vuc, vuc);
+ VCMPNEB VCMPNEB_VUC
+ vbs __builtin_vec_cmpne (vbs, vbs);
+ VCMPNEH VCMPNEH_VBS
+ vbs __builtin_vec_cmpne (vss, vss);
+ VCMPNEH VCMPNEH_VSS
+ vbs __builtin_vec_cmpne (vus, vus);
+ VCMPNEH VCMPNEH_VUS
+ vbi __builtin_vec_cmpne (vbi, vbi);
+ VCMPNEW VCMPNEW_VBI
+ vbi __builtin_vec_cmpne (vsi, vsi);
+ VCMPNEW VCMPNEW_VSI
+ vbi __builtin_vec_cmpne (vui, vui);
+ VCMPNEW VCMPNEW_VUI
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNE_P, SKIP, __builtin_vec_vcmpne_p]
+ signed int __builtin_vec_vcmpne_p (vsc, vsc);
+ VCMPNEB_P VCMPNEB_VSC_P
+ signed int __builtin_vec_vcmpne_p (vuc, vuc);
+ VCMPNEB_P VCMPNEB_VUC_P
+ signed int __builtin_vec_vcmpne_p (vbc, vbc);
+ VCMPNEB_P VCMPNEB_VBC_P
+ signed int __builtin_vec_vcmpne_p (vss, vss);
+ VCMPNEH_P VCMPNEH_VSS_P
+ signed int __builtin_vec_vcmpne_p (vus, vus);
+ VCMPNEH_P VCMPNEH_VUS_P
+ signed int __builtin_vec_vcmpne_p (vbs, vbs);
+ VCMPNEH_P VCMPNEH_VBS_P
+ signed int __builtin_vec_vcmpne_p (vp, vp);
+ VCMPNEH_P VCMPNEH_VP_P
+ signed int __builtin_vec_vcmpne_p (vsi, vsi);
+ VCMPNEW_P VCMPNEW_VSI_P
+ signed int __builtin_vec_vcmpne_p (vui, vui);
+ VCMPNEW_P VCMPNEW_VUI_P
+ signed int __builtin_vec_vcmpne_p (vbi, vbi);
+ VCMPNEW_P VCMPNEW_VBI_P
+ signed int __builtin_vec_vcmpne_p (vsll, vsll);
+ VCMPNED_P VCMPNED_VSLL_P
+ signed int __builtin_vec_vcmpne_p (vull, vull);
+ VCMPNED_P VCMPNED_VULL_P
+ signed int __builtin_vec_vcmpne_p (vbll, vbll);
+ VCMPNED_P VCMPNED_VBLL_P
+ signed int __builtin_vec_vcmpne_p (vf, vf);
+ VCMPNEFP_P
+ signed int __builtin_vec_vcmpne_p (vd, vd);
+ VCMPNEDP_P
+
+[VEC_CMPNEZ, vec_cmpnez, __builtin_vec_cmpnez, _ARCH_PWR9]
+ vbc __builtin_vec_cmpnez (vsc, vsc);
+ CMPNEZB CMPNEZB_S
+ vbc __builtin_vec_cmpnez (vuc, vuc);
+ CMPNEZB CMPNEZB_U
+ vbs __builtin_vec_cmpnez (vss, vss);
+ CMPNEZH CMPNEZH_S
+ vbs __builtin_vec_cmpnez (vus, vus);
+ CMPNEZH CMPNEZH_U
+ vbi __builtin_vec_cmpnez (vsi, vsi);
+ CMPNEZW CMPNEZW_S
+ vbi __builtin_vec_cmpnez (vui, vui);
+ CMPNEZW CMPNEZW_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNEZ_P, SKIP, __builtin_vec_vcmpnez_p]
+ signed int __builtin_vec_vcmpnez_p (signed int, vsc, vsc);
+ VCMPNEZB_P VCMPNEZB_VSC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vuc, vuc);
+ VCMPNEZB_P VCMPNEZB_VUC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vss, vss);
+ VCMPNEZH_P VCMPNEZH_VSS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vus, vus);
+ VCMPNEZH_P VCMPNEZH_VUS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vsi, vsi);
+ VCMPNEZW_P VCMPNEZW_VSI_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vui, vui);
+ VCMPNEZW_P VCMPNEZW_VUI_P
+
+[VEC_CMPRB, SKIP, __builtin_byte_in_range]
+ signed int __builtin_byte_in_range (unsigned char, unsigned int);
+ CMPRB
+
+[VEC_CMPRB2, SKIP, __builtin_byte_in_either_range]
+  signed int __builtin_byte_in_either_range (unsigned char, unsigned int);
+ CMPRB2
+
+[VEC_CNTLZ, vec_cntlz, __builtin_vec_vclz, _ARCH_PWR8]
+ vsc __builtin_vec_vclz (vsc);
+ VCLZB VCLZB_S
+ vuc __builtin_vec_vclz (vuc);
+ VCLZB VCLZB_U
+ vss __builtin_vec_vclz (vss);
+ VCLZH VCLZH_S
+ vus __builtin_vec_vclz (vus);
+ VCLZH VCLZH_U
+ vsi __builtin_vec_vclz (vsi);
+ VCLZW VCLZW_S
+ vui __builtin_vec_vclz (vui);
+ VCLZW VCLZW_U
+ vsll __builtin_vec_vclz (vsll);
+ VCLZD VCLZD_S
+ vull __builtin_vec_vclz (vull);
+ VCLZD VCLZD_U
+
+[VEC_CNTLZM, vec_cntlzm, __builtin_vec_vclzdm, _ARCH_PWR10]
+ vull __builtin_vec_vclzdm (vull, vull);
+ VCLZDM
+
+[VEC_CNTTZM, vec_cnttzm, __builtin_vec_vctzdm, _ARCH_PWR10]
+ vull __builtin_vec_vctzdm (vull, vull);
+ CNTTZDM
+
+[VEC_CNTLZ_LSBB, vec_cntlz_lsbb, __builtin_vec_vclzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vclzlsbb (vsc);
+ VCLZLSBB_V16QI VCLZLSBB_VSC
+ signed int __builtin_vec_vclzlsbb (vuc);
+ VCLZLSBB_V16QI VCLZLSBB_VUC
+ signed int __builtin_vec_vclzlsbb (vss);
+ VCLZLSBB_V8HI VCLZLSBB_VSS
+ signed int __builtin_vec_vclzlsbb (vus);
+ VCLZLSBB_V8HI VCLZLSBB_VUS
+ signed int __builtin_vec_vclzlsbb (vsi);
+ VCLZLSBB_V4SI VCLZLSBB_VSI
+ signed int __builtin_vec_vclzlsbb (vui);
+ VCLZLSBB_V4SI VCLZLSBB_VUI
+
+[VEC_CNTM, vec_cntm, __builtin_vec_cntm, _ARCH_PWR10]
+ unsigned long long __builtin_vec_cntm (vuc, const int);
+ VCNTMBB
+ unsigned long long __builtin_vec_cntm (vus, const int);
+ VCNTMBH
+ unsigned long long __builtin_vec_cntm (vui, const int);
+ VCNTMBW
+ unsigned long long __builtin_vec_cntm (vull, const int);
+ VCNTMBD
+
+[VEC_CNTTZ, vec_cnttz, __builtin_vec_vctz, _ARCH_PWR9]
+ vsc __builtin_vec_vctz (vsc);
+ VCTZB VCTZB_S
+ vuc __builtin_vec_vctz (vuc);
+ VCTZB VCTZB_U
+ vss __builtin_vec_vctz (vss);
+ VCTZH VCTZH_S
+ vus __builtin_vec_vctz (vus);
+ VCTZH VCTZH_U
+ vsi __builtin_vec_vctz (vsi);
+ VCTZW VCTZW_S
+ vui __builtin_vec_vctz (vui);
+ VCTZW VCTZW_U
+ vsll __builtin_vec_vctz (vsll);
+ VCTZD VCTZD_S
+ vull __builtin_vec_vctz (vull);
+ VCTZD VCTZD_U
+
+[VEC_CNTTZ_LSBB, vec_cnttz_lsbb, __builtin_vec_vctzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vctzlsbb (vsc);
+ VCTZLSBB_V16QI VCTZLSBB_VSC
+ signed int __builtin_vec_vctzlsbb (vuc);
+ VCTZLSBB_V16QI VCTZLSBB_VUC
+ signed int __builtin_vec_vctzlsbb (vss);
+ VCTZLSBB_V8HI VCTZLSBB_VSS
+ signed int __builtin_vec_vctzlsbb (vus);
+ VCTZLSBB_V8HI VCTZLSBB_VUS
+ signed int __builtin_vec_vctzlsbb (vsi);
+ VCTZLSBB_V4SI VCTZLSBB_VSI
+ signed int __builtin_vec_vctzlsbb (vui);
+ VCTZLSBB_V4SI VCTZLSBB_VUI
+
+[VEC_CONVERT_4F32_8I16, SKIP, __builtin_vec_convert_4f32_8i16]
+ vus __builtin_vec_convert_4f32_8i16 (vf, vf);
+ CONVERT_4F32_8I16
+
+[VEC_CONVERT_4F32_8F16, vec_pack_to_short_fp32, __builtin_vec_convert_4f32_8f16, _ARCH_PWR9]
+ vus __builtin_vec_convert_4f32_8f16 (vf, vf);
+ CONVERT_4F32_8F16
+
+[VEC_COPYSIGN, vec_cpsgn, __builtin_vec_copysign]
+ vf __builtin_vec_copysign (vf, vf);
+ CPSGNSP
+ vd __builtin_vec_copysign (vd, vd);
+ CPSGNDP
+
+[VEC_CTF, vec_ctf, __builtin_vec_ctf]
+ vf __builtin_vec_ctf (vsi, const int);
+ VCFSX
+ vf __builtin_vec_ctf (vui, const int);
+ VCFUX
+ vd __builtin_vec_ctf (vsll, const int);
+ XVCVSXDDP_SCALE
+ vd __builtin_vec_ctf (vull, const int);
+ XVCVUXDDP_SCALE
+
+[VEC_CTS, vec_cts, __builtin_vec_cts]
+ vsi __builtin_vec_cts (vf, const int);
+ VCTSXS
+ vsll __builtin_vec_cts (vd, const int);
+ XVCVDPSXDS_SCALE
+
+[VEC_CTU, vec_ctu, __builtin_vec_ctu]
+ vui __builtin_vec_ctu (vf, const int);
+ VCTUXS
+ vull __builtin_vec_ctu (vd, const int);
+ XVCVDPUXDS_SCALE
+
+[VEC_DIV, vec_div, __builtin_vec_div, __VSX__]
+ vsll __builtin_vec_div (vsll, vsll);
+ DIV_V2DI
+ vull __builtin_vec_div (vull, vull);
+ UDIV_V2DI
+ vf __builtin_vec_div (vf, vf);
+ XVDIVSP
+ vd __builtin_vec_div (vd, vd);
+ XVDIVDP
+
+[VEC_DOUBLE, vec_double, __builtin_vec_double]
+ vd __builtin_vec_double (vsll);
+ XVCVSXDDP
+ vd __builtin_vec_double (vull);
+ XVCVUXDDP
+
+[VEC_DOUBLEE, vec_doublee, __builtin_vec_doublee]
+ vd __builtin_vec_doublee (vsi);
+ DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vui);
+ UNS_DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vf);
+ DOUBLEE_V4SF
+
+[VEC_DOUBLEH, vec_doubleh, __builtin_vec_doubleh]
+ vd __builtin_vec_doubleh (vsi);
+ DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vui);
+ UNS_DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vf);
+ DOUBLEH_V4SF
+
+[VEC_DOUBLEL, vec_doublel, __builtin_vec_doublel]
+ vd __builtin_vec_doublel (vsi);
+ DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vui);
+ UNS_DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vf);
+ DOUBLEL_V4SF
+
+[VEC_DOUBLEO, vec_doubleo, __builtin_vec_doubleo]
+ vd __builtin_vec_doubleo (vsi);
+ DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vui);
+ UNS_DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vf);
+ DOUBLEO_V4SF
+
+[VEC_DST, vec_dst, __builtin_vec_dst]
+ void __builtin_vec_dst (unsigned char *, const int, const int);
+ DST DST_UC
+ void __builtin_vec_dst (signed char *, const int, const int);
+ DST DST_SC
+ void __builtin_vec_dst (unsigned short *, const int, const int);
+ DST DST_US
+ void __builtin_vec_dst (signed short *, const int, const int);
+ DST DST_SS
+ void __builtin_vec_dst (unsigned int *, const int, const int);
+ DST DST_UI
+ void __builtin_vec_dst (signed int *, const int, const int);
+ DST DST_SI
+ void __builtin_vec_dst (unsigned long long *, const int, const int);
+ DST DST_ULL
+ void __builtin_vec_dst (signed long long *, const int, const int);
+ DST DST_SLL
+ void __builtin_vec_dst (float *, const int, const int);
+ DST DST_F
+ void __builtin_vec_dst (vuc *, const int, const int);
+ DST DST_VUC
+ void __builtin_vec_dst (vsc *, const int, const int);
+ DST DST_VSC
+ void __builtin_vec_dst (vbc *, const int, const int);
+ DST DST_VBC
+ void __builtin_vec_dst (vus *, const int, const int);
+ DST DST_VUS
+ void __builtin_vec_dst (vss *, const int, const int);
+ DST DST_VSS
+ void __builtin_vec_dst (vbs *, const int, const int);
+ DST DST_VBS
+ void __builtin_vec_dst (vp *, const int, const int);
+ DST DST_VP
+ void __builtin_vec_dst (vui *, const int, const int);
+ DST DST_VUI
+ void __builtin_vec_dst (vsi *, const int, const int);
+ DST DST_VSI
+ void __builtin_vec_dst (vbi *, const int, const int);
+ DST DST_VBI
+ void __builtin_vec_dst (vf *, const int, const int);
+ DST DST_VF
+
+[VEC_DSTST, vec_dstst, __builtin_vec_dstst]
+ void __builtin_vec_dstst (unsigned char *, const int, const int);
+ DSTST DSTST_UC
+ void __builtin_vec_dstst (signed char *, const int, const int);
+ DSTST DSTST_SC
+ void __builtin_vec_dstst (unsigned short *, const int, const int);
+ DSTST DSTST_US
+ void __builtin_vec_dstst (signed short *, const int, const int);
+ DSTST DSTST_SS
+ void __builtin_vec_dstst (unsigned int *, const int, const int);
+ DSTST DSTST_UI
+ void __builtin_vec_dstst (signed int *, const int, const int);
+ DSTST DSTST_SI
+ void __builtin_vec_dstst (unsigned long long *, const int, const int);
+ DSTST DSTST_ULL
+ void __builtin_vec_dstst (signed long long *, const int, const int);
+ DSTST DSTST_SLL
+ void __builtin_vec_dstst (float *, const int, const int);
+ DSTST DSTST_F
+ void __builtin_vec_dstst (vuc *, const int, const int);
+ DSTST DSTST_VUC
+ void __builtin_vec_dstst (vsc *, const int, const int);
+ DSTST DSTST_VSC
+ void __builtin_vec_dstst (vbc *, const int, const int);
+ DSTST DSTST_VBC
+ void __builtin_vec_dstst (vus *, const int, const int);
+ DSTST DSTST_VUS
+ void __builtin_vec_dstst (vss *, const int, const int);
+ DSTST DSTST_VSS
+ void __builtin_vec_dstst (vbs *, const int, const int);
+ DSTST DSTST_VBS
+ void __builtin_vec_dstst (vp *, const int, const int);
+ DSTST DSTST_VP
+ void __builtin_vec_dstst (vui *, const int, const int);
+ DSTST DSTST_VUI
+ void __builtin_vec_dstst (vsi *, const int, const int);
+ DSTST DSTST_VSI
+ void __builtin_vec_dstst (vbi *, const int, const int);
+ DSTST DSTST_VBI
+ void __builtin_vec_dstst (vf *, const int, const int);
+ DSTST DSTST_VF
+
+[VEC_DSTSTT, vec_dststt, __builtin_vec_dststt]
+ void __builtin_vec_dststt (unsigned char *, const int, const int);
+ DSTSTT DSTSTT_UC
+ void __builtin_vec_dststt (signed char *, const int, const int);
+ DSTSTT DSTSTT_SC
+ void __builtin_vec_dststt (unsigned short *, const int, const int);
+ DSTSTT DSTSTT_US
+ void __builtin_vec_dststt (signed short *, const int, const int);
+ DSTSTT DSTSTT_SS
+ void __builtin_vec_dststt (unsigned int *, const int, const int);
+ DSTSTT DSTSTT_UI
+ void __builtin_vec_dststt (signed int *, const int, const int);
+ DSTSTT DSTSTT_SI
+ void __builtin_vec_dststt (unsigned long long *, const int, const int);
+ DSTSTT DSTSTT_ULL
+ void __builtin_vec_dststt (signed long long *, const int, const int);
+ DSTSTT DSTSTT_SLL
+ void __builtin_vec_dststt (float *, const int, const int);
+ DSTSTT DSTSTT_F
+ void __builtin_vec_dststt (vuc *, const int, const int);
+ DSTSTT DSTSTT_VUC
+ void __builtin_vec_dststt (vsc *, const int, const int);
+ DSTSTT DSTSTT_VSC
+ void __builtin_vec_dststt (vbc *, const int, const int);
+ DSTSTT DSTSTT_VBC
+ void __builtin_vec_dststt (vus *, const int, const int);
+ DSTSTT DSTSTT_VUS
+ void __builtin_vec_dststt (vss *, const int, const int);
+ DSTSTT DSTSTT_VSS
+ void __builtin_vec_dststt (vbs *, const int, const int);
+ DSTSTT DSTSTT_VBS
+ void __builtin_vec_dststt (vp *, const int, const int);
+ DSTSTT DSTSTT_VP
+ void __builtin_vec_dststt (vui *, const int, const int);
+ DSTSTT DSTSTT_VUI
+ void __builtin_vec_dststt (vsi *, const int, const int);
+ DSTSTT DSTSTT_VSI
+ void __builtin_vec_dststt (vbi *, const int, const int);
+ DSTSTT DSTSTT_VBI
+ void __builtin_vec_dststt (vf *, const int, const int);
+ DSTSTT DSTSTT_VF
+
+[VEC_DSTT, vec_dstt, __builtin_vec_dstt]
+ void __builtin_vec_dstt (unsigned char *, const int, const int);
+ DSTT DSTT_UC
+ void __builtin_vec_dstt (signed char *, const int, const int);
+ DSTT DSTT_SC
+ void __builtin_vec_dstt (unsigned short *, const int, const int);
+ DSTT DSTT_US
+ void __builtin_vec_dstt (signed short *, const int, const int);
+ DSTT DSTT_SS
+ void __builtin_vec_dstt (unsigned int *, const int, const int);
+ DSTT DSTT_UI
+ void __builtin_vec_dstt (signed int *, const int, const int);
+ DSTT DSTT_SI
+ void __builtin_vec_dstt (unsigned long long *, const int, const int);
+ DSTT DSTT_ULL
+ void __builtin_vec_dstt (signed long long *, const int, const int);
+ DSTT DSTT_SLL
+ void __builtin_vec_dstt (float *, const int, const int);
+ DSTT DSTT_F
+ void __builtin_vec_dstt (vuc *, const int, const int);
+ DSTT DSTT_VUC
+ void __builtin_vec_dstt (vsc *, const int, const int);
+ DSTT DSTT_VSC
+ void __builtin_vec_dstt (vbc *, const int, const int);
+ DSTT DSTT_VBC
+ void __builtin_vec_dstt (vus *, const int, const int);
+ DSTT DSTT_VUS
+ void __builtin_vec_dstt (vss *, const int, const int);
+ DSTT DSTT_VSS
+ void __builtin_vec_dstt (vbs *, const int, const int);
+ DSTT DSTT_VBS
+ void __builtin_vec_dstt (vp *, const int, const int);
+ DSTT DSTT_VP
+ void __builtin_vec_dstt (vui *, const int, const int);
+ DSTT DSTT_VUI
+ void __builtin_vec_dstt (vsi *, const int, const int);
+ DSTT DSTT_VSI
+ void __builtin_vec_dstt (vbi *, const int, const int);
+ DSTT DSTT_VBI
+ void __builtin_vec_dstt (vf *, const int, const int);
+ DSTT DSTT_VF
+
+[VEC_EQV, vec_eqv, __builtin_vec_eqv, _ARCH_PWR8]
+ vsc __builtin_vec_eqv (vsc, vsc);
+ EQV_V16QI
+ vuc __builtin_vec_eqv (vuc, vuc);
+ EQV_V16QI_UNS EQV_V16QI_VUC
+ vbc __builtin_vec_eqv (vbc, vbc);
+ EQV_V16QI_UNS EQV_V16QI_VBC
+ vss __builtin_vec_eqv (vss, vss);
+ EQV_V8HI
+ vus __builtin_vec_eqv (vus, vus);
+ EQV_V8HI_UNS EQV_V8HI_VUS
+ vbs __builtin_vec_eqv (vbs, vbs);
+ EQV_V8HI_UNS EQV_V8HI_VBS
+ vsi __builtin_vec_eqv (vsi, vsi);
+ EQV_V4SI
+ vui __builtin_vec_eqv (vui, vui);
+ EQV_V4SI_UNS EQV_V4SI_VUI
+ vbi __builtin_vec_eqv (vbi, vbi);
+ EQV_V4SI_UNS EQV_V4SI_VBI
+ vsll __builtin_vec_eqv (vsll, vsll);
+ EQV_V2DI
+ vull __builtin_vec_eqv (vull, vull);
+ EQV_V2DI_UNS EQV_V2DI_VULL
+ vbll __builtin_vec_eqv (vbll, vbll);
+ EQV_V2DI_UNS EQV_V2DI_VBLL
+ vf __builtin_vec_eqv (vf, vf);
+ EQV_V4SF
+ vd __builtin_vec_eqv (vd, vd);
+ EQV_V2DF
+
+[VEC_EXPANDM, vec_expandm, __builtin_vec_vexpandm, _ARCH_PWR10]
+ vuc __builtin_vec_vexpandm (vuc);
+ VEXPANDMB
+ vus __builtin_vec_vexpandm (vus);
+ VEXPANDMH
+ vui __builtin_vec_vexpandm (vui);
+ VEXPANDMW
+ vull __builtin_vec_vexpandm (vull);
+ VEXPANDMD
+ vuq __builtin_vec_vexpandm (vuq);
+ VEXPANDMQ
+
+[VEC_EXPTE, vec_expte, __builtin_vec_expte]
+ vf __builtin_vec_expte (vf);
+ VEXPTEFP
+
+[VEC_EXTRACTM, vec_extractm, __builtin_vec_vextractm, _ARCH_PWR10]
+ signed int __builtin_vec_vextractm (vuc);
+ VEXTRACTMB
+ signed int __builtin_vec_vextractm (vus);
+ VEXTRACTMH
+ signed int __builtin_vec_vextractm (vui);
+ VEXTRACTMW
+ signed int __builtin_vec_vextractm (vull);
+ VEXTRACTMD
+ signed int __builtin_vec_vextractm (vuq);
+ VEXTRACTMQ
+
+[VEC_EXTRACT_FP_FROM_SHORTH, vec_extract_fp32_from_shorth, __builtin_vec_vextract_fp_from_shorth, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shorth (vus);
+ VEXTRACT_FP_FROM_SHORTH
+
+[VEC_EXTRACT_FP_FROM_SHORTL, vec_extract_fp32_from_shortl, __builtin_vec_vextract_fp_from_shortl, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shortl (vus);
+ VEXTRACT_FP_FROM_SHORTL
+
+[VEC_EXTRACTH, vec_extracth, __builtin_vec_extracth, _ARCH_PWR10]
+ vull __builtin_vec_extracth (vuc, vuc, unsigned char);
+ VEXTRACTBR
+ vull __builtin_vec_extracth (vus, vus, unsigned char);
+ VEXTRACTHR
+ vull __builtin_vec_extracth (vui, vui, unsigned char);
+ VEXTRACTWR
+ vull __builtin_vec_extracth (vull, vull, unsigned char);
+ VEXTRACTDR
+
+[VEC_EXTRACTL, vec_extractl, __builtin_vec_extractl, _ARCH_PWR10]
+ vull __builtin_vec_extractl (vuc, vuc, unsigned char);
+ VEXTRACTBL
+ vull __builtin_vec_extractl (vus, vus, unsigned char);
+ VEXTRACTHL
+ vull __builtin_vec_extractl (vui, vui, unsigned char);
+ VEXTRACTWL
+ vull __builtin_vec_extractl (vull, vull, unsigned char);
+ VEXTRACTDL
+
+[VEC_EXTRACT4B, vec_extract4b, __builtin_vec_extract4b, _ARCH_PWR9]
+ vull __builtin_vec_extract4b (vuc, const int);
+ EXTRACT4B
+
+; TODO: These look like invalid return-type conversions (also present in the
+; earlier implementation).  We need versions of __builtin_altivec_vextu*[lr]x
+; that return signed values and floats.
+[VEC_EXTULX, vec_xlx, __builtin_vec_vextulx, _ARCH_PWR9]
+ signed char __builtin_vec_vextulx (unsigned int, vsc);
+ VEXTUBLX VEXTUBLX_S
+ unsigned char __builtin_vec_vextulx (unsigned int, vuc);
+ VEXTUBLX VEXTUBLX_U
+ signed short __builtin_vec_vextulx (unsigned int, vss);
+ VEXTUHLX VEXTUHLX_S
+ unsigned short __builtin_vec_vextulx (unsigned int, vus);
+ VEXTUHLX VEXTUHLX_U
+ signed int __builtin_vec_vextulx (unsigned int, vsi);
+ VEXTUWLX VEXTUWLX_S
+ unsigned int __builtin_vec_vextulx (unsigned int, vui);
+ VEXTUWLX VEXTUWLX_U
+ float __builtin_vec_vextulx (unsigned int, vf);
+ VEXTUWLX VEXTUWLX_F
+
+; TODO: These look like invalid return-type conversions (also present in the
+; earlier implementation).  We need versions of __builtin_altivec_vextu*[lr]x
+; that return signed values and floats.
+[VEC_EXTURX, vec_xrx, __builtin_vec_vexturx, _ARCH_PWR9]
+ signed char __builtin_vec_vexturx (unsigned int, vsc);
+ VEXTUBRX VEXTUBRX_S
+ unsigned char __builtin_vec_vexturx (unsigned int, vuc);
+ VEXTUBRX VEXTUBRX_U
+ signed short __builtin_vec_vexturx (unsigned int, vss);
+ VEXTUHRX VEXTUHRX_S
+ unsigned short __builtin_vec_vexturx (unsigned int, vus);
+ VEXTUHRX VEXTUHRX_U
+ signed int __builtin_vec_vexturx (unsigned int, vsi);
+ VEXTUWRX VEXTUWRX_S
+ unsigned int __builtin_vec_vexturx (unsigned int, vui);
+ VEXTUWRX VEXTUWRX_U
+ float __builtin_vec_vexturx (unsigned int, vf);
+ VEXTUWRX VEXTUWRX_F
+
+[VEC_FIRSTMATCHINDEX, vec_first_match_index, __builtin_vec_first_match_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_index (vsc, vsc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_match_index (vuc, vuc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_match_index (vss, vss);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_match_index (vus, vus);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_match_index (vsi, vsi);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_match_index (vui, vui);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VUI
+
+[VEC_FIRSTMATCHOREOSINDEX, vec_first_match_or_eos_index, __builtin_vec_first_match_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_or_eos_index (vsc, vsc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_match_or_eos_index (vuc, vuc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_match_or_eos_index (vss, vss);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_match_or_eos_index (vus, vus);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_match_or_eos_index (vsi, vsi);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_match_or_eos_index (vui, vui);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VUI
+
+[VEC_FIRSTMISMATCHINDEX, vec_first_mismatch_index, __builtin_vec_first_mismatch_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_index (vsc, vsc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_index (vuc, vuc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_index (vss, vss);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_index (vus, vus);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_index (vsi, vsi);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_index (vui, vui);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VUI
+
+[VEC_FIRSTMISMATCHOREOSINDEX, vec_first_mismatch_or_eos_index, __builtin_vec_first_mismatch_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsc, vsc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vuc, vuc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vss, vss);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vus, vus);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsi, vsi);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vui, vui);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VUI
+
+[VEC_FLOAT, vec_float, __builtin_vec_float]
+ vf __builtin_vec_float (vsi);
+ XVCVSXWSP_V4SF
+ vf __builtin_vec_float (vui);
+ XVCVUXWSP_V4SF
+
+[VEC_FLOAT2, vec_float2, __builtin_vec_float2]
+ vf __builtin_vec_float2 (vsll, vsll);
+ FLOAT2_V2DI
+ vf __builtin_vec_float2 (vull, vull);
+ UNS_FLOAT2_V2DI
+ vf __builtin_vec_float2 (vd, vd);
+ FLOAT2_V2DF
+
+[VEC_FLOATE, vec_floate, __builtin_vec_floate]
+ vf __builtin_vec_floate (vsll);
+ FLOATE_V2DI
+ vf __builtin_vec_floate (vull);
+ UNS_FLOATE_V2DI
+ vf __builtin_vec_floate (vd);
+ FLOATE_V2DF
+
+[VEC_FLOATO, vec_floato, __builtin_vec_floato]
+ vf __builtin_vec_floato (vsll);
+ FLOATO_V2DI
+ vf __builtin_vec_floato (vull);
+ UNS_FLOATO_V2DI
+ vf __builtin_vec_floato (vd);
+ FLOATO_V2DF
+
+[VEC_FLOOR, vec_floor, __builtin_vec_floor]
+ vf __builtin_vec_floor (vf);
+ XVRSPIM
+ vd __builtin_vec_floor (vd);
+ XVRDPIM
+
+[VEC_GB, vec_gb, __builtin_vec_vgbbd, _ARCH_PWR8]
+ vsc __builtin_vec_vgbbd (vsc);
+ VGBBD VGBBD_S
+ vuc __builtin_vec_vgbbd (vuc);
+ VGBBD VGBBD_U
+
+[VEC_GENBM, vec_genbm, __builtin_vec_mtvsrbm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrbm (unsigned long long);
+ MTVSRBM
+
+[VEC_GENHM, vec_genhm, __builtin_vec_mtvsrhm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrhm (unsigned long long);
+ MTVSRHM
+
+[VEC_GENWM, vec_genwm, __builtin_vec_mtvsrwm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrwm (unsigned long long);
+ MTVSRWM
+
+[VEC_GENDM, vec_gendm, __builtin_vec_mtvsrdm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrdm (unsigned long long);
+ MTVSRDM
+
+[VEC_GENQM, vec_genqm, __builtin_vec_mtvsrqm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrqm (unsigned long long);
+ MTVSRQM
+
+[VEC_GENPCVM, vec_genpcvm, __builtin_vec_xxgenpcvm, _ARCH_PWR10]
+ vuc __builtin_vec_xxgenpcvm (vuc, const int);
+ XXGENPCVM_V16QI
+ vus __builtin_vec_xxgenpcvm (vus, const int);
+ XXGENPCVM_V8HI
+ vui __builtin_vec_xxgenpcvm (vui, const int);
+ XXGENPCVM_V4SI
+ vull __builtin_vec_xxgenpcvm (vull, const int);
+ XXGENPCVM_V2DI
+
+[VEC_GNB, vec_gnb, __builtin_vec_gnb, _ARCH_PWR10]
+ vull __builtin_vec_gnb (vuq, unsigned char);
+ VGNB
+
+[VEC_INSERTH, vec_inserth, __builtin_vec_inserth, _ARCH_PWR10]
+ vuc __builtin_vec_inserth (unsigned char, vuc, unsigned int);
+ VINSERTGPRBR
+ vuc __builtin_vec_inserth (vuc, vuc, unsigned int);
+ VINSERTVPRBR
+ vus __builtin_vec_inserth (unsigned short, vus, unsigned int);
+ VINSERTGPRHR
+ vus __builtin_vec_inserth (vus, vus, unsigned int);
+ VINSERTVPRHR
+ vui __builtin_vec_inserth (unsigned int, vui, unsigned int);
+ VINSERTGPRWR
+ vui __builtin_vec_inserth (vui, vui, unsigned int);
+ VINSERTVPRWR
+ vull __builtin_vec_inserth (unsigned long long, vull, unsigned int);
+ VINSERTGPRDR
+
+[VEC_INSERTL, vec_insertl, __builtin_vec_insertl, _ARCH_PWR10]
+ vuc __builtin_vec_insertl (unsigned char, vuc, unsigned int);
+ VINSERTGPRBL
+ vuc __builtin_vec_insertl (vuc, vuc, unsigned int);
+ VINSERTVPRBL
+ vus __builtin_vec_insertl (unsigned short, vus, unsigned int);
+ VINSERTGPRHL
+ vus __builtin_vec_insertl (vus, vus, unsigned int);
+ VINSERTVPRHL
+ vui __builtin_vec_insertl (unsigned int, vui, unsigned int);
+ VINSERTGPRWL
+ vui __builtin_vec_insertl (vui, vui, unsigned int);
+ VINSERTVPRWL
+ vull __builtin_vec_insertl (unsigned long long, vull, unsigned int);
+ VINSERTGPRDL
+
+[VEC_INSERT4B, vec_insert4b, __builtin_vec_insert4b, _ARCH_PWR9]
+ vuc __builtin_vec_insert4b (vsi, vuc, const int);
+ INSERT4B INSERT4B_S
+ vuc __builtin_vec_insert4b (vui, vuc, const int);
+ INSERT4B INSERT4B_U
+
+[VEC_LD, vec_ld, __builtin_vec_ld]
+ vsc __builtin_vec_ld (signed long long, vsc *);
+ LVX_V16QI LVX_V16QI_VSC
+ vsc __builtin_vec_ld (signed long long, signed char *);
+ LVX_V16QI LVX_V16QI_SC
+ vuc __builtin_vec_ld (signed long long, vuc *);
+ LVX_V16QI LVX_V16QI_VUC
+ vuc __builtin_vec_ld (signed long long, unsigned char *);
+ LVX_V16QI LVX_V16QI_UC
+ vbc __builtin_vec_ld (signed long long, vbc *);
+ LVX_V16QI LVX_V16QI_VBC
+ vss __builtin_vec_ld (signed long long, vss *);
+ LVX_V8HI LVX_V8HI_VSS
+ vss __builtin_vec_ld (signed long long, signed short *);
+ LVX_V8HI LVX_V8HI_SS
+ vus __builtin_vec_ld (signed long long, vus *);
+ LVX_V8HI LVX_V8HI_VUS
+ vus __builtin_vec_ld (signed long long, unsigned short *);
+ LVX_V8HI LVX_V8HI_US
+ vbs __builtin_vec_ld (signed long long, vbs *);
+ LVX_V8HI LVX_V8HI_VBS
+ vp __builtin_vec_ld (signed long long, vp *);
+ LVX_V8HI LVX_V8HI_VP
+ vsi __builtin_vec_ld (signed long long, vsi *);
+ LVX_V4SI LVX_V4SI_VSI
+ vsi __builtin_vec_ld (signed long long, signed int *);
+ LVX_V4SI LVX_V4SI_SI
+ vui __builtin_vec_ld (signed long long, vui *);
+ LVX_V4SI LVX_V4SI_VUI
+ vui __builtin_vec_ld (signed long long, unsigned int *);
+ LVX_V4SI LVX_V4SI_UI
+ vbi __builtin_vec_ld (signed long long, vbi *);
+ LVX_V4SI LVX_V4SI_VBI
+ vsll __builtin_vec_ld (signed long long, vsll *);
+ LVX_V2DI LVX_V2DI_VSLL
+ vsll __builtin_vec_ld (signed long long, signed long long *);
+ LVX_V2DI LVX_V2DI_SLL
+ vull __builtin_vec_ld (signed long long, vull *);
+ LVX_V2DI LVX_V2DI_VULL
+ vull __builtin_vec_ld (signed long long, unsigned long long *);
+ LVX_V2DI LVX_V2DI_ULL
+ vbll __builtin_vec_ld (signed long long, vbll *);
+ LVX_V2DI LVX_V2DI_VBLL
+ vsq __builtin_vec_ld (signed long long, const vsq *);
+ LVX_V1TI LVX_V1TI_VSQ
+ vuq __builtin_vec_ld (signed long long, const vuq *);
+ LVX_V1TI LVX_V1TI_VUQ
+ vsq __builtin_vec_ld (signed long long, __int128 *);
+ LVX_V1TI LVX_V1TI_TI
+ vuq __builtin_vec_ld (signed long long, unsigned __int128 *);
+ LVX_V1TI LVX_V1TI_UTI
+ vf __builtin_vec_ld (signed long long, vf *);
+ LVX_V4SF LVX_V4SF_VF
+ vf __builtin_vec_ld (signed long long, float *);
+ LVX_V4SF LVX_V4SF_F
+ vd __builtin_vec_ld (signed long long, vd *);
+ LVX_V2DF LVX_V2DF_VD
+ vd __builtin_vec_ld (signed long long, double *);
+ LVX_V2DF LVX_V2DF_D
+
+[VEC_LDE, vec_lde, __builtin_vec_lde]
+ vsc __builtin_vec_lde (signed long long, signed char *);
+ LVEBX LVEBX_SC
+ vuc __builtin_vec_lde (signed long long, unsigned char *);
+ LVEBX LVEBX_UC
+ vss __builtin_vec_lde (signed long long, signed short *);
+ LVEHX LVEHX_SS
+ vus __builtin_vec_lde (signed long long, unsigned short *);
+ LVEHX LVEHX_US
+ vsi __builtin_vec_lde (signed long long, signed int *);
+ LVEWX LVEWX_SI
+ vui __builtin_vec_lde (signed long long, unsigned int *);
+ LVEWX LVEWX_UI
+ vf __builtin_vec_lde (signed long long, float *);
+ LVEWX LVEWX_F
+
+[VEC_LDL, vec_ldl, __builtin_vec_ldl]
+ vsc __builtin_vec_ldl (signed long long, vsc *);
+ LVXL_V16QI LVXL_V16QI_VSC
+ vsc __builtin_vec_ldl (signed long long, signed char *);
+ LVXL_V16QI LVXL_V16QI_SC
+ vuc __builtin_vec_ldl (signed long long, vuc *);
+ LVXL_V16QI LVXL_V16QI_VUC
+ vuc __builtin_vec_ldl (signed long long, unsigned char *);
+ LVXL_V16QI LVXL_V16QI_UC
+ vbc __builtin_vec_ldl (signed long long, vbc *);
+ LVXL_V16QI LVXL_V16QI_VBC
+ vss __builtin_vec_ldl (signed long long, vss *);
+ LVXL_V8HI LVXL_V8HI_VSS
+ vss __builtin_vec_ldl (signed long long, signed short *);
+ LVXL_V8HI LVXL_V8HI_SS
+ vus __builtin_vec_ldl (signed long long, vus *);
+ LVXL_V8HI LVXL_V8HI_VUS
+ vus __builtin_vec_ldl (signed long long, unsigned short *);
+ LVXL_V8HI LVXL_V8HI_US
+ vbs __builtin_vec_ldl (signed long long, vbs *);
+ LVXL_V8HI LVXL_V8HI_VBS
+ vp __builtin_vec_ldl (signed long long, vp *);
+ LVXL_V8HI LVXL_V8HI_VP
+ vsi __builtin_vec_ldl (signed long long, vsi *);
+ LVXL_V4SI LVXL_V4SI_VSI
+ vsi __builtin_vec_ldl (signed long long, signed int *);
+ LVXL_V4SI LVXL_V4SI_SI
+ vui __builtin_vec_ldl (signed long long, vui *);
+ LVXL_V4SI LVXL_V4SI_VUI
+ vui __builtin_vec_ldl (signed long long, unsigned int *);
+ LVXL_V4SI LVXL_V4SI_UI
+ vbi __builtin_vec_ldl (signed long long, vbi *);
+ LVXL_V4SI LVXL_V4SI_VBI
+ vsll __builtin_vec_ldl (signed long long, vsll *);
+ LVXL_V2DI LVXL_V2DI_VSLL
+ vsll __builtin_vec_ldl (signed long long, signed long long *);
+ LVXL_V2DI LVXL_V2DI_SLL
+ vull __builtin_vec_ldl (signed long long, vull *);
+ LVXL_V2DI LVXL_V2DI_VULL
+ vull __builtin_vec_ldl (signed long long, unsigned long long *);
+ LVXL_V2DI LVXL_V2DI_ULL
+ vbll __builtin_vec_ldl (signed long long, vbll *);
+ LVXL_V2DI LVXL_V2DI_VBLL
+ vsq __builtin_vec_ldl (signed long long, const vsq *);
+ LVXL_V1TI LVXL_V1TI_VSQ
+ vuq __builtin_vec_ldl (signed long long, const vuq *);
+ LVXL_V1TI LVXL_V1TI_VUQ
+ vsq __builtin_vec_ldl (signed long long, __int128 *);
+ LVXL_V1TI LVXL_V1TI_TI
+ vuq __builtin_vec_ldl (signed long long, unsigned __int128 *);
+ LVXL_V1TI LVXL_V1TI_UTI
+ vf __builtin_vec_ldl (signed long long, vf *);
+ LVXL_V4SF LVXL_V4SF_VF
+ vf __builtin_vec_ldl (signed long long, float *);
+ LVXL_V4SF LVXL_V4SF_F
+ vd __builtin_vec_ldl (signed long long, vd *);
+ LVXL_V2DF LVXL_V2DF_VD
+ vd __builtin_vec_ldl (signed long long, double *);
+ LVXL_V2DF LVXL_V2DF_D
+
+[VEC_LOGE, vec_loge, __builtin_vec_loge]
+ vf __builtin_vec_loge (vf);
+ VLOGEFP
+
+[VEC_LVLX, vec_lvlx, __builtin_vec_lvlx, __PPU__]
+ vbc __builtin_vec_lvlx (signed long long, vbc *);
+ LVLX LVLX_VBC
+ vsc __builtin_vec_lvlx (signed long long, vsc *);
+ LVLX LVLX_VSC
+ vsc __builtin_vec_lvlx (signed long long, signed char *);
+ LVLX LVLX_SC
+ vuc __builtin_vec_lvlx (signed long long, vuc *);
+ LVLX LVLX_VUC
+ vuc __builtin_vec_lvlx (signed long long, unsigned char *);
+ LVLX LVLX_UC
+ vbs __builtin_vec_lvlx (signed long long, vbs *);
+ LVLX LVLX_VBS
+ vss __builtin_vec_lvlx (signed long long, vss *);
+ LVLX LVLX_VSS
+ vss __builtin_vec_lvlx (signed long long, signed short *);
+ LVLX LVLX_SS
+ vus __builtin_vec_lvlx (signed long long, vus *);
+ LVLX LVLX_VUS
+ vus __builtin_vec_lvlx (signed long long, unsigned short *);
+ LVLX LVLX_US
+ vp __builtin_vec_lvlx (signed long long, vp *);
+ LVLX LVLX_VP
+ vbi __builtin_vec_lvlx (signed long long, vbi *);
+ LVLX LVLX_VBI
+ vsi __builtin_vec_lvlx (signed long long, vsi *);
+ LVLX LVLX_VSI
+ vsi __builtin_vec_lvlx (signed long long, signed int *);
+ LVLX LVLX_SI
+ vui __builtin_vec_lvlx (signed long long, vui *);
+ LVLX LVLX_VUI
+ vui __builtin_vec_lvlx (signed long long, unsigned int *);
+ LVLX LVLX_UI
+ vf __builtin_vec_lvlx (signed long long, vf *);
+ LVLX LVLX_VF
+ vf __builtin_vec_lvlx (signed long long, float *);
+ LVLX LVLX_F
+
+[VEC_LVLXL, vec_lvlxl, __builtin_vec_lvlxl, __PPU__]
+ vbc __builtin_vec_lvlxl (signed long long, vbc *);
+ LVLXL LVLXL_VBC
+ vsc __builtin_vec_lvlxl (signed long long, vsc *);
+ LVLXL LVLXL_VSC
+ vsc __builtin_vec_lvlxl (signed long long, signed char *);
+ LVLXL LVLXL_SC
+ vuc __builtin_vec_lvlxl (signed long long, vuc *);
+ LVLXL LVLXL_VUC
+ vuc __builtin_vec_lvlxl (signed long long, unsigned char *);
+ LVLXL LVLXL_UC
+ vbs __builtin_vec_lvlxl (signed long long, vbs *);
+ LVLXL LVLXL_VBS
+ vss __builtin_vec_lvlxl (signed long long, vss *);
+ LVLXL LVLXL_VSS
+ vss __builtin_vec_lvlxl (signed long long, signed short *);
+ LVLXL LVLXL_SS
+ vus __builtin_vec_lvlxl (signed long long, vus *);
+ LVLXL LVLXL_VUS
+ vus __builtin_vec_lvlxl (signed long long, unsigned short *);
+ LVLXL LVLXL_US
+ vp __builtin_vec_lvlxl (signed long long, vp *);
+ LVLXL LVLXL_VP
+ vbi __builtin_vec_lvlxl (signed long long, vbi *);
+ LVLXL LVLXL_VBI
+ vsi __builtin_vec_lvlxl (signed long long, vsi *);
+ LVLXL LVLXL_VSI
+ vsi __builtin_vec_lvlxl (signed long long, signed int *);
+ LVLXL LVLXL_SI
+ vui __builtin_vec_lvlxl (signed long long, vui *);
+ LVLXL LVLXL_VUI
+ vui __builtin_vec_lvlxl (signed long long, unsigned int *);
+ LVLXL LVLXL_UI
+ vf __builtin_vec_lvlxl (signed long long, vf *);
+ LVLXL LVLXL_VF
+ vf __builtin_vec_lvlxl (signed long long, float *);
+ LVLXL LVLXL_F
+
+[VEC_LVRX, vec_lvrx, __builtin_vec_lvrx, __PPU__]
+ vbc __builtin_vec_lvrx (signed long long, vbc *);
+ LVRX LVRX_VBC
+ vsc __builtin_vec_lvrx (signed long long, vsc *);
+ LVRX LVRX_VSC
+ vsc __builtin_vec_lvrx (signed long long, signed char *);
+ LVRX LVRX_SC
+ vuc __builtin_vec_lvrx (signed long long, vuc *);
+ LVRX LVRX_VUC
+ vuc __builtin_vec_lvrx (signed long long, unsigned char *);
+ LVRX LVRX_UC
+ vbs __builtin_vec_lvrx (signed long long, vbs *);
+ LVRX LVRX_VBS
+ vss __builtin_vec_lvrx (signed long long, vss *);
+ LVRX LVRX_VSS
+ vss __builtin_vec_lvrx (signed long long, signed short *);
+ LVRX LVRX_SS
+ vus __builtin_vec_lvrx (signed long long, vus *);
+ LVRX LVRX_VUS
+ vus __builtin_vec_lvrx (signed long long, unsigned short *);
+ LVRX LVRX_US
+ vp __builtin_vec_lvrx (signed long long, vp *);
+ LVRX LVRX_VP
+ vbi __builtin_vec_lvrx (signed long long, vbi *);
+ LVRX LVRX_VBI
+ vsi __builtin_vec_lvrx (signed long long, vsi *);
+ LVRX LVRX_VSI
+ vsi __builtin_vec_lvrx (signed long long, signed int *);
+ LVRX LVRX_SI
+ vui __builtin_vec_lvrx (signed long long, vui *);
+ LVRX LVRX_VUI
+ vui __builtin_vec_lvrx (signed long long, unsigned int *);
+ LVRX LVRX_UI
+ vf __builtin_vec_lvrx (signed long long, vf *);
+ LVRX LVRX_VF
+ vf __builtin_vec_lvrx (signed long long, float *);
+ LVRX LVRX_F
+
+[VEC_LVRXL, vec_lvrxl, __builtin_vec_lvrxl, __PPU__]
+ vbc __builtin_vec_lvrxl (signed long long, vbc *);
+ LVRXL LVRXL_VBC
+ vsc __builtin_vec_lvrxl (signed long long, vsc *);
+ LVRXL LVRXL_VSC
+ vsc __builtin_vec_lvrxl (signed long long, signed char *);
+ LVRXL LVRXL_SC
+ vuc __builtin_vec_lvrxl (signed long long, vuc *);
+ LVRXL LVRXL_VUC
+ vuc __builtin_vec_lvrxl (signed long long, unsigned char *);
+ LVRXL LVRXL_UC
+ vbs __builtin_vec_lvrxl (signed long long, vbs *);
+ LVRXL LVRXL_VBS
+ vss __builtin_vec_lvrxl (signed long long, vss *);
+ LVRXL LVRXL_VSS
+ vss __builtin_vec_lvrxl (signed long long, signed short *);
+ LVRXL LVRXL_SS
+ vus __builtin_vec_lvrxl (signed long long, vus *);
+ LVRXL LVRXL_VUS
+ vus __builtin_vec_lvrxl (signed long long, unsigned short *);
+ LVRXL LVRXL_US
+ vp __builtin_vec_lvrxl (signed long long, vp *);
+ LVRXL LVRXL_VP
+ vbi __builtin_vec_lvrxl (signed long long, vbi *);
+ LVRXL LVRXL_VBI
+ vsi __builtin_vec_lvrxl (signed long long, vsi *);
+ LVRXL LVRXL_VSI
+ vsi __builtin_vec_lvrxl (signed long long, signed int *);
+ LVRXL LVRXL_SI
+ vui __builtin_vec_lvrxl (signed long long, vui *);
+ LVRXL LVRXL_VUI
+ vui __builtin_vec_lvrxl (signed long long, unsigned int *);
+ LVRXL LVRXL_UI
+ vf __builtin_vec_lvrxl (signed long long, vf *);
+ LVRXL LVRXL_VF
+ vf __builtin_vec_lvrxl (signed long long, float *);
+ LVRXL LVRXL_F
+
+[VEC_LVSL, vec_lvsl, __builtin_vec_lvsl]
+ vuc __builtin_vec_lvsl (signed long long, unsigned char *);
+ LVSL LVSL_UC
+ vuc __builtin_vec_lvsl (signed long long, signed char *);
+ LVSL LVSL_SC
+
+[VEC_LVSR, vec_lvsr, __builtin_vec_lvsr]
+ vuc __builtin_vec_lvsr (signed long long, unsigned char *);
+ LVSR LVSR_UC
+ vuc __builtin_vec_lvsr (signed long long, signed char *);
+ LVSR LVSR_SC
+
+[VEC_LXVL, vec_xl_len, __builtin_vec_lxvl, _ARCH_PPC64_PWR9]
+ vsc __builtin_vec_lxvl (signed char *, unsigned long long);
+ LXVL LXVL_VSC
+ vuc __builtin_vec_lxvl (unsigned char *, unsigned long long);
+ LXVL LXVL_VUC
+ vss __builtin_vec_lxvl (signed short *, unsigned long long);
+ LXVL LXVL_VSS
+ vus __builtin_vec_lxvl (unsigned short *, unsigned long long);
+ LXVL LXVL_VUS
+ vsi __builtin_vec_lxvl (signed int *, unsigned long long);
+ LXVL LXVL_VSI
+ vui __builtin_vec_lxvl (unsigned int *, unsigned long long);
+ LXVL LXVL_VUI
+ vsll __builtin_vec_lxvl (signed long long *, unsigned long long);
+ LXVL LXVL_VSLL
+ vull __builtin_vec_lxvl (unsigned long long *, unsigned long long);
+ LXVL LXVL_VULL
+ vsq __builtin_vec_lxvl (signed __int128 *, unsigned long long);
+ LXVL LXVL_VSQ
+ vuq __builtin_vec_lxvl (unsigned __int128 *, unsigned long long);
+ LXVL LXVL_VUQ
+ vf __builtin_vec_lxvl (float *, unsigned long long);
+ LXVL LXVL_VF
+ vd __builtin_vec_lxvl (double *, unsigned long long);
+ LXVL LXVL_VD
+
+[VEC_MADD, vec_madd, __builtin_vec_madd]
+ vss __builtin_vec_madd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS
+ vss __builtin_vec_madd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS
+ vss __builtin_vec_madd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS
+ vus __builtin_vec_madd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS
+ vf __builtin_vec_madd (vf, vf, vf);
+ XVMADDSP
+ vd __builtin_vec_madd (vd, vd, vd);
+ XVMADDDP
+
+[VEC_MADDS, vec_madds, __builtin_vec_madds]
+ vss __builtin_vec_madds (vss, vss, vss);
+ VMHADDSHS
+
+[VEC_MAX, vec_max, __builtin_vec_max]
+ vsc __builtin_vec_max (vsc, vsc);
+ VMAXSB
+ vuc __builtin_vec_max (vuc, vuc);
+ VMAXUB
+ vss __builtin_vec_max (vss, vss);
+ VMAXSH
+ vus __builtin_vec_max (vus, vus);
+ VMAXUH
+ vsi __builtin_vec_max (vsi, vsi);
+ VMAXSW
+ vui __builtin_vec_max (vui, vui);
+ VMAXUW
+ vsll __builtin_vec_max (vsll, vsll);
+ VMAXSD
+ vull __builtin_vec_max (vull, vull);
+ VMAXUD
+ vf __builtin_vec_max (vf, vf);
+ XVMAXSP
+ vd __builtin_vec_max (vd, vd);
+ XVMAXDP
+
+[VEC_MERGEE, vec_mergee, __builtin_vec_vmrgew]
+ vsi __builtin_vec_vmrgew (vsi, vsi);
+ VMRGEW_V4SI VMRGEW_VSI
+ vui __builtin_vec_vmrgew (vui, vui);
+ VMRGEW_V4SI VMRGEW_VUI
+ vbi __builtin_vec_vmrgew (vbi, vbi);
+ VMRGEW_V4SI VMRGEW_VBI
+ vsll __builtin_vec_vmrgew (vsll, vsll);
+ VMRGEW_V2DI VMRGEW_VSLL
+ vull __builtin_vec_vmrgew (vull, vull);
+ VMRGEW_V2DI VMRGEW_VULL
+ vbll __builtin_vec_vmrgew (vbll, vbll);
+ VMRGEW_V2DI VMRGEW_VBLL
+ vf __builtin_vec_vmrgew (vf, vf);
+ VMRGEW_V4SF
+ vd __builtin_vec_vmrgew (vd, vd);
+ VMRGEW_V2DF
+
+[VEC_MERGEH, vec_mergeh, __builtin_vec_mergeh]
+ vbc __builtin_vec_mergeh (vbc, vbc);
+ VMRGHB VMRGHB_VBC
+ vsc __builtin_vec_mergeh (vsc, vsc);
+ VMRGHB VMRGHB_VSC
+ vuc __builtin_vec_mergeh (vuc, vuc);
+ VMRGHB VMRGHB_VUC
+ vbs __builtin_vec_mergeh (vbs, vbs);
+ VMRGHH VMRGHH_VBS
+ vss __builtin_vec_mergeh (vss, vss);
+ VMRGHH VMRGHH_VSS
+ vus __builtin_vec_mergeh (vus, vus);
+ VMRGHH VMRGHH_VUS
+ vp __builtin_vec_mergeh (vp, vp);
+ VMRGHH VMRGHH_VP
+ vbi __builtin_vec_mergeh (vbi, vbi);
+ VMRGHW VMRGHW_VBI
+ vsi __builtin_vec_mergeh (vsi, vsi);
+ VMRGHW VMRGHW_VSI
+ vui __builtin_vec_mergeh (vui, vui);
+ VMRGHW VMRGHW_VUI
+ vbll __builtin_vec_mergeh (vbll, vbll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VBLL
+ vsll __builtin_vec_mergeh (vsll, vsll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VSLL
+ vull __builtin_vec_mergeh (vull, vull);
+ VEC_MERGEH_V2DI VEC_MERGEH_VULL
+ vf __builtin_vec_mergeh (vf, vf);
+ VMRGHW VMRGHW_VF
+ vd __builtin_vec_mergeh (vd, vd);
+ VEC_MERGEH_V2DF
+
+[VEC_MERGEL, vec_mergel, __builtin_vec_mergel]
+ vbc __builtin_vec_mergel (vbc, vbc);
+ VMRGLB VMRGLB_VBC
+ vsc __builtin_vec_mergel (vsc, vsc);
+ VMRGLB VMRGLB_VSC
+ vuc __builtin_vec_mergel (vuc, vuc);
+ VMRGLB VMRGLB_VUC
+ vbs __builtin_vec_mergel (vbs, vbs);
+ VMRGLH VMRGLH_VBS
+ vss __builtin_vec_mergel (vss, vss);
+ VMRGLH VMRGLH_VSS
+ vus __builtin_vec_mergel (vus, vus);
+ VMRGLH VMRGLH_VUS
+ vp __builtin_vec_mergel (vp, vp);
+ VMRGLH VMRGLH_VP
+ vbi __builtin_vec_mergel (vbi, vbi);
+ VMRGLW VMRGLW_VBI
+ vsi __builtin_vec_mergel (vsi, vsi);
+ VMRGLW VMRGLW_VSI
+ vui __builtin_vec_mergel (vui, vui);
+ VMRGLW VMRGLW_VUI
+ vbll __builtin_vec_mergel (vbll, vbll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VBLL
+ vsll __builtin_vec_mergel (vsll, vsll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VSLL
+ vull __builtin_vec_mergel (vull, vull);
+ VEC_MERGEL_V2DI VEC_MERGEL_VULL
+ vf __builtin_vec_mergel (vf, vf);
+ VMRGLW VMRGLW_VF
+ vd __builtin_vec_mergel (vd, vd);
+ VEC_MERGEL_V2DF
+
+[VEC_MERGEO, vec_mergeo, __builtin_vec_vmrgow]
+ vsi __builtin_vec_vmrgow (vsi, vsi);
+ VMRGOW_V4SI VMRGOW_VSI
+ vui __builtin_vec_vmrgow (vui, vui);
+ VMRGOW_V4SI VMRGOW_VUI
+ vbi __builtin_vec_vmrgow (vbi, vbi);
+ VMRGOW_V4SI VMRGOW_VBI
+ vsll __builtin_vec_vmrgow (vsll, vsll);
+ VMRGOW_V2DI VMRGOW_VSLL
+ vull __builtin_vec_vmrgow (vull, vull);
+ VMRGOW_V2DI VMRGOW_VULL
+ vbll __builtin_vec_vmrgow (vbll, vbll);
+ VMRGOW_V2DI VMRGOW_VBLL
+ vf __builtin_vec_vmrgow (vf, vf);
+ VMRGOW_V4SF
+ vd __builtin_vec_vmrgow (vd, vd);
+ VMRGOW_V2DF
+
+[VEC_MIN, vec_min, __builtin_vec_min]
+ vsc __builtin_vec_min (vsc, vsc);
+ VMINSB
+ vuc __builtin_vec_min (vuc, vuc);
+ VMINUB
+ vss __builtin_vec_min (vss, vss);
+ VMINSH
+ vus __builtin_vec_min (vus, vus);
+ VMINUH
+ vsi __builtin_vec_min (vsi, vsi);
+ VMINSW
+ vui __builtin_vec_min (vui, vui);
+ VMINUW
+ vsll __builtin_vec_min (vsll, vsll);
+ VMINSD
+ vull __builtin_vec_min (vull, vull);
+ VMINUD
+ vf __builtin_vec_min (vf, vf);
+ XVMINSP
+ vd __builtin_vec_min (vd, vd);
+ XVMINDP
+
+[VEC_MLADD, vec_mladd, __builtin_vec_mladd]
+ vss __builtin_vec_mladd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS2
+ vss __builtin_vec_mladd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS2
+ vss __builtin_vec_mladd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS2
+ vus __builtin_vec_mladd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS2
+
+[VEC_MRADDS, vec_mradds, __builtin_vec_mradds]
+ vss __builtin_vec_mradds (vss, vss, vss);
+ VMHRADDSHS
+
+[VEC_MSUB, vec_msub, __builtin_vec_msub, __VSX__]
+ vf __builtin_vec_msub (vf, vf, vf);
+ XVMSUBSP
+ vd __builtin_vec_msub (vd, vd, vd);
+ XVMSUBDP
+
+[VEC_MSUM, vec_msum, __builtin_vec_msum]
+ vui __builtin_vec_msum (vuc, vuc, vui);
+ VMSUMUBM
+ vsi __builtin_vec_msum (vsc, vuc, vsi);
+ VMSUMMBM
+ vui __builtin_vec_msum (vus, vus, vui);
+ VMSUMUHM
+ vsi __builtin_vec_msum (vss, vss, vsi);
+ VMSUMSHM
+ vsq __builtin_vec_msum (vsll, vsll, vsq);
+ VMSUMUDM VMSUMUDM_S
+ vuq __builtin_vec_msum (vull, vull, vuq);
+ VMSUMUDM VMSUMUDM_U
+
+[VEC_MSUMS, vec_msums, __builtin_vec_msums]
+ vui __builtin_vec_msums (vus, vus, vui);
+ VMSUMUHS
+ vsi __builtin_vec_msums (vss, vss, vsi);
+ VMSUMSHS
+
+[VEC_MTVSCR, vec_mtvscr, __builtin_vec_mtvscr]
+ void __builtin_vec_mtvscr (vbc);
+ MTVSCR MTVSCR_VBC
+ void __builtin_vec_mtvscr (vsc);
+ MTVSCR MTVSCR_VSC
+ void __builtin_vec_mtvscr (vuc);
+ MTVSCR MTVSCR_VUC
+ void __builtin_vec_mtvscr (vbs);
+ MTVSCR MTVSCR_VBS
+ void __builtin_vec_mtvscr (vss);
+ MTVSCR MTVSCR_VSS
+ void __builtin_vec_mtvscr (vus);
+ MTVSCR MTVSCR_VUS
+ void __builtin_vec_mtvscr (vp);
+ MTVSCR MTVSCR_VP
+ void __builtin_vec_mtvscr (vbi);
+ MTVSCR MTVSCR_VBI
+ void __builtin_vec_mtvscr (vsi);
+ MTVSCR MTVSCR_VSI
+ void __builtin_vec_mtvscr (vui);
+ MTVSCR MTVSCR_VUI
+
+; Note that there is no entry for VEC_MUL. See rs6000-c.c:
+; altivec_resolve_overloaded_builtin, where there is special-case
+; code for VEC_MUL. TODO: Is this really necessary? Investigate.
+
+[VEC_MULE, vec_mule, __builtin_vec_mule]
+ vss __builtin_vec_mule (vsc, vsc);
+ VMULESB
+ vus __builtin_vec_mule (vuc, vuc);
+ VMULEUB
+ vsi __builtin_vec_mule (vss, vss);
+ VMULESH
+ vui __builtin_vec_mule (vus, vus);
+ VMULEUH
+ vsll __builtin_vec_mule (vsi, vsi);
+ VMULESW
+ vull __builtin_vec_mule (vui, vui);
+ VMULEUW
+
+[VEC_MULO, vec_mulo, __builtin_vec_mulo]
+ vss __builtin_vec_mulo (vsc, vsc);
+ VMULOSB
+ vus __builtin_vec_mulo (vuc, vuc);
+ VMULOUB
+ vsi __builtin_vec_mulo (vss, vss);
+ VMULOSH
+ vui __builtin_vec_mulo (vus, vus);
+ VMULOUH
+ vsll __builtin_vec_mulo (vsi, vsi);
+ VMULOSW
+ vull __builtin_vec_mulo (vui, vui);
+ VMULOUW
+
+[VEC_NABS, vec_nabs, __builtin_vec_nabs]
+ vsc __builtin_vec_nabs (vsc);
+ NABS_V16QI
+ vss __builtin_vec_nabs (vss);
+ NABS_V8HI
+ vsi __builtin_vec_nabs (vsi);
+ NABS_V4SI
+ vsll __builtin_vec_nabs (vsll);
+ NABS_V2DI
+ vf __builtin_vec_nabs (vf);
+ NABS_V4SF
+ vd __builtin_vec_nabs (vd);
+ NABS_V2DF
+
+[VEC_NAND, vec_nand, __builtin_vec_nand, _ARCH_PWR8]
+ vsc __builtin_vec_nand (vsc, vsc);
+ NAND_V16QI
+ vuc __builtin_vec_nand (vuc, vuc);
+ NAND_V16QI_UNS NAND_VUC
+ vbc __builtin_vec_nand (vbc, vbc);
+ NAND_V16QI_UNS NAND_VBC
+ vss __builtin_vec_nand (vss, vss);
+ NAND_V8HI
+ vus __builtin_vec_nand (vus, vus);
+ NAND_V8HI_UNS NAND_VUS
+ vbs __builtin_vec_nand (vbs, vbs);
+ NAND_V8HI_UNS NAND_VBS
+ vsi __builtin_vec_nand (vsi, vsi);
+ NAND_V4SI
+ vui __builtin_vec_nand (vui, vui);
+ NAND_V4SI_UNS NAND_VUI
+ vbi __builtin_vec_nand (vbi, vbi);
+ NAND_V4SI_UNS NAND_VBI
+ vsll __builtin_vec_nand (vsll, vsll);
+ NAND_V2DI
+ vull __builtin_vec_nand (vull, vull);
+ NAND_V2DI_UNS NAND_VULL
+ vbll __builtin_vec_nand (vbll, vbll);
+ NAND_V2DI_UNS NAND_VBLL
+ vf __builtin_vec_nand (vf, vf);
+ NAND_V4SF
+ vd __builtin_vec_nand (vd, vd);
+ NAND_V2DF
+
+[VEC_NCIPHER_BE, vec_ncipher_be, __builtin_vec_vncipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipher_be (vuc, vuc);
+ VNCIPHER_BE
+
+[VEC_NCIPHERLAST_BE, vec_ncipherlast_be, __builtin_vec_vncipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipherlast_be (vuc, vuc);
+ VNCIPHERLAST_BE
+
+[VEC_NEARBYINT, vec_nearbyint, __builtin_vec_nearbyint, __VSX__]
+ vf __builtin_vec_nearbyint (vf);
+ XVRSPI XVRSPI_NBI
+ vd __builtin_vec_nearbyint (vd);
+ XVRDPI XVRDPI_NBI
+
+[VEC_NEG, vec_neg, __builtin_vec_neg]
+ vsc __builtin_vec_neg (vsc);
+ NEG_V16QI
+ vss __builtin_vec_neg (vss);
+ NEG_V8HI
+ vsi __builtin_vec_neg (vsi);
+ NEG_V4SI
+ vsll __builtin_vec_neg (vsll);
+ NEG_V2DI
+ vf __builtin_vec_neg (vf);
+ NEG_V4SF
+ vd __builtin_vec_neg (vd);
+ NEG_V2DF
+
+[VEC_NMADD, vec_nmadd, __builtin_vec_nmadd, __VSX__]
+ vf __builtin_vec_nmadd (vf, vf, vf);
+ XVNMADDSP
+ vd __builtin_vec_nmadd (vd, vd, vd);
+ XVNMADDDP
+
+[VEC_NMSUB, vec_nmsub, __builtin_vec_nmsub]
+ vf __builtin_vec_nmsub (vf, vf, vf);
+ XVNMSUBSP
+ vd __builtin_vec_nmsub (vd, vd, vd);
+ XVNMSUBDP
+
+[VEC_NOR, vec_nor, __builtin_vec_nor]
+ vsc __builtin_vec_nor (vsc, vsc);
+ VNOR_V16QI
+ vuc __builtin_vec_nor (vuc, vuc);
+ VNOR_V16QI_UNS VNOR_V16QI_U
+ vbc __builtin_vec_nor (vbc, vbc);
+ VNOR_V16QI_UNS VNOR_V16QI_B
+ vss __builtin_vec_nor (vss, vss);
+ VNOR_V8HI
+ vus __builtin_vec_nor (vus, vus);
+ VNOR_V8HI_UNS VNOR_V8HI_U
+ vbs __builtin_vec_nor (vbs, vbs);
+ VNOR_V8HI_UNS VNOR_V8HI_B
+ vsi __builtin_vec_nor (vsi, vsi);
+ VNOR_V4SI
+ vui __builtin_vec_nor (vui, vui);
+ VNOR_V4SI_UNS VNOR_V4SI_U
+ vbi __builtin_vec_nor (vbi, vbi);
+ VNOR_V4SI_UNS VNOR_V4SI_B
+ vsll __builtin_vec_nor (vsll, vsll);
+ VNOR_V2DI
+ vull __builtin_vec_nor (vull, vull);
+ VNOR_V2DI_UNS VNOR_V2DI_U
+ vbll __builtin_vec_nor (vbll, vbll);
+ VNOR_V2DI_UNS VNOR_V2DI_B
+ vf __builtin_vec_nor (vf, vf);
+ VNOR_V4SF
+ vd __builtin_vec_nor (vd, vd);
+ VNOR_V2DF
+
+[VEC_OR, vec_or, __builtin_vec_or]
+ vsc __builtin_vec_or (vsc, vsc);
+ VOR_V16QI
+ vuc __builtin_vec_or (vuc, vuc);
+ VOR_V16QI_UNS VOR_V16QI_U
+ vbc __builtin_vec_or (vbc, vbc);
+ VOR_V16QI_UNS VOR_V16QI_B
+ vss __builtin_vec_or (vss, vss);
+ VOR_V8HI
+ vus __builtin_vec_or (vus, vus);
+ VOR_V8HI_UNS VOR_V8HI_U
+ vbs __builtin_vec_or (vbs, vbs);
+ VOR_V8HI_UNS VOR_V8HI_B
+ vsi __builtin_vec_or (vsi, vsi);
+ VOR_V4SI
+ vui __builtin_vec_or (vui, vui);
+ VOR_V4SI_UNS VOR_V4SI_U
+ vbi __builtin_vec_or (vbi, vbi);
+ VOR_V4SI_UNS VOR_V4SI_B
+ vsll __builtin_vec_or (vsll, vsll);
+ VOR_V2DI
+ vull __builtin_vec_or (vull, vull);
+ VOR_V2DI_UNS VOR_V2DI_U
+ vbll __builtin_vec_or (vbll, vbll);
+ VOR_V2DI_UNS VOR_V2DI_B
+ vf __builtin_vec_or (vf, vf);
+ VOR_V4SF
+ vd __builtin_vec_or (vd, vd);
+ VOR_V2DF
+
+[VEC_ORC, vec_orc, __builtin_vec_orc, _ARCH_PWR8]
+ vsc __builtin_vec_orc (vsc, vsc);
+ ORC_V16QI
+ vuc __builtin_vec_orc (vuc, vuc);
+ ORC_V16QI_UNS ORC_VUC
+ vbc __builtin_vec_orc (vbc, vbc);
+ ORC_V16QI_UNS ORC_VBC
+ vss __builtin_vec_orc (vss, vss);
+ ORC_V8HI
+ vus __builtin_vec_orc (vus, vus);
+ ORC_V8HI_UNS ORC_VUS
+ vbs __builtin_vec_orc (vbs, vbs);
+ ORC_V8HI_UNS ORC_VBS
+ vsi __builtin_vec_orc (vsi, vsi);
+ ORC_V4SI
+ vui __builtin_vec_orc (vui, vui);
+ ORC_V4SI_UNS ORC_VUI
+ vbi __builtin_vec_orc (vbi, vbi);
+ ORC_V4SI_UNS ORC_VBI
+ vsll __builtin_vec_orc (vsll, vsll);
+ ORC_V2DI
+ vull __builtin_vec_orc (vull, vull);
+ ORC_V2DI_UNS ORC_VULL
+ vbll __builtin_vec_orc (vbll, vbll);
+ ORC_V2DI_UNS ORC_VBLL
+ vf __builtin_vec_orc (vf, vf);
+ ORC_V4SF
+ vd __builtin_vec_orc (vd, vd);
+ ORC_V2DF
+
+[VEC_PACK, vec_pack, __builtin_vec_pack]
+ vsc __builtin_vec_pack (vss, vss);
+ VPKUHUM VPKUHUM_VSS
+ vuc __builtin_vec_pack (vus, vus);
+ VPKUHUM VPKUHUM_VUS
+ vbc __builtin_vec_pack (vbs, vbs);
+ VPKUHUM VPKUHUM_VBS
+ vss __builtin_vec_pack (vsi, vsi);
+ VPKUWUM VPKUWUM_VSI
+ vus __builtin_vec_pack (vui, vui);
+ VPKUWUM VPKUWUM_VUI
+ vbs __builtin_vec_pack (vbi, vbi);
+ VPKUWUM VPKUWUM_VBI
+ vsi __builtin_vec_pack (vsll, vsll);
+ VPKUDUM VPKUDUM_VSLL
+ vui __builtin_vec_pack (vull, vull);
+ VPKUDUM VPKUDUM_VULL
+ vbi __builtin_vec_pack (vbll, vbll);
+ VPKUDUM VPKUDUM_VBLL
+ vf __builtin_vec_pack (vd, vd);
+ FLOAT2_V2DF FLOAT2_V2DF_PACK
+
+[VEC_PACKPX, vec_packpx, __builtin_vec_packpx]
+ vp __builtin_vec_packpx (vui, vui);
+ VPKPX
+
+[VEC_PACKS, vec_packs, __builtin_vec_packs]
+ vuc __builtin_vec_packs (vus, vus);
+ VPKUHUS
+ vsc __builtin_vec_packs (vss, vss);
+ VPKSHSS
+ vus __builtin_vec_packs (vui, vui);
+ VPKUWUS
+ vss __builtin_vec_packs (vsi, vsi);
+ VPKSWSS
+ vui __builtin_vec_packs (vull, vull);
+ VPKUDUS
+ vsi __builtin_vec_packs (vsll, vsll);
+ VPKSDSS
+
+[VEC_PDEP, vec_pdep, __builtin_vec_vpdepd, _ARCH_PWR10]
+ vull __builtin_vec_vpdepd (vull, vull);
+ VPDEPD
+
+[VEC_PERM, vec_perm, __builtin_vec_perm]
+ vsc __builtin_vec_perm (vsc, vsc, vuc);
+ VPERM_16QI
+ vuc __builtin_vec_perm (vuc, vuc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VUC
+ vbc __builtin_vec_perm (vbc, vbc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VBC
+ vss __builtin_vec_perm (vss, vss, vuc);
+ VPERM_8HI
+ vus __builtin_vec_perm (vus, vus, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VUS
+ vbs __builtin_vec_perm (vbs, vbs, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VBS
+ vp __builtin_vec_perm (vp, vp, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VP
+ vsi __builtin_vec_perm (vsi, vsi, vuc);
+ VPERM_4SI
+ vui __builtin_vec_perm (vui, vui, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VUI
+ vbi __builtin_vec_perm (vbi, vbi, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VBI
+ vsll __builtin_vec_perm (vsll, vsll, vuc);
+ VPERM_2DI
+ vull __builtin_vec_perm (vull, vull, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VULL
+ vbll __builtin_vec_perm (vbll, vbll, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VBLL
+ vf __builtin_vec_perm (vf, vf, vuc);
+ VPERM_4SF
+ vd __builtin_vec_perm (vd, vd, vuc);
+ VPERM_2DF
+
+[VEC_PERMX, vec_permx, __builtin_vec_xxpermx, _ARCH_PWR10]
+ vsc __builtin_vec_xxpermx (vsc, vsc, vuc, const int);
+ XXPERMX_V16QI
+ vuc __builtin_vec_xxpermx (vuc, vuc, vuc, const int);
+ XXPERMX_UV16QI
+ vss __builtin_vec_xxpermx (vss, vss, vuc, const int);
+ XXPERMX_V8HI
+ vus __builtin_vec_xxpermx (vus, vus, vuc, const int);
+ XXPERMX_UV8HI
+ vsi __builtin_vec_xxpermx (vsi, vsi, vuc, const int);
+ XXPERMX_V4SI
+ vui __builtin_vec_xxpermx (vui, vui, vuc, const int);
+ XXPERMX_UV4SI
+ vsll __builtin_vec_xxpermx (vsll, vsll, vuc, const int);
+ XXPERMX_V2DI
+ vull __builtin_vec_xxpermx (vull, vull, vuc, const int);
+ XXPERMX_UV2DI
+ vf __builtin_vec_xxpermx (vf, vf, vuc, const int);
+ XXPERMX_V4SF
+ vd __builtin_vec_xxpermx (vd, vd, vuc, const int);
+ XXPERMX_V2DF
+
+[VEC_PERMXOR, vec_permxor, __builtin_vec_vpermxor]
+ vsc __builtin_vec_vpermxor (vsc, vsc, vsc);
+ VPERMXOR VPERMXOR_VSC
+ vuc __builtin_vec_vpermxor (vuc, vuc, vuc);
+ VPERMXOR VPERMXOR_VUC
+ vbc __builtin_vec_vpermxor (vbc, vbc, vbc);
+ VPERMXOR VPERMXOR_VBC
+
+[VEC_PEXT, vec_pext, __builtin_vec_vpextd, _ARCH_PWR10]
+ vull __builtin_vec_vpextd (vull, vull);
+ VPEXTD
+
+[VEC_PMSUM, vec_pmsum_be, __builtin_vec_vpmsum]
+ vus __builtin_vec_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_V
+ vui __builtin_vec_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_V
+ vull __builtin_vec_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_V
+ vuq __builtin_vec_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_V
+
+[VEC_POPCNT, vec_popcnt, __builtin_vec_vpopcnt, _ARCH_PWR8]
+ vsc __builtin_vec_vpopcnt (vsc);
+ VPOPCNTB
+ vuc __builtin_vec_vpopcnt (vuc);
+ VPOPCNTUB
+ vss __builtin_vec_vpopcnt (vss);
+ VPOPCNTH
+ vus __builtin_vec_vpopcnt (vus);
+ VPOPCNTUH
+ vsi __builtin_vec_vpopcnt (vsi);
+ VPOPCNTW
+ vui __builtin_vec_vpopcnt (vui);
+ VPOPCNTUW
+ vsll __builtin_vec_vpopcnt (vsll);
+ VPOPCNTD
+ vull __builtin_vec_vpopcnt (vull);
+ VPOPCNTUD
+
+[VEC_PARITY_LSBB, vec_parity_lsbb, __builtin_vec_vparity_lsbb, _ARCH_PWR9]
+ vui __builtin_vec_vparity_lsbb (vsi);
+ VPRTYBW VPRTYBW_S
+ vui __builtin_vec_vparity_lsbb (vui);
+ VPRTYBW VPRTYBW_U
+ vull __builtin_vec_vparity_lsbb (vsll);
+ VPRTYBD VPRTYBD_S
+ vull __builtin_vec_vparity_lsbb (vull);
+ VPRTYBD VPRTYBD_U
+ vuq __builtin_vec_vparity_lsbb (vsq);
+ VPRTYBQ VPRTYBQ_S
+ vuq __builtin_vec_vparity_lsbb (vuq);
+ VPRTYBQ VPRTYBQ_U
+
+[VEC_RE, vec_re, __builtin_vec_re]
+ vf __builtin_vec_re (vf);
+ XVRESP
+ vd __builtin_vec_re (vd);
+ XVREDP
+
+[VEC_RECIP, vec_recipdiv, __builtin_vec_recipdiv]
+ vf __builtin_vec_recipdiv (vf, vf);
+ RECIP_V4SF
+ vd __builtin_vec_recipdiv (vd, vd);
+ RECIP_V2DF
+
+[VEC_REPLACE_ELT, vec_replace_elt, __builtin_vec_replace_elt, _ARCH_PWR10]
+ vui __builtin_vec_replace_elt (vui, unsigned int, const int);
+ VREPLACE_ELT_UV4SI
+ vsi __builtin_vec_replace_elt (vsi, signed int, const int);
+ VREPLACE_ELT_V4SI
+ vull __builtin_vec_replace_elt (vull, unsigned long long, const int);
+ VREPLACE_ELT_UV2DI
+ vsll __builtin_vec_replace_elt (vsll, signed long long, const int);
+ VREPLACE_ELT_V2DI
+ vf __builtin_vec_replace_elt (vf, float, const int);
+ VREPLACE_ELT_V4SF
+ vd __builtin_vec_replace_elt (vd, double, const int);
+ VREPLACE_ELT_V2DF
+
+[VEC_REPLACE_UN, vec_replace_unaligned, __builtin_vec_replace_un, _ARCH_PWR10]
+ vui __builtin_vec_replace_un (vui, unsigned int, const int);
+ VREPLACE_UN_UV4SI
+ vsi __builtin_vec_replace_un (vsi, signed int, const int);
+ VREPLACE_UN_V4SI
+ vull __builtin_vec_replace_un (vull, unsigned long long, const int);
+ VREPLACE_UN_UV2DI
+ vsll __builtin_vec_replace_un (vsll, signed long long, const int);
+ VREPLACE_UN_V2DI
+ vf __builtin_vec_replace_un (vf, float, const int);
+ VREPLACE_UN_V4SF
+ vd __builtin_vec_replace_un (vd, double, const int);
+ VREPLACE_UN_V2DF
+
+[VEC_REVB, vec_revb, __builtin_vec_revb, _ARCH_PWR8]
+ vss __builtin_vec_revb (vss);
+ REVB_V8HI REVB_VSS
+ vus __builtin_vec_revb (vus);
+ REVB_V8HI REVB_VUS
+ vsi __builtin_vec_revb (vsi);
+ REVB_V4SI REVB_VSI
+ vui __builtin_vec_revb (vui);
+ REVB_V4SI REVB_VUI
+ vsll __builtin_vec_revb (vsll);
+ REVB_V2DI REVB_VSLL
+ vull __builtin_vec_revb (vull);
+ REVB_V2DI REVB_VULL
+ vsq __builtin_vec_revb (vsq);
+ REVB_V1TI REVB_VSQ
+ vuq __builtin_vec_revb (vuq);
+ REVB_V1TI REVB_VUQ
+ vf __builtin_vec_revb (vf);
+ REVB_V4SF
+ vd __builtin_vec_revb (vd);
+ REVB_V2DF
+
+[VEC_REVE, vec_reve, __builtin_vec_vreve]
+ vsc __builtin_vec_vreve (vsc);
+ VREVE_V16QI VREVE_VSC
+ vuc __builtin_vec_vreve (vuc);
+ VREVE_V16QI VREVE_VUC
+ vbc __builtin_vec_vreve (vbc);
+ VREVE_V16QI VREVE_VBC
+ vss __builtin_vec_vreve (vss);
+ VREVE_V8HI VREVE_VSS
+ vus __builtin_vec_vreve (vus);
+ VREVE_V8HI VREVE_VUS
+ vbs __builtin_vec_vreve (vbs);
+ VREVE_V8HI VREVE_VBS
+ vsi __builtin_vec_vreve (vsi);
+ VREVE_V4SI VREVE_VSI
+ vui __builtin_vec_vreve (vui);
+ VREVE_V4SI VREVE_VUI
+ vbi __builtin_vec_vreve (vbi);
+ VREVE_V4SI VREVE_VBI
+ vsll __builtin_vec_vreve (vsll);
+ VREVE_V2DI VREVE_VSLL
+ vull __builtin_vec_vreve (vull);
+ VREVE_V2DI VREVE_VULL
+ vbll __builtin_vec_vreve (vbll);
+ VREVE_V2DI VREVE_VBLL
+ vf __builtin_vec_vreve (vf);
+ VREVE_V4SF
+ vd __builtin_vec_vreve (vd);
+ VREVE_V2DF
+
+[VEC_RINT, vec_rint, __builtin_vec_rint, __VSX__]
+ vf __builtin_vec_rint (vf);
+ XVRSPIC
+ vd __builtin_vec_rint (vd);
+ XVRDPIC
+
+[VEC_RL, vec_rl, __builtin_vec_rl]
+ vsc __builtin_vec_rl (vsc, vuc);
+ VRLB VRLB_VSC
+ vuc __builtin_vec_rl (vuc, vuc);
+ VRLB VRLB_VUC
+ vss __builtin_vec_rl (vss, vus);
+ VRLH VRLH_VSS
+ vus __builtin_vec_rl (vus, vus);
+ VRLH VRLH_VUS
+ vsi __builtin_vec_rl (vsi, vui);
+ VRLW VRLW_VSI
+ vui __builtin_vec_rl (vui, vui);
+ VRLW VRLW_VUI
+ vsll __builtin_vec_rl (vsll, vull);
+ VRLD VRLD_VSLL
+ vull __builtin_vec_rl (vull, vull);
+ VRLD VRLD_VULL
+
+[VEC_RLMI, vec_rlmi, __builtin_vec_rlmi]
+ vui __builtin_vec_rlmi (vui, vui, vui);
+ VRLWMI
+ vull __builtin_vec_rlmi (vull, vull, vull);
+ VRLDMI
+
+[VEC_RLNM, vec_rlnm, __builtin_vec_rlnm]
+ vui __builtin_vec_rlnm (vui, vui);
+ VRLWNM
+ vull __builtin_vec_rlnm (vull, vull);
+ VRLDNM
+
+[VEC_ROUND, vec_round, __builtin_vec_round]
+ vf __builtin_vec_round (vf);
+ XVRSPI
+ vd __builtin_vec_round (vd);
+ XVRDPI
+
+[VEC_RSQRT, vec_rsqrt, __builtin_vec_rsqrt]
+ vf __builtin_vec_rsqrt (vf);
+ RSQRT_4SF
+ vd __builtin_vec_rsqrt (vd);
+ RSQRT_2DF
+
+[VEC_RSQRTE, vec_rsqrte, __builtin_vec_rsqrte]
+ vf __builtin_vec_rsqrte (vf);
+ XVRSQRTESP
+ vd __builtin_vec_rsqrte (vd);
+ XVRSQRTEDP
+
+[VEC_SBOX_BE, vec_sbox_be, __builtin_vec_sbox_be, _ARCH_PWR8]
+ vuc __builtin_vec_sbox_be (vuc);
+ VSBOX_BE
+
+[VEC_SEL, vec_sel, __builtin_vec_sel]
+ vsc __builtin_vec_sel (vsc, vsc, vbc);
+ VSEL_16QI VSEL_16QI_B
+ vsc __builtin_vec_sel (vsc, vsc, vuc);
+ VSEL_16QI VSEL_16QI_U
+ vuc __builtin_vec_sel (vuc, vuc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_UB
+ vuc __builtin_vec_sel (vuc, vuc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_UU
+ vbc __builtin_vec_sel (vbc, vbc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_BB
+ vbc __builtin_vec_sel (vbc, vbc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_BU
+ vss __builtin_vec_sel (vss, vss, vbs);
+ VSEL_8HI VSEL_8HI_B
+ vss __builtin_vec_sel (vss, vss, vus);
+ VSEL_8HI VSEL_8HI_U
+ vus __builtin_vec_sel (vus, vus, vbs);
+ VSEL_8HI_UNS VSEL_8HI_UB
+ vus __builtin_vec_sel (vus, vus, vus);
+ VSEL_8HI_UNS VSEL_8HI_UU
+ vbs __builtin_vec_sel (vbs, vbs, vbs);
+ VSEL_8HI_UNS VSEL_8HI_BB
+ vbs __builtin_vec_sel (vbs, vbs, vus);
+ VSEL_8HI_UNS VSEL_8HI_BU
+ vsi __builtin_vec_sel (vsi, vsi, vbi);
+ VSEL_4SI VSEL_4SI_B
+ vsi __builtin_vec_sel (vsi, vsi, vui);
+ VSEL_4SI VSEL_4SI_U
+ vui __builtin_vec_sel (vui, vui, vbi);
+ VSEL_4SI_UNS VSEL_4SI_UB
+ vui __builtin_vec_sel (vui, vui, vui);
+ VSEL_4SI_UNS VSEL_4SI_UU
+ vbi __builtin_vec_sel (vbi, vbi, vbi);
+ VSEL_4SI_UNS VSEL_4SI_BB
+ vbi __builtin_vec_sel (vbi, vbi, vui);
+ VSEL_4SI_UNS VSEL_4SI_BU
+ vsll __builtin_vec_sel (vsll, vsll, vbll);
+ VSEL_2DI_B VSEL_2DI_B
+ vsll __builtin_vec_sel (vsll, vsll, vull);
+ VSEL_2DI_B VSEL_2DI_U
+ vull __builtin_vec_sel (vull, vull, vbll);
+ VSEL_2DI_UNS VSEL_2DI_UB
+ vull __builtin_vec_sel (vull, vull, vull);
+ VSEL_2DI_UNS VSEL_2DI_UU
+ vbll __builtin_vec_sel (vbll, vbll, vbll);
+ VSEL_2DI_UNS VSEL_2DI_BB
+ vbll __builtin_vec_sel (vbll, vbll, vull);
+ VSEL_2DI_UNS VSEL_2DI_BU
+ vf __builtin_vec_sel (vf, vf, vbll);
+ VSEL_4SF VSEL_4SF_B
+ vf __builtin_vec_sel (vf, vf, vull);
+ VSEL_4SF VSEL_4SF_U
+ vd __builtin_vec_sel (vd, vd, vbll);
+ VSEL_2DF VSEL_2DF_B
+ vd __builtin_vec_sel (vd, vd, vull);
+ VSEL_2DF VSEL_2DF_U
+
+[VEC_SHASIGMA_BE, vec_shasigma_be, __builtin_crypto_vshasigma]
+ vui __builtin_crypto_vshasigma (vui, const int, const int);
+ VSHASIGMAW
+ vull __builtin_crypto_vshasigma (vull, const int, const int);
+ VSHASIGMAD
+
+[VEC_SIGNED, vec_signed, __builtin_vec_vsigned]
+ vsi __builtin_vec_vsigned (vf);
+ VEC_VSIGNED_V4SF
+ vsll __builtin_vec_vsigned (vd);
+ VEC_VSIGNED_V2DF
+
+[VEC_SIGNED2, vec_signed2, __builtin_vec_vsigned2]
+ vsi __builtin_vec_vsigned2 (vd, vd);
+ VEC_VSIGNED2_V2DF
+
+[VEC_SIGNEDO, vec_signedo, __builtin_vec_vsignedo]
+ vui __builtin_vec_vsignedo (vd);
+ VEC_VSIGNEDO_V2DF
+
+[VEC_SL, vec_sl, __builtin_vec_sl]
+ vsc __builtin_vec_sl (vsc, vuc);
+ VSLB VSLB_VSC
+ vuc __builtin_vec_sl (vuc, vuc);
+ VSLB VSLB_VUC
+ vss __builtin_vec_sl (vss, vus);
+ VSLH VSLH_VSS
+ vus __builtin_vec_sl (vus, vus);
+ VSLH VSLH_VUS
+ vsi __builtin_vec_sl (vsi, vui);
+ VSLW VSLW_VSI
+ vui __builtin_vec_sl (vui, vui);
+ VSLW VSLW_VUI
+ vsll __builtin_vec_sl (vsll, vull);
+ VSLD VSLD_VSLL
+ vull __builtin_vec_sl (vull, vull);
+ VSLD VSLD_VULL
+
+[VEC_SLD, vec_sld, __builtin_vec_sld]
+ vsc __builtin_vec_sld (vsc, vsc, const int);
+ VSLDOI_16QI VSLDOI_VSC
+ vbc __builtin_vec_sld (vbc, vbc, const int);
+ VSLDOI_16QI VSLDOI_VBC
+ vuc __builtin_vec_sld (vuc, vuc, const int);
+ VSLDOI_16QI VSLDOI_VUC
+ vss __builtin_vec_sld (vss, vss, const int);
+ VSLDOI_8HI VSLDOI_VSS
+ vbs __builtin_vec_sld (vbs, vbs, const int);
+ VSLDOI_8HI VSLDOI_VBS
+ vus __builtin_vec_sld (vus, vus, const int);
+ VSLDOI_8HI VSLDOI_VUS
+ vp __builtin_vec_sld (vp, vp, const int);
+ VSLDOI_8HI VSLDOI_VP
+ vsi __builtin_vec_sld (vsi, vsi, const int);
+ VSLDOI_4SI VSLDOI_VSI
+ vbi __builtin_vec_sld (vbi, vbi, const int);
+ VSLDOI_4SI VSLDOI_VBI
+ vui __builtin_vec_sld (vui, vui, const int);
+ VSLDOI_4SI VSLDOI_VUI
+ vsll __builtin_vec_sld (vsll, vsll, const int);
+ VSLDOI_2DI VSLDOI_VSLL
+ vbll __builtin_vec_sld (vbll, vbll, const int);
+ VSLDOI_2DI VSLDOI_VBLL
+ vull __builtin_vec_sld (vull, vull, const int);
+ VSLDOI_2DI VSLDOI_VULL
+ vf __builtin_vec_sld (vf, vf, const int);
+ VSLDOI_4SF
+ vd __builtin_vec_sld (vd, vd, const int);
+ VSLDOI_2DF
+
+[VEC_SLDB, vec_sldb, __builtin_vec_sldb, _ARCH_PWR10]
+ vsc __builtin_vec_sldb (vsc, vsc, const int);
+ VSLDB_V16QI VSLDB_VSC
+ vuc __builtin_vec_sldb (vuc, vuc, const int);
+ VSLDB_V16QI VSLDB_VUC
+ vss __builtin_vec_sldb (vss, vss, const int);
+ VSLDB_V8HI VSLDB_VSS
+ vus __builtin_vec_sldb (vus, vus, const int);
+ VSLDB_V8HI VSLDB_VUS
+ vsi __builtin_vec_sldb (vsi, vsi, const int);
+ VSLDB_V4SI VSLDB_VSI
+ vui __builtin_vec_sldb (vui, vui, const int);
+ VSLDB_V4SI VSLDB_VUI
+ vsll __builtin_vec_sldb (vsll, vsll, const int);
+ VSLDB_V2DI VSLDB_VSLL
+ vull __builtin_vec_sldb (vull, vull, const int);
+ VSLDB_V2DI VSLDB_VULL
+
+[VEC_SLDW, vec_sldw, __builtin_vec_sldw]
+ vsc __builtin_vec_sldw (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC
+ vuc __builtin_vec_sldw (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC
+ vss __builtin_vec_sldw (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS
+ vus __builtin_vec_sldw (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS
+ vsi __builtin_vec_sldw (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI
+ vui __builtin_vec_sldw (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI
+ vsll __builtin_vec_sldw (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL
+ vull __builtin_vec_sldw (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL
+
+[VEC_SLL, vec_sll, __builtin_vec_sll]
+ vsc __builtin_vec_sll (vsc, vuc);
+ VSL VSL_VSC
+ vuc __builtin_vec_sll (vuc, vuc);
+ VSL VSL_VUC
+ vss __builtin_vec_sll (vss, vuc);
+ VSL VSL_VSS
+ vus __builtin_vec_sll (vus, vuc);
+ VSL VSL_VUS
+ vp __builtin_vec_sll (vp, vuc);
+ VSL VSL_VP
+ vsi __builtin_vec_sll (vsi, vuc);
+ VSL VSL_VSI
+ vui __builtin_vec_sll (vui, vuc);
+ VSL VSL_VUI
+ vsll __builtin_vec_sll (vsll, vuc);
+ VSL VSL_VSLL
+ vull __builtin_vec_sll (vull, vuc);
+ VSL VSL_VULL
+
+[VEC_SLO, vec_slo, __builtin_vec_slo]
+ vsc __builtin_vec_slo (vsc, vsc);
+ VSLO VSLO_VSCS
+ vsc __builtin_vec_slo (vsc, vuc);
+ VSLO VSLO_VSCU
+ vuc __builtin_vec_slo (vuc, vsc);
+ VSLO VSLO_VUCS
+ vuc __builtin_vec_slo (vuc, vuc);
+ VSLO VSLO_VUCU
+ vss __builtin_vec_slo (vss, vsc);
+ VSLO VSLO_VSSS
+ vss __builtin_vec_slo (vss, vuc);
+ VSLO VSLO_VSSU
+ vus __builtin_vec_slo (vus, vsc);
+ VSLO VSLO_VUSS
+ vus __builtin_vec_slo (vus, vuc);
+ VSLO VSLO_VUSU
+ vp __builtin_vec_slo (vp, vsc);
+ VSLO VSLO_VPS
+ vp __builtin_vec_slo (vp, vuc);
+ VSLO VSLO_VPU
+ vsi __builtin_vec_slo (vsi, vsc);
+ VSLO VSLO_VSIS
+ vsi __builtin_vec_slo (vsi, vuc);
+ VSLO VSLO_VSIU
+ vui __builtin_vec_slo (vui, vsc);
+ VSLO VSLO_VUIS
+ vui __builtin_vec_slo (vui, vuc);
+ VSLO VSLO_VUIU
+ vsll __builtin_vec_slo (vsll, vsc);
+ VSLO VSLO_VSLLS
+ vsll __builtin_vec_slo (vsll, vuc);
+ VSLO VSLO_VSLLU
+ vull __builtin_vec_slo (vull, vsc);
+ VSLO VSLO_VULLS
+ vull __builtin_vec_slo (vull, vuc);
+ VSLO VSLO_VULLU
+ vf __builtin_vec_slo (vf, vsc);
+ VSLO VSLO_VFS
+ vf __builtin_vec_slo (vf, vuc);
+ VSLO VSLO_VFU
+
+[VEC_SLV, vec_slv, __builtin_vec_vslv, _ARCH_PWR9]
+ vuc __builtin_vec_vslv (vuc, vuc);
+ VSLV
+
+[VEC_SPLAT, vec_splat, __builtin_vec_splat]
+ vsc __builtin_vec_splat (vsc, signed int);
+ VSPLTB VSPLTB_VSC
+ vuc __builtin_vec_splat (vuc, signed int);
+ VSPLTB VSPLTB_VUC
+ vbc __builtin_vec_splat (vbc, signed int);
+ VSPLTB VSPLTB_VBC
+ vss __builtin_vec_splat (vss, signed int);
+ VSPLTH VSPLTH_VSS
+ vus __builtin_vec_splat (vus, signed int);
+ VSPLTH VSPLTH_VUS
+ vbs __builtin_vec_splat (vbs, signed int);
+ VSPLTH VSPLTH_VBS
+ vp __builtin_vec_splat (vp, signed int);
+ VSPLTH VSPLTH_VP
+ vf __builtin_vec_splat (vf, signed int);
+ VSPLTW VSPLTW_VF
+ vsi __builtin_vec_splat (vsi, signed int);
+ VSPLTW VSPLTW_VSI
+ vui __builtin_vec_splat (vui, signed int);
+ VSPLTW VSPLTW_VUI
+ vbi __builtin_vec_splat (vbi, signed int);
+ VSPLTW VSPLTW_VBI
+ vd __builtin_vec_splat (vd, signed int);
+ XXSPLTD_V2DF
+ vsll __builtin_vec_splat (vsll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VSLL
+ vull __builtin_vec_splat (vull, signed int);
+ XXSPLTD_V2DI XXSPLTD_VULL
+ vbll __builtin_vec_splat (vbll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VBLL
+
+[VEC_SPLATI, vec_splati, __builtin_vec_xxspltiw, _ARCH_PWR10]
+ vsi __builtin_vec_xxspltiw (signed int);
+ VXXSPLTIW_V4SI
+ vf __builtin_vec_xxspltiw (float);
+ VXXSPLTIW_V4SF
+
+[VEC_SPLATID, vec_splatid, __builtin_vec_xxspltid, _ARCH_PWR10]
+ vd __builtin_vec_xxspltid (float);
+ VXXSPLTIDP
+
+[VEC_SPLATI_INS, vec_splati_ins, __builtin_vec_xxsplti32dx, _ARCH_PWR10]
+ vsi __builtin_vec_xxsplti32dx (vsi, const int, signed int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VSI
+ vui __builtin_vec_xxsplti32dx (vui, const int, unsigned int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VUI
+ vf __builtin_vec_xxsplti32dx (vf, const int, float);
+ VXXSPLTI32DX_V4SF
+
+[VEC_SQRT, vec_sqrt, __builtin_vec_sqrt, __VSX__]
+ vf __builtin_vec_sqrt (vf);
+ XVSQRTSP
+ vd __builtin_vec_sqrt (vd);
+ XVSQRTDP
+
+[VEC_SR, vec_sr, __builtin_vec_sr]
+ vsc __builtin_vec_sr (vsc, vuc);
+ VSRB VSRB_VSC
+ vuc __builtin_vec_sr (vuc, vuc);
+ VSRB VSRB_VUC
+ vss __builtin_vec_sr (vss, vus);
+ VSRH VSRH_VSS
+ vus __builtin_vec_sr (vus, vus);
+ VSRH VSRH_VUS
+ vsi __builtin_vec_sr (vsi, vui);
+ VSRW VSRW_VSI
+ vui __builtin_vec_sr (vui, vui);
+ VSRW VSRW_VUI
+ vsll __builtin_vec_sr (vsll, vull);
+ VSRD VSRD_VSLL
+ vull __builtin_vec_sr (vull, vull);
+ VSRD VSRD_VULL
+
+[VEC_SRA, vec_sra, __builtin_vec_sra]
+ vsc __builtin_vec_sra (vsc, vuc);
+ VSRAB VSRAB_VSC
+ vuc __builtin_vec_sra (vuc, vuc);
+ VSRAB VSRAB_VUC
+ vss __builtin_vec_sra (vss, vus);
+ VSRAH VSRAH_VSS
+ vus __builtin_vec_sra (vus, vus);
+ VSRAH VSRAH_VUS
+ vsi __builtin_vec_sra (vsi, vui);
+ VSRAW VSRAW_VSI
+ vui __builtin_vec_sra (vui, vui);
+ VSRAW VSRAW_VUI
+ vsll __builtin_vec_sra (vsll, vull);
+ VSRAD VSRAD_VSLL
+ vull __builtin_vec_sra (vull, vull);
+ VSRAD VSRAD_VULL
+
+[VEC_SRDB, vec_srdb, __builtin_vec_srdb, _ARCH_PWR10]
+ vsc __builtin_vec_srdb (vsc, vsc, const int);
+ VSRDB_V16QI VSRDB_VSC
+ vuc __builtin_vec_srdb (vuc, vuc, const int);
+ VSRDB_V16QI VSRDB_VUC
+ vss __builtin_vec_srdb (vss, vss, const int);
+ VSRDB_V8HI VSRDB_VSS
+ vus __builtin_vec_srdb (vus, vus, const int);
+ VSRDB_V8HI VSRDB_VUS
+ vsi __builtin_vec_srdb (vsi, vsi, const int);
+ VSRDB_V4SI VSRDB_VSI
+ vui __builtin_vec_srdb (vui, vui, const int);
+ VSRDB_V4SI VSRDB_VUI
+ vsll __builtin_vec_srdb (vsll, vsll, const int);
+ VSRDB_V2DI VSRDB_VSLL
+ vull __builtin_vec_srdb (vull, vull, const int);
+ VSRDB_V2DI VSRDB_VULL
+
+[VEC_SRL, vec_srl, __builtin_vec_srl]
+ vsc __builtin_vec_srl (vsc, vuc);
+ VSR VSR_VSC
+ vuc __builtin_vec_srl (vuc, vuc);
+ VSR VSR_VUC
+ vss __builtin_vec_srl (vss, vuc);
+ VSR VSR_VSS
+ vus __builtin_vec_srl (vus, vuc);
+ VSR VSR_VUS
+ vp __builtin_vec_srl (vp, vuc);
+ VSR VSR_VP
+ vsi __builtin_vec_srl (vsi, vuc);
+ VSR VSR_VSI
+ vui __builtin_vec_srl (vui, vuc);
+ VSR VSR_VUI
+ vsll __builtin_vec_srl (vsll, vuc);
+ VSR VSR_VSLL
+ vull __builtin_vec_srl (vull, vuc);
+ VSR VSR_VULL
+
+[VEC_SRO, vec_sro, __builtin_vec_sro]
+ vsc __builtin_vec_sro (vsc, vsc);
+ VSRO VSRO_VSCS
+ vsc __builtin_vec_sro (vsc, vuc);
+ VSRO VSRO_VSCU
+ vuc __builtin_vec_sro (vuc, vsc);
+ VSRO VSRO_VUCS
+ vuc __builtin_vec_sro (vuc, vuc);
+ VSRO VSRO_VUCU
+ vss __builtin_vec_sro (vss, vsc);
+ VSRO VSRO_VSSS
+ vss __builtin_vec_sro (vss, vuc);
+ VSRO VSRO_VSSU
+ vus __builtin_vec_sro (vus, vsc);
+ VSRO VSRO_VUSS
+ vus __builtin_vec_sro (vus, vuc);
+ VSRO VSRO_VUSU
+ vp __builtin_vec_sro (vp, vsc);
+ VSRO VSRO_VPS
+ vp __builtin_vec_sro (vp, vuc);
+ VSRO VSRO_VPU
+ vsi __builtin_vec_sro (vsi, vsc);
+ VSRO VSRO_VSIS
+ vsi __builtin_vec_sro (vsi, vuc);
+ VSRO VSRO_VSIU
+ vui __builtin_vec_sro (vui, vsc);
+ VSRO VSRO_VUIS
+ vui __builtin_vec_sro (vui, vuc);
+ VSRO VSRO_VUIU
+ vsll __builtin_vec_sro (vsll, vsc);
+ VSRO VSRO_VSLLS
+ vsll __builtin_vec_sro (vsll, vuc);
+ VSRO VSRO_VSLLU
+ vull __builtin_vec_sro (vull, vsc);
+ VSRO VSRO_VULLS
+ vull __builtin_vec_sro (vull, vuc);
+ VSRO VSRO_VULLU
+ vf __builtin_vec_sro (vf, vsc);
+ VSRO VSRO_VFS
+ vf __builtin_vec_sro (vf, vuc);
+ VSRO VSRO_VFU
+
+[VEC_SRV, vec_srv, __builtin_vec_vsrv, _ARCH_PWR9]
+ vuc __builtin_vec_vsrv (vuc, vuc);
+ VSRV
+
+[VEC_ST, vec_st, __builtin_vec_st]
+ void __builtin_vec_st (vsc, signed long long, vsc *);
+ STVX_V16QI STVX_VSC
+ void __builtin_vec_st (vsc, signed long long, signed char *);
+ STVX_V16QI STVX_SC
+ void __builtin_vec_st (vuc, signed long long, vuc *);
+ STVX_V16QI STVX_VUC
+ void __builtin_vec_st (vuc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC
+ void __builtin_vec_st (vbc, signed long long, vbc *);
+ STVX_V16QI STVX_VBC
+ void __builtin_vec_st (vbc, signed long long, signed char *);
+ STVX_V16QI STVX_SC_B
+ void __builtin_vec_st (vbc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC_B
+ void __builtin_vec_st (vss, signed long long, vss *);
+ STVX_V8HI STVX_VSS
+ void __builtin_vec_st (vss, signed long long, signed short *);
+ STVX_V8HI STVX_SS
+ void __builtin_vec_st (vus, signed long long, vus *);
+ STVX_V8HI STVX_VUS
+ void __builtin_vec_st (vus, signed long long, unsigned short *);
+ STVX_V8HI STVX_US
+ void __builtin_vec_st (vbs, signed long long, vbs *);
+ STVX_V8HI STVX_VBS
+ void __builtin_vec_st (vbs, signed long long, signed short *);
+ STVX_V8HI STVX_SS_B
+ void __builtin_vec_st (vbs, signed long long, unsigned short *);
+ STVX_V8HI STVX_US_B
+ void __builtin_vec_st (vp, signed long long, vp *);
+ STVX_V8HI STVX_P
+ void __builtin_vec_st (vsi, signed long long, vsi *);
+ STVX_V4SI STVX_VSI
+ void __builtin_vec_st (vsi, signed long long, signed int *);
+ STVX_V4SI STVX_SI
+ void __builtin_vec_st (vui, signed long long, vui *);
+ STVX_V4SI STVX_VUI
+ void __builtin_vec_st (vui, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI
+ void __builtin_vec_st (vbi, signed long long, vbi *);
+ STVX_V4SI STVX_VBI
+ void __builtin_vec_st (vbi, signed long long, signed int *);
+ STVX_V4SI STVX_SI_B
+ void __builtin_vec_st (vbi, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI_B
+ void __builtin_vec_st (vsll, signed long long, vsll *);
+ STVX_V2DI STVX_VSLL
+ void __builtin_vec_st (vsll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL
+ void __builtin_vec_st (vull, signed long long, vull *);
+ STVX_V2DI STVX_VULL
+ void __builtin_vec_st (vull, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL
+ void __builtin_vec_st (vbll, signed long long, vbll *);
+ STVX_V2DI STVX_VBLL
+ void __builtin_vec_st (vbll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL_B
+ void __builtin_vec_st (vbll, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL_B
+ void __builtin_vec_st (vf, signed long long, vf *);
+ STVX_V4SF STVX_VF
+ void __builtin_vec_st (vf, signed long long, float *);
+ STVX_V4SF STVX_F
+ void __builtin_vec_st (vd, signed long long, vd *);
+ STVX_V2DF STVX_VD
+ void __builtin_vec_st (vd, signed long long, double *);
+ STVX_V2DF STVX_D
+
+[VEC_STE, vec_ste, __builtin_vec_ste]
+ void __builtin_vec_ste (vsc, signed long long, signed char *);
+ STVEBX STVEBX_S
+ void __builtin_vec_ste (vuc, signed long long, unsigned char *);
+ STVEBX STVEBX_U
+ void __builtin_vec_ste (vbc, signed long long, signed char *);
+ STVEBX STVEBX_BS
+ void __builtin_vec_ste (vbc, signed long long, unsigned char *);
+ STVEBX STVEBX_BU
+ void __builtin_vec_ste (vss, signed long long, signed short *);
+ STVEHX STVEHX_S
+ void __builtin_vec_ste (vus, signed long long, unsigned short *);
+ STVEHX STVEHX_U
+ void __builtin_vec_ste (vbs, signed long long, signed short *);
+ STVEHX STVEHX_BS
+ void __builtin_vec_ste (vbs, signed long long, unsigned short *);
+ STVEHX STVEHX_BU
+ void __builtin_vec_ste (vp, signed long long, signed short *);
+ STVEHX STVEHX_PS
+ void __builtin_vec_ste (vp, signed long long, unsigned short *);
+ STVEHX STVEHX_PU
+ void __builtin_vec_ste (vsi, signed long long, signed int *);
+ STVEWX STVEWX_S
+ void __builtin_vec_ste (vui, signed long long, unsigned int *);
+ STVEWX STVEWX_U
+ void __builtin_vec_ste (vbi, signed long long, signed int *);
+ STVEWX STVEWX_BS
+ void __builtin_vec_ste (vbi, signed long long, unsigned int *);
+ STVEWX STVEWX_BU
+ void __builtin_vec_ste (vf, signed long long, float *);
+ STVEWX STVEWX_F
+
+[VEC_STL, vec_stl, __builtin_vec_stl]
+ void __builtin_vec_stl (vsc, signed long long, vsc *);
+ STVXL_V16QI STVXL_VSC
+ void __builtin_vec_stl (vsc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC
+ void __builtin_vec_stl (vuc, signed long long, vuc *);
+ STVXL_V16QI STVXL_VUC
+ void __builtin_vec_stl (vuc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC
+ void __builtin_vec_stl (vbc, signed long long, vbc *);
+ STVXL_V16QI STVXL_VBC
+ void __builtin_vec_stl (vbc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC_B
+ void __builtin_vec_stl (vbc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC_B
+ void __builtin_vec_stl (vss, signed long long, vss *);
+ STVXL_V8HI STVXL_VSS
+ void __builtin_vec_stl (vss, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS
+ void __builtin_vec_stl (vus, signed long long, vus *);
+ STVXL_V8HI STVXL_VUS
+ void __builtin_vec_stl (vus, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US
+ void __builtin_vec_stl (vbs, signed long long, vbs *);
+ STVXL_V8HI STVXL_VBS
+ void __builtin_vec_stl (vbs, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS_B
+ void __builtin_vec_stl (vbs, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US_B
+ void __builtin_vec_stl (vp, signed long long, vp *);
+ STVXL_V8HI STVXL_P
+ void __builtin_vec_stl (vsi, signed long long, vsi *);
+ STVXL_V4SI STVXL_VSI
+ void __builtin_vec_stl (vsi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI
+ void __builtin_vec_stl (vui, signed long long, vui *);
+ STVXL_V4SI STVXL_VUI
+ void __builtin_vec_stl (vui, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI
+ void __builtin_vec_stl (vbi, signed long long, vbi *);
+ STVXL_V4SI STVXL_VBI
+ void __builtin_vec_stl (vbi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI_B
+ void __builtin_vec_stl (vbi, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI_B
+ void __builtin_vec_stl (vsll, signed long long, vsll *);
+ STVXL_V2DI STVXL_VSLL
+ void __builtin_vec_stl (vsll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL
+ void __builtin_vec_stl (vull, signed long long, vull *);
+ STVXL_V2DI STVXL_VULL
+ void __builtin_vec_stl (vull, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL
+ void __builtin_vec_stl (vbll, signed long long, vbll *);
+ STVXL_V2DI STVXL_VBLL
+ void __builtin_vec_stl (vbll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL_B
+ void __builtin_vec_stl (vbll, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL_B
+ void __builtin_vec_stl (vf, signed long long, vf *);
+ STVXL_V4SF STVXL_VF
+ void __builtin_vec_stl (vf, signed long long, float *);
+ STVXL_V4SF STVXL_F
+ void __builtin_vec_stl (vd, signed long long, vd *);
+ STVXL_V2DF STVXL_VD
+ void __builtin_vec_stl (vd, signed long long, double *);
+ STVXL_V2DF STVXL_D
+
+[VEC_STRIL, vec_stril, __builtin_vec_stril, _ARCH_PWR10]
+ vuc __builtin_vec_stril (vuc);
+ VSTRIBL VSTRIBL_U
+ vsc __builtin_vec_stril (vsc);
+ VSTRIBL VSTRIBL_S
+ vus __builtin_vec_stril (vus);
+ VSTRIHL VSTRIHL_U
+ vss __builtin_vec_stril (vss);
+ VSTRIHL VSTRIHL_S
+
+[VEC_STRIL_P, vec_stril_p, __builtin_vec_stril_p, _ARCH_PWR10]
+ signed int __builtin_vec_stril_p (vuc);
+ VSTRIBL_P VSTRIBL_PU
+ signed int __builtin_vec_stril_p (vsc);
+ VSTRIBL_P VSTRIBL_PS
+ signed int __builtin_vec_stril_p (vus);
+ VSTRIHL_P VSTRIHL_PU
+ signed int __builtin_vec_stril_p (vss);
+ VSTRIHL_P VSTRIHL_PS
+
+[VEC_STRIR, vec_strir, __builtin_vec_strir, _ARCH_PWR10]
+ vuc __builtin_vec_strir (vuc);
+ VSTRIBR VSTRIBR_U
+ vsc __builtin_vec_strir (vsc);
+ VSTRIBR VSTRIBR_S
+ vus __builtin_vec_strir (vus);
+ VSTRIHR VSTRIHR_U
+ vss __builtin_vec_strir (vss);
+ VSTRIHR VSTRIHR_S
+
+[VEC_STRIR_P, vec_strir_p, __builtin_vec_strir_p, _ARCH_PWR10]
+ signed int __builtin_vec_strir_p (vuc);
+ VSTRIBR_P VSTRIBR_PU
+ signed int __builtin_vec_strir_p (vsc);
+ VSTRIBR_P VSTRIBR_PS
+ signed int __builtin_vec_strir_p (vus);
+ VSTRIHR_P VSTRIHR_PU
+ signed int __builtin_vec_strir_p (vss);
+ VSTRIHR_P VSTRIHR_PS
+
+[VEC_STVLX, vec_stvlx, __builtin_vec_stvlx, __PPU__]
+ void __builtin_vec_stvlx (vbc, signed long long, vbc *);
+ STVLX STVLX_VBC
+ void __builtin_vec_stvlx (vsc, signed long long, vsc *);
+ STVLX STVLX_VSC
+ void __builtin_vec_stvlx (vsc, signed long long, signed char *);
+ STVLX STVLX_SC
+ void __builtin_vec_stvlx (vuc, signed long long, vuc *);
+ STVLX STVLX_VUC
+ void __builtin_vec_stvlx (vuc, signed long long, unsigned char *);
+ STVLX STVLX_UC
+ void __builtin_vec_stvlx (vbs, signed long long, vbs *);
+ STVLX STVLX_VBS
+ void __builtin_vec_stvlx (vss, signed long long, vss *);
+ STVLX STVLX_VSS
+ void __builtin_vec_stvlx (vss, signed long long, signed short *);
+ STVLX STVLX_SS
+ void __builtin_vec_stvlx (vus, signed long long, vus *);
+ STVLX STVLX_VUS
+ void __builtin_vec_stvlx (vus, signed long long, unsigned short *);
+ STVLX STVLX_US
+ void __builtin_vec_stvlx (vp, signed long long, vp *);
+ STVLX STVLX_VP
+ void __builtin_vec_stvlx (vbi, signed long long, vbi *);
+ STVLX STVLX_VBI
+ void __builtin_vec_stvlx (vsi, signed long long, vsi *);
+ STVLX STVLX_VSI
+ void __builtin_vec_stvlx (vsi, signed long long, signed int *);
+ STVLX STVLX_SI
+ void __builtin_vec_stvlx (vui, signed long long, vui *);
+ STVLX STVLX_VUI
+ void __builtin_vec_stvlx (vui, signed long long, unsigned int *);
+ STVLX STVLX_UI
+ void __builtin_vec_stvlx (vf, signed long long, vf *);
+ STVLX STVLX_VF
+ void __builtin_vec_stvlx (vf, signed long long, float *);
+ STVLX STVLX_F
+
+[VEC_STVLXL, vec_stvlxl, __builtin_vec_stvlxl, __PPU__]
+ void __builtin_vec_stvlxl (vbc, signed long long, vbc *);
+ STVLXL STVLXL_VBC
+ void __builtin_vec_stvlxl (vsc, signed long long, vsc *);
+ STVLXL STVLXL_VSC
+ void __builtin_vec_stvlxl (vsc, signed long long, signed char *);
+ STVLXL STVLXL_SC
+ void __builtin_vec_stvlxl (vuc, signed long long, vuc *);
+ STVLXL STVLXL_VUC
+ void __builtin_vec_stvlxl (vuc, signed long long, unsigned char *);
+ STVLXL STVLXL_UC
+ void __builtin_vec_stvlxl (vbs, signed long long, vbs *);
+ STVLXL STVLXL_VBS
+ void __builtin_vec_stvlxl (vss, signed long long, vss *);
+ STVLXL STVLXL_VSS
+ void __builtin_vec_stvlxl (vss, signed long long, signed short *);
+ STVLXL STVLXL_SS
+ void __builtin_vec_stvlxl (vus, signed long long, vus *);
+ STVLXL STVLXL_VUS
+ void __builtin_vec_stvlxl (vus, signed long long, unsigned short *);
+ STVLXL STVLXL_US
+ void __builtin_vec_stvlxl (vp, signed long long, vp *);
+ STVLXL STVLXL_VP
+ void __builtin_vec_stvlxl (vbi, signed long long, vbi *);
+ STVLXL STVLXL_VBI
+ void __builtin_vec_stvlxl (vsi, signed long long, vsi *);
+ STVLXL STVLXL_VSI
+ void __builtin_vec_stvlxl (vsi, signed long long, signed int *);
+ STVLXL STVLXL_SI
+ void __builtin_vec_stvlxl (vui, signed long long, vui *);
+ STVLXL STVLXL_VUI
+ void __builtin_vec_stvlxl (vui, signed long long, unsigned int *);
+ STVLXL STVLXL_UI
+ void __builtin_vec_stvlxl (vf, signed long long, vf *);
+ STVLXL STVLXL_VF
+ void __builtin_vec_stvlxl (vf, signed long long, float *);
+ STVLXL STVLXL_F
+
+[VEC_STVRX, vec_stvrx, __builtin_vec_stvrx, __PPU__]
+ void __builtin_vec_stvrx (vbc, signed long long, vbc *);
+ STVRX STVRX_VBC
+ void __builtin_vec_stvrx (vsc, signed long long, vsc *);
+ STVRX STVRX_VSC
+ void __builtin_vec_stvrx (vsc, signed long long, signed char *);
+ STVRX STVRX_SC
+ void __builtin_vec_stvrx (vuc, signed long long, vuc *);
+ STVRX STVRX_VUC
+ void __builtin_vec_stvrx (vuc, signed long long, unsigned char *);
+ STVRX STVRX_UC
+ void __builtin_vec_stvrx (vbs, signed long long, vbs *);
+ STVRX STVRX_VBS
+ void __builtin_vec_stvrx (vss, signed long long, vss *);
+ STVRX STVRX_VSS
+ void __builtin_vec_stvrx (vss, signed long long, signed short *);
+ STVRX STVRX_SS
+ void __builtin_vec_stvrx (vus, signed long long, vus *);
+ STVRX STVRX_VUS
+ void __builtin_vec_stvrx (vus, signed long long, unsigned short *);
+ STVRX STVRX_US
+ void __builtin_vec_stvrx (vp, signed long long, vp *);
+ STVRX STVRX_VP
+ void __builtin_vec_stvrx (vbi, signed long long, vbi *);
+ STVRX STVRX_VBI
+ void __builtin_vec_stvrx (vsi, signed long long, vsi *);
+ STVRX STVRX_VSI
+ void __builtin_vec_stvrx (vsi, signed long long, signed int *);
+ STVRX STVRX_SI
+ void __builtin_vec_stvrx (vui, signed long long, vui *);
+ STVRX STVRX_VUI
+ void __builtin_vec_stvrx (vui, signed long long, unsigned int *);
+ STVRX STVRX_UI
+ void __builtin_vec_stvrx (vf, signed long long, vf *);
+ STVRX STVRX_VF
+ void __builtin_vec_stvrx (vf, signed long long, float *);
+ STVRX STVRX_F
+
+[VEC_STVRXL, vec_stvrxl, __builtin_vec_stvrxl, __PPU__]
+ void __builtin_vec_stvrxl (vbc, signed long long, vbc *);
+ STVRXL STVRXL_VBC
+ void __builtin_vec_stvrxl (vsc, signed long long, vsc *);
+ STVRXL STVRXL_VSC
+ void __builtin_vec_stvrxl (vsc, signed long long, signed char *);
+ STVRXL STVRXL_SC
+ void __builtin_vec_stvrxl (vuc, signed long long, vuc *);
+ STVRXL STVRXL_VUC
+ void __builtin_vec_stvrxl (vuc, signed long long, unsigned char *);
+ STVRXL STVRXL_UC
+ void __builtin_vec_stvrxl (vbs, signed long long, vbs *);
+ STVRXL STVRXL_VBS
+ void __builtin_vec_stvrxl (vss, signed long long, vss *);
+ STVRXL STVRXL_VSS
+ void __builtin_vec_stvrxl (vss, signed long long, signed short *);
+ STVRXL STVRXL_SS
+ void __builtin_vec_stvrxl (vus, signed long long, vus *);
+ STVRXL STVRXL_VUS
+ void __builtin_vec_stvrxl (vus, signed long long, unsigned short *);
+ STVRXL STVRXL_US
+ void __builtin_vec_stvrxl (vp, signed long long, vp *);
+ STVRXL STVRXL_VP
+ void __builtin_vec_stvrxl (vbi, signed long long, vbi *);
+ STVRXL STVRXL_VBI
+ void __builtin_vec_stvrxl (vsi, signed long long, vsi *);
+ STVRXL STVRXL_VSI
+ void __builtin_vec_stvrxl (vsi, signed long long, signed int *);
+ STVRXL STVRXL_SI
+ void __builtin_vec_stvrxl (vui, signed long long, vui *);
+ STVRXL STVRXL_VUI
+ void __builtin_vec_stvrxl (vui, signed long long, unsigned int *);
+ STVRXL STVRXL_UI
+ void __builtin_vec_stvrxl (vf, signed long long, vf *);
+ STVRXL STVRXL_VF
+ void __builtin_vec_stvrxl (vf, signed long long, float *);
+ STVRXL STVRXL_F
+
+[VEC_STXVL, vec_xst_len, __builtin_vec_stxvl, _ARCH_PPC64_PWR9]
+ void __builtin_vec_stxvl (vsc, signed char *, unsigned long long);
+ STXVL STXVL_VSC
+ void __builtin_vec_stxvl (vuc, unsigned char *, unsigned long long);
+ STXVL STXVL_VUC
+ void __builtin_vec_stxvl (vss, signed short *, unsigned long long);
+ STXVL STXVL_VSS
+ void __builtin_vec_stxvl (vus, unsigned short *, unsigned long long);
+ STXVL STXVL_VUS
+ void __builtin_vec_stxvl (vsi, signed int *, unsigned long long);
+ STXVL STXVL_VSI
+ void __builtin_vec_stxvl (vui, unsigned int *, unsigned long long);
+ STXVL STXVL_VUI
+ void __builtin_vec_stxvl (vsll, signed long long *, unsigned long long);
+ STXVL STXVL_VSLL
+ void __builtin_vec_stxvl (vull, unsigned long long *, unsigned long long);
+ STXVL STXVL_VULL
+ void __builtin_vec_stxvl (vsq, signed __int128 *, unsigned long long);
+ STXVL STXVL_VSQ
+ void __builtin_vec_stxvl (vuq, unsigned __int128 *, unsigned long long);
+ STXVL STXVL_VUQ
+ void __builtin_vec_stxvl (vf, float *, unsigned long long);
+ STXVL STXVL_VF
+ void __builtin_vec_stxvl (vd, double *, unsigned long long);
+ STXVL STXVL_VD
+
+[VEC_SUB, vec_sub, __builtin_vec_sub]
+ vsc __builtin_vec_sub (vsc, vsc);
+ VSUBUBM VSUBUBM_VSC
+ vuc __builtin_vec_sub (vuc, vuc);
+ VSUBUBM VSUBUBM_VUC
+ vss __builtin_vec_sub (vss, vss);
+ VSUBUHM VSUBUHM_VSS
+ vus __builtin_vec_sub (vus, vus);
+ VSUBUHM VSUBUHM_VUS
+ vsi __builtin_vec_sub (vsi, vsi);
+ VSUBUWM VSUBUWM_VSI
+ vui __builtin_vec_sub (vui, vui);
+ VSUBUWM VSUBUWM_VUI
+ vsll __builtin_vec_sub (vsll, vsll);
+ VSUBUDM VSUBUDM_VSLL
+ vull __builtin_vec_sub (vull, vull);
+ VSUBUDM VSUBUDM_VULL
+ vsq __builtin_vec_sub (vsq, vsq);
+ VSUBUQM VSUBUQM_VSQ
+ vuq __builtin_vec_sub (vuq, vuq);
+ VSUBUQM VSUBUQM_VUQ
+ vf __builtin_vec_sub (vf, vf);
+ XVSUBSP
+ vd __builtin_vec_sub (vd, vd);
+ XVSUBDP
+
+[VEC_SUBC, vec_subc, __builtin_vec_subc]
+ vsi __builtin_vec_subc (vsi, vsi);
+ VSUBCUW VSUBCUW_VSI
+ vui __builtin_vec_subc (vui, vui);
+ VSUBCUW VSUBCUW_VUI
+ vsq __builtin_vec_subc (vsq, vsq);
+ VSUBCUQ VSUBCUQ_VSQ
+ vuq __builtin_vec_subc (vuq, vuq);
+ VSUBCUQ VSUBCUQ_VUQ
+
+[VEC_SUBS, vec_subs, __builtin_vec_subs]
+ vuc __builtin_vec_subs (vuc, vuc);
+ VSUBUBS
+ vsc __builtin_vec_subs (vsc, vsc);
+ VSUBSBS
+ vus __builtin_vec_subs (vus, vus);
+ VSUBUHS
+ vss __builtin_vec_subs (vss, vss);
+ VSUBSHS
+ vui __builtin_vec_subs (vui, vui);
+ VSUBUWS
+ vsi __builtin_vec_subs (vsi, vsi);
+ VSUBSWS
+
+[VEC_SUM2S, vec_sum2s, __builtin_vec_sum2s]
+ vsi __builtin_vec_sum2s (vsi, vsi);
+ VSUM2SWS
+
+[VEC_SUM4S, vec_sum4s, __builtin_vec_sum4s]
+ vui __builtin_vec_sum4s (vuc, vui);
+ VSUM4UBS
+ vsi __builtin_vec_sum4s (vsc, vui);
+ VSUM4SBS
+ vsi __builtin_vec_sum4s (vss, vsi);
+ VSUM4SHS
+
+[VEC_SUMS, vec_sums, __builtin_vec_sums]
+ vsi __builtin_vec_sums (vsi, vsi);
+ VSUMSWS
+
+[VEC_TERNARYLOGIC, vec_ternarylogic, __builtin_vec_xxeval, _ARCH_PWR10]
+ vuc __builtin_vec_xxeval (vuc, vuc, vuc, const int);
+ XXEVAL XXEVAL_VUC
+ vus __builtin_vec_xxeval (vus, vus, vus, const int);
+ XXEVAL XXEVAL_VUS
+ vui __builtin_vec_xxeval (vui, vui, vui, const int);
+ XXEVAL XXEVAL_VUI
+ vull __builtin_vec_xxeval (vull, vull, vull, const int);
+ XXEVAL XXEVAL_VULL
+ vuq __builtin_vec_xxeval (vuq, vuq, vuq, const int);
+ XXEVAL XXEVAL_VUQ
+
+[VEC_TEST_LSBB_ALL_ONES, vec_test_lsbb_all_ones, __builtin_vec_xvtlsbb_all_ones, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_ones (vuc);
+ XVTLSBB_ONES
+
+[VEC_TEST_LSBB_ALL_ZEROS, vec_test_lsbb_all_zeros, __builtin_vec_xvtlsbb_all_zeros, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_zeros (vuc);
+ XVTLSBB_ZEROS
+
+[VEC_TRUNC, vec_trunc, __builtin_vec_trunc]
+ vf __builtin_vec_trunc (vf);
+ XVRSPIZ
+ vd __builtin_vec_trunc (vd);
+ XVRDPIZ
+
+[VEC_TSTSFI_GT, SKIP, __builtin_dfp_dtstsfi_gt]
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal64);
+ TSTSFI_GT_DD
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal128);
+ TSTSFI_GT_TD
+
+[VEC_TSTSFI_EQ, SKIP, __builtin_dfp_dtstsfi_eq]
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal64);
+ TSTSFI_EQ_DD
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal128);
+ TSTSFI_EQ_TD
+
+[VEC_TSTSFI_LT, SKIP, __builtin_dfp_dtstsfi_lt]
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal64);
+ TSTSFI_LT_DD
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal128);
+ TSTSFI_LT_TD
+
+[VEC_TSTSFI_OV, SKIP, __builtin_dfp_dtstsfi_ov]
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal64);
+ TSTSFI_OV_DD
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal128);
+ TSTSFI_OV_TD
+
+[VEC_UNPACKH, vec_unpackh, __builtin_vec_unpackh]
+ vss __builtin_vec_unpackh (vsc);
+ VUPKHSB VUPKHSB_VSC
+ vbs __builtin_vec_unpackh (vbc);
+ VUPKHSB VUPKHSB_VBC
+ vsi __builtin_vec_unpackh (vss);
+ VUPKHSH VUPKHSH_VSS
+ vbi __builtin_vec_unpackh (vbs);
+ VUPKHSH VUPKHSH_VBS
+ vui __builtin_vec_unpackh (vp);
+ VUPKHPX
+ vsll __builtin_vec_unpackh (vsi);
+ VUPKHSW VUPKHSW_VSI
+ vbll __builtin_vec_unpackh (vbi);
+ VUPKHSW VUPKHSW_VBI
+ vd __builtin_vec_unpackh (vf);
+ DOUBLEH_V4SF VUPKHF
+
+[VEC_UNPACKL, vec_unpackl, __builtin_vec_unpackl]
+ vss __builtin_vec_unpackl (vsc);
+ VUPKLSB VUPKLSB_VSC
+ vbs __builtin_vec_unpackl (vbc);
+ VUPKLSB VUPKLSB_VBC
+ vsi __builtin_vec_unpackl (vss);
+ VUPKLSH VUPKLSH_VSS
+ vbi __builtin_vec_unpackl (vbs);
+ VUPKLSH VUPKLSH_VBS
+ vui __builtin_vec_unpackl (vp);
+ VUPKLPX
+ vsll __builtin_vec_unpackl (vsi);
+ VUPKLSW VUPKLSW_VSI
+ vbll __builtin_vec_unpackl (vbi);
+ VUPKLSW VUPKLSW_VBI
+ vd __builtin_vec_unpackl (vf);
+ DOUBLEL_V4SF VUPKLF
+
+[VEC_UNSIGNED, vec_unsigned, __builtin_vec_vunsigned]
+ vui __builtin_vec_vunsigned (vf);
+ VEC_VUNSIGNED_V4SF
+ vull __builtin_vec_vunsigned (vd);
+ VEC_VUNSIGNED_V2DF
+
+[VEC_UNSIGNED2, vec_unsigned2, __builtin_vec_vunsigned2]
+ vui __builtin_vec_vunsigned2 (vd, vd);
+ VEC_VUNSIGNED2_V2DF
+
+[VEC_UNSIGNEDO, vec_unsignedo, __builtin_vec_vunsignedo]
+ vui __builtin_vec_vunsignedo (vd);
+ VEC_VUNSIGNEDO_V2DF
+
+; Not sure this should exist, but it does. This group is redundant with
+; vec_addec, but the next three don't have an alias.
+[VEC_VADDECUQ, vec_vaddecuq, __builtin_vec_vaddecuq]
+ vsq __builtin_vec_vaddecuq (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ2
+ vuq __builtin_vec_vaddecuq (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ2
+
+; Not sure this should exist, but it does.
+[VEC_VADDEUQM, vec_vaddeuqm, __builtin_vec_vaddeuqm]
+ vsq __builtin_vec_vaddeuqm (vsq, vsq, vsq);
+ VADDEUQM VADDEUQM_VSQ
+ vuq __builtin_vec_vaddeuqm (vuq, vuq, vuq);
+ VADDEUQM VADDEUQM_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBECUQ, vec_vsubecuq, __builtin_vec_vsubecuq]
+ vsq __builtin_vec_vsubecuq (vsq, vsq, vsq);
+ VSUBECUQ VSUBECUQ_VSQ
+ vuq __builtin_vec_vsubecuq (vuq, vuq, vuq);
+ VSUBECUQ VSUBECUQ_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBEUQM, vec_vsubeuqm, __builtin_vec_vsubeuqm]
+ vsq __builtin_vec_vsubeuqm (vsq, vsq, vsq);
+ VSUBEUQM VSUBEUQM_VSQ
+ vuq __builtin_vec_vsubeuqm (vuq, vuq, vuq);
+ VSUBEUQM VSUBEUQM_VUQ
+
+[VEC_VEE, vec_extract_exp, __builtin_vec_extract_exp, _ARCH_PWR9]
+ vui __builtin_vec_extract_exp (vf);
+ VEESP
+ vull __builtin_vec_extract_exp (vd);
+ VEEDP
+
+[VEC_VES, vec_extract_sig, __builtin_vec_extract_sig, _ARCH_PWR9]
+ vui __builtin_vec_extract_sig (vf);
+ VESSP
+ vull __builtin_vec_extract_sig (vd);
+ VESDP
+
+[VEC_VIE, vec_insert_exp, __builtin_vec_insert_exp, _ARCH_PWR9]
+ vf __builtin_vec_insert_exp (vf, vui);
+ VIESP VIESP_VF
+ vf __builtin_vec_insert_exp (vui, vui);
+ VIESP VIESP_VUI
+ vd __builtin_vec_insert_exp (vd, vull);
+ VIEDP VIEDP_VD
+ vd __builtin_vec_insert_exp (vull, vull);
+ VIEDP VIEDP_VULL
+
+[VEC_VSCEEQ, scalar_cmp_exp_eq, __builtin_vec_scalar_cmp_exp_eq, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_eq (double, double);
+ VSCEDPEQ
+ signed int __builtin_vec_scalar_cmp_exp_eq (_Float128, _Float128);
+ VSCEQPEQ
+
+[VEC_VSCEGT, scalar_cmp_exp_gt, __builtin_vec_scalar_cmp_exp_gt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_gt (double, double);
+ VSCEDPGT
+ signed int __builtin_vec_scalar_cmp_exp_gt (_Float128, _Float128);
+ VSCEQPGT
+
+[VEC_VSCELT, scalar_cmp_exp_lt, __builtin_vec_scalar_cmp_exp_lt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_lt (double, double);
+ VSCEDPLT
+ signed int __builtin_vec_scalar_cmp_exp_lt (_Float128, _Float128);
+ VSCEQPLT
+
+[VEC_VSCEUO, scalar_cmp_exp_unordered, __builtin_vec_scalar_cmp_exp_unordered, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_unordered (double, double);
+ VSCEDPUO
+ signed int __builtin_vec_scalar_cmp_exp_unordered (_Float128, _Float128);
+ VSCEQPUO
+
+[VEC_VSEE, scalar_extract_exp, __builtin_vec_scalar_extract_exp, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_exp (double);
+ VSEEDP
+ unsigned int __builtin_vec_scalar_extract_exp (_Float128);
+ VSEEQP
+
+[VEC_VSES, scalar_extract_sig, __builtin_vec_scalar_extract_sig, _ARCH_PWR9]
+ unsigned long long __builtin_vec_scalar_extract_sig (double);
+ VSESDP
+ unsigned __int128 __builtin_vec_scalar_extract_sig (_Float128);
+ VSESQP
+
+[VEC_VSIE, scalar_insert_exp, __builtin_vec_scalar_insert_exp, _ARCH_PWR9]
+ double __builtin_vec_scalar_insert_exp (unsigned int, unsigned int);
+ VSIEDP VSIEDP_UI
+ double __builtin_vec_scalar_insert_exp (double, unsigned int);
+ VSIEDP VSIEDP_D
+ _Float128 __builtin_vec_scalar_insert_exp (unsigned long long, unsigned long long);
+ VSIEQP VSIEQP_ULL
+ _Float128 __builtin_vec_scalar_insert_exp (_Float128, unsigned long long);
+ VSIEQP VSIEQP_F128
+
+[VEC_VSTDC, scalar_test_data_class, __builtin_vec_scalar_test_data_class, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_data_class (float, signed int);
+ VSTDCSP
+ bool __builtin_vec_scalar_test_data_class (double, signed int);
+ VSTDCDP
+ bool __builtin_vec_scalar_test_data_class (_Float128, signed int);
+ VSTDCQP
+
+[VEC_VSTDCN, scalar_test_neg, __builtin_vec_scalar_test_neg, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_neg (float);
+ VSTDCNSP
+ bool __builtin_vec_scalar_test_neg (double);
+ VSTDCNDP
+ bool __builtin_vec_scalar_test_neg (_Float128);
+ VSTDCNQP
+
+[VEC_VTDC, vec_test_data_class, __builtin_vec_test_data_class, _ARCH_PWR9]
+ vbi __builtin_vec_test_data_class (vf, signed int);
+ VTDCSP
+ vbll __builtin_vec_test_data_class (vd, signed int);
+ VTDCDP
+
+[VEC_XL, vec_xl, __builtin_vec_vsx_ld, __VSX__]
+ vsc __builtin_vec_vsx_ld (signed long long, vsc *);
+ LXVW4X_V4SI LXVW4X_VSC
+ vsc __builtin_vec_vsx_ld (signed long long, signed char *);
+ LXVW4X_V4SI LXVW4X_SC
+ vuc __builtin_vec_vsx_ld (signed long long, vuc *);
+ LXVW4X_V4SI LXVW4X_VUC
+ vuc __builtin_vec_vsx_ld (signed long long, unsigned char *);
+ LXVW4X_V4SI LXVW4X_UC
+ vss __builtin_vec_vsx_ld (signed long long, vss *);
+ LXVW4X_V4SI LXVW4X_VSS
+ vss __builtin_vec_vsx_ld (signed long long, signed short *);
+ LXVW4X_V4SI LXVW4X_SS
+ vus __builtin_vec_vsx_ld (signed long long, vus *);
+ LXVW4X_V4SI LXVW4X_VUS
+ vus __builtin_vec_vsx_ld (signed long long, unsigned short *);
+ LXVW4X_V4SI LXVW4X_US
+ vsi __builtin_vec_vsx_ld (signed long long, vsi *);
+ LXVW4X_V4SI LXVW4X_VSI
+ vsi __builtin_vec_vsx_ld (signed long long, signed int *);
+ LXVW4X_V4SI LXVW4X_SI
+ vui __builtin_vec_vsx_ld (signed long long, vui *);
+ LXVW4X_V4SI LXVW4X_VUI
+ vui __builtin_vec_vsx_ld (signed long long, unsigned int *);
+ LXVW4X_V4SI LXVW4X_UI
+ vsll __builtin_vec_vsx_ld (signed long long, vsll *);
+ LXVD2X_V2DI LXVD2X_VSLL
+ vsll __builtin_vec_vsx_ld (signed long long, signed long long *);
+ LXVD2X_V2DI LXVD2X_SLL
+ vull __builtin_vec_vsx_ld (signed long long, vull *);
+ LXVD2X_V2DI LXVD2X_VULL
+ vull __builtin_vec_vsx_ld (signed long long, unsigned long long *);
+ LXVD2X_V2DI LXVD2X_ULL
+ vsq __builtin_vec_vsx_ld (signed long long, vsq *);
+ LXVD2X_V1TI LXVD2X_VSQ
+ vsq __builtin_vec_vsx_ld (signed long long, signed __int128 *);
+ LXVD2X_V1TI LXVD2X_SQ
+ vuq __builtin_vec_vsx_ld (signed long long, unsigned __int128 *);
+ LXVD2X_V1TI LXVD2X_UQ
+ vf __builtin_vec_vsx_ld (signed long long, vf *);
+ LXVW4X_V4SF LXVW4X_VF
+ vf __builtin_vec_vsx_ld (signed long long, float *);
+ LXVW4X_V4SF LXVW4X_F
+ vd __builtin_vec_vsx_ld (signed long long, vd *);
+ LXVD2X_V2DF LXVD2X_VD
+ vd __builtin_vec_vsx_ld (signed long long, double *);
+ LXVD2X_V2DF LXVD2X_D
+
+[VEC_XL_BE, vec_xl_be, __builtin_vec_xl_be, __VSX__]
+ vsc __builtin_vec_xl_be (signed long long, vsc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VSC
+ vsc __builtin_vec_xl_be (signed long long, signed char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_SC
+ vuc __builtin_vec_xl_be (signed long long, vuc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VUC
+ vuc __builtin_vec_xl_be (signed long long, unsigned char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_UC
+ vss __builtin_vec_xl_be (signed long long, vss *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VSS
+ vss __builtin_vec_xl_be (signed long long, signed short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_SS
+ vus __builtin_vec_xl_be (signed long long, vus *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VUS
+ vus __builtin_vec_xl_be (signed long long, unsigned short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_US
+ vsi __builtin_vec_xl_be (signed long long, vsi *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VSI
+ vsi __builtin_vec_xl_be (signed long long, signed int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_SI
+ vui __builtin_vec_xl_be (signed long long, vui *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VUI
+ vui __builtin_vec_xl_be (signed long long, unsigned int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_UI
+ vsll __builtin_vec_xl_be (signed long long, vsll *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VSLL
+ vsll __builtin_vec_xl_be (signed long long, signed long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_SLL
+ vull __builtin_vec_xl_be (signed long long, vull *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VULL
+ vull __builtin_vec_xl_be (signed long long, unsigned long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_ULL
+ vsq __builtin_vec_xl_be (signed long long, signed __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_SQ
+ vuq __builtin_vec_xl_be (signed long long, unsigned __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_UQ
+ vf __builtin_vec_xl_be (signed long long, vf *);
+ LD_ELEMREV_V4SF LD_ELEMREV_VF
+ vf __builtin_vec_xl_be (signed long long, float *);
+ LD_ELEMREV_V4SF LD_ELEMREV_F
+ vd __builtin_vec_xl_be (signed long long, vd *);
+ LD_ELEMREV_V2DF LD_ELEMREV_VD
+ vd __builtin_vec_xl_be (signed long long, double *);
+ LD_ELEMREV_V2DF LD_ELEMREV_DD
+
+[VEC_XL_LEN_R, vec_xl_len_r, __builtin_vec_xl_len_r, _ARCH_PPC64_PWR9]
+ vuc __builtin_vec_xl_len_r (unsigned char *, unsigned long long);
+ XL_LEN_R
+
+[VEC_XL_SEXT, vec_xl_sext, __builtin_vec_xl_sext, _ARCH_PWR10]
+ vsq __builtin_vec_xl_sext (signed long long, signed char *);
+ SE_LXVRBX
+ vsq __builtin_vec_xl_sext (signed long long, signed short *);
+ SE_LXVRHX
+ vsq __builtin_vec_xl_sext (signed long long, signed int *);
+ SE_LXVRWX
+ vsq __builtin_vec_xl_sext (signed long long, signed long long *);
+ SE_LXVRDX
+
+[VEC_XL_ZEXT, vec_xl_zext, __builtin_vec_xl_zext, _ARCH_PWR10]
+ vsq __builtin_vec_xl_zext (signed long long, signed char *);
+ ZE_LXVRBX
+ vsq __builtin_vec_xl_zext (signed long long, signed short *);
+ ZE_LXVRHX
+ vsq __builtin_vec_xl_zext (signed long long, signed int *);
+ ZE_LXVRWX
+ vsq __builtin_vec_xl_zext (signed long long, signed long long *);
+ ZE_LXVRDX
+
+[VEC_XOR, vec_xor, __builtin_vec_xor]
+ vsc __builtin_vec_xor (vsc, vsc);
+ VXOR_V16QI
+ vuc __builtin_vec_xor (vuc, vuc);
+ VXOR_V16QI_UNS VXOR_VUC
+ vbc __builtin_vec_xor (vbc, vbc);
+ VXOR_V16QI_UNS VXOR_VBC
+ vss __builtin_vec_xor (vss, vss);
+ VXOR_V8HI
+ vus __builtin_vec_xor (vus, vus);
+ VXOR_V8HI_UNS VXOR_VUS
+ vbs __builtin_vec_xor (vbs, vbs);
+ VXOR_V8HI_UNS VXOR_VBS
+ vsi __builtin_vec_xor (vsi, vsi);
+ VXOR_V4SI
+ vui __builtin_vec_xor (vui, vui);
+ VXOR_V4SI_UNS VXOR_VUI
+ vbi __builtin_vec_xor (vbi, vbi);
+ VXOR_V4SI_UNS VXOR_VBI
+ vsll __builtin_vec_xor (vsll, vsll);
+ VXOR_V2DI
+ vull __builtin_vec_xor (vull, vull);
+ VXOR_V2DI_UNS VXOR_VULL
+ vbll __builtin_vec_xor (vbll, vbll);
+ VXOR_V2DI_UNS VXOR_VBLL
+ vf __builtin_vec_xor (vf, vf);
+ VXOR_V4SF
+ vd __builtin_vec_xor (vd, vd);
+ VXOR_V2DF
+
+[VEC_XST, vec_xst, __builtin_vec_vsx_st, __VSX__]
+ void __builtin_vec_vsx_st (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC
+ void __builtin_vec_vsx_st (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC
+ void __builtin_vec_vsx_st (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC
+ void __builtin_vec_vsx_st (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC
+ void __builtin_vec_vsx_st (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC
+ void __builtin_vec_vsx_st (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S
+ void __builtin_vec_vsx_st (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U
+ void __builtin_vec_vsx_st (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS
+ void __builtin_vec_vsx_st (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS
+ void __builtin_vec_vsx_st (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS
+ void __builtin_vec_vsx_st (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US
+ void __builtin_vec_vsx_st (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS
+ void __builtin_vec_vsx_st (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S
+ void __builtin_vec_vsx_st (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U
+ void __builtin_vec_vsx_st (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP
+ void __builtin_vec_vsx_st (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI
+ void __builtin_vec_vsx_st (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI
+ void __builtin_vec_vsx_st (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI
+ void __builtin_vec_vsx_st (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI
+ void __builtin_vec_vsx_st (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI
+ void __builtin_vec_vsx_st (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S
+ void __builtin_vec_vsx_st (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U
+ void __builtin_vec_vsx_st (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL
+ void __builtin_vec_vsx_st (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL
+ void __builtin_vec_vsx_st (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL
+ void __builtin_vec_vsx_st (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL
+ void __builtin_vec_vsx_st (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL
+ void __builtin_vec_vsx_st (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF
+ void __builtin_vec_vsx_st (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F
+ void __builtin_vec_vsx_st (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD
+ void __builtin_vec_vsx_st (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D
+
+[VEC_XST_BE, vec_xst_be, __builtin_vec_xst_be, __VSX__]
+ void __builtin_vec_xst_be (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC_BE
+ void __builtin_vec_xst_be (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S_BE
+ void __builtin_vec_xst_be (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U_BE
+ void __builtin_vec_xst_be (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS_BE
+ void __builtin_vec_xst_be (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS_BE
+ void __builtin_vec_xst_be (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS_BE
+ void __builtin_vec_xst_be (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US_BE
+ void __builtin_vec_xst_be (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS_BE
+ void __builtin_vec_xst_be (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S_BE
+ void __builtin_vec_xst_be (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U_BE
+ void __builtin_vec_xst_be (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP_BE
+ void __builtin_vec_xst_be (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI_BE
+ void __builtin_vec_xst_be (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI_BE
+ void __builtin_vec_xst_be (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI_BE
+ void __builtin_vec_xst_be (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S_BE
+ void __builtin_vec_xst_be (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U_BE
+ void __builtin_vec_xst_be (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL_BE
+ void __builtin_vec_xst_be (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL_BE
+ void __builtin_vec_xst_be (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL_BE
+ void __builtin_vec_xst_be (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL_BE
+ void __builtin_vec_xst_be (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL_BE
+ void __builtin_vec_xst_be (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF_BE
+ void __builtin_vec_xst_be (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F_BE
+ void __builtin_vec_xst_be (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD_BE
+ void __builtin_vec_xst_be (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D_BE
+
+[VEC_XST_LEN_R, vec_xst_len_r, __builtin_vec_xst_len_r, _ARCH_PPC64_PWR9]
+ void __builtin_vec_xst_len_r (vuc, unsigned char *, unsigned long long);
+ XST_LEN_R
+
+[VEC_XST_TRUNC, vec_xst_trunc, __builtin_vec_xst_trunc, _ARCH_PWR10]
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed char *);
+ TR_STXVRBX TR_STXVRBX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned char *);
+ TR_STXVRBX TR_STXVRBX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed short *);
+ TR_STXVRHX TR_STXVRHX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned short *);
+ TR_STXVRHX TR_STXVRHX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed int *);
+ TR_STXVRWX TR_STXVRWX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned int *);
+ TR_STXVRWX TR_STXVRWX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed long long *);
+ TR_STXVRDX TR_STXVRDX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned long long *);
+ TR_STXVRDX TR_STXVRDX_U
+
+[VEC_XXPERMDI, vec_xxpermdi, __builtin_vsx_xxpermdi, __VSX__]
+ vsc __builtin_vsx_xxpermdi (vsc, vsc, const int);
+ XXPERMDI_16QI XXPERMDI_VSC
+ vuc __builtin_vsx_xxpermdi (vuc, vuc, const int);
+ XXPERMDI_16QI XXPERMDI_VUC
+ vss __builtin_vsx_xxpermdi (vss, vss, const int);
+ XXPERMDI_8HI XXPERMDI_VSS
+ vus __builtin_vsx_xxpermdi (vus, vus, const int);
+ XXPERMDI_8HI XXPERMDI_VUS
+ vsi __builtin_vsx_xxpermdi (vsi, vsi, const int);
+ XXPERMDI_4SI XXPERMDI_VSI
+ vui __builtin_vsx_xxpermdi (vui, vui, const int);
+ XXPERMDI_4SI XXPERMDI_VUI
+ vsll __builtin_vsx_xxpermdi (vsll, vsll, const int);
+ XXPERMDI_2DI XXPERMDI_VSLL
+ vull __builtin_vsx_xxpermdi (vull, vull, const int);
+ XXPERMDI_2DI XXPERMDI_VULL
+ vf __builtin_vsx_xxpermdi (vf, vf, const int);
+ XXPERMDI_4SF XXPERMDI_VF
+ vd __builtin_vsx_xxpermdi (vd, vd, const int);
+ XXPERMDI_2DF XXPERMDI_VD
+
+[VEC_XXSLDWI, vec_xxsldwi, __builtin_vsx_xxsldwi, __VSX__]
+ vsc __builtin_vsx_xxsldwi (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC2
+ vuc __builtin_vsx_xxsldwi (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC2
+ vss __builtin_vsx_xxsldwi (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS2
+ vus __builtin_vsx_xxsldwi (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS2
+ vsi __builtin_vsx_xxsldwi (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI2
+ vui __builtin_vsx_xxsldwi (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI2
+ vsll __builtin_vsx_xxsldwi (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL2
+ vull __builtin_vsx_xxsldwi (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL2
+ vf __builtin_vsx_xxsldwi (vf, vf, const int);
+ XXSLDWI_4SF XXSLDWI_VF2
+ vd __builtin_vsx_xxsldwi (vd, vd, const int);
+ XXSLDWI_2DF XXSLDWI_VD2
^ permalink raw reply [flat|nested] 4+ messages in thread
* [gcc(refs/users/wschmidt/heads/builtins4)] rs6000: Add remaining overloads
@ 2021-02-07 18:13 William Schmidt
0 siblings, 0 replies; 4+ messages in thread
From: William Schmidt @ 2021-02-07 18:13 UTC (permalink / raw)
To: gcc-cvs
https://gcc.gnu.org/g:f986d0fdfa773450f7f9d5715dc452566136fcbd
commit f986d0fdfa773450f7f9d5715dc452566136fcbd
Author: Bill Schmidt <wschmidt@linux.ibm.com>
Date: Mon Nov 2 09:55:43 2020 -0500
rs6000: Add remaining overloads
2020-11-02 Bill Schmidt <wschmidt@linux.ibm.com>
* config/rs6000/rs6000-overload.def: Add remaining overloads.
Diff:
---
gcc/config/rs6000/rs6000-overload.def | 3628 +++++++++++++++++++++++++++++++++
1 file changed, 3628 insertions(+)
diff --git a/gcc/config/rs6000/rs6000-overload.def b/gcc/config/rs6000/rs6000-overload.def
index 7c28cdcb84c..82037644157 100644
--- a/gcc/config/rs6000/rs6000-overload.def
+++ b/gcc/config/rs6000/rs6000-overload.def
@@ -74,8 +74,3636 @@
; a semicolon are also treated as blank lines.
+[CRYPTO_PERMXOR, SKIP, __builtin_crypto_vpermxor]
+ vuc __builtin_crypto_vpermxor (vuc, vuc, vuc);
+ VPERMXOR_V16QI
+ vus __builtin_crypto_vpermxor (vus, vus, vus);
+ VPERMXOR_V8HI
+ vui __builtin_crypto_vpermxor (vui, vui, vui);
+ VPERMXOR_V4SI
+ vull __builtin_crypto_vpermxor (vull, vull, vull);
+ VPERMXOR_V2DI
+
+[CRYPTO_PMSUM, SKIP, __builtin_crypto_vpmsum]
+ vuc __builtin_crypto_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_C
+ vus __builtin_crypto_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_C
+ vui __builtin_crypto_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_C
+ vull __builtin_crypto_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_C
+
+[SCAL_CMPB, SKIP, __builtin_cmpb]
+ unsigned int __builtin_cmpb (unsigned int, unsigned int);
+ CMPB_32
+ unsigned long long __builtin_cmpb (unsigned long long, unsigned long long);
+ CMPB
+
[VEC_ABS, vec_abs, __builtin_vec_abs]
vsc __builtin_vec_abs (vsc);
ABS_V16QI
vss __builtin_vec_abs (vss);
ABS_V8HI
+ vsi __builtin_vec_abs (vsi);
+ ABS_V4SI
+ vsll __builtin_vec_abs (vsll);
+ ABS_V2DI
+ vf __builtin_vec_abs (vf);
+ ABS_V4SF
+ vd __builtin_vec_abs (vd);
+ XVABSDP
+
+[VEC_ABSD, vec_absd, __builtin_vec_vadu, _ARCH_PWR9]
+ vuc __builtin_vec_vadu (vuc, vuc);
+ VADUB
+ vus __builtin_vec_vadu (vus, vus);
+ VADUH
+ vui __builtin_vec_vadu (vui, vui);
+ VADUW
+
+[VEC_ABSS, vec_abss, __builtin_vec_abss]
+ vsc __builtin_vec_abss (vsc);
+ ABSS_V16QI
+ vss __builtin_vec_abss (vss);
+ ABSS_V8HI
+ vsi __builtin_vec_abss (vsi);
+ ABSS_V4SI
+
+[VEC_ADD, vec_add, __builtin_vec_add]
+ vsc __builtin_vec_add (vsc, vsc);
+ VADDUBM VADDUBM_VSC
+ vuc __builtin_vec_add (vuc, vuc);
+ VADDUBM VADDUBM_VUC
+ vss __builtin_vec_add (vss, vss);
+ VADDUHM VADDUHM_VSS
+ vus __builtin_vec_add (vus, vus);
+ VADDUHM VADDUHM_VUS
+ vsi __builtin_vec_add (vsi, vsi);
+ VADDUWM VADDUWM_VSI
+ vui __builtin_vec_add (vui, vui);
+ VADDUWM VADDUWM_VUI
+ vsll __builtin_vec_add (vsll, vsll);
+ VADDUDM VADDUDM_VSLL
+ vull __builtin_vec_add (vull, vull);
+ VADDUDM VADDUDM_VULL
+ vsq __builtin_vec_add (vsq, vsq);
+ VADDUQM VADDUQM_VSQ
+ vuq __builtin_vec_add (vuq, vuq);
+ VADDUQM VADDUQM_VUQ
+ vf __builtin_vec_add (vf, vf);
+ XVADDSP
+ vd __builtin_vec_add (vd, vd);
+ XVADDDP
+
+[VEC_ADDC, vec_addc, __builtin_vec_addc]
+ vsi __builtin_vec_addc (vsi, vsi);
+ VADDCUW VADDCUW_VSI
+ vui __builtin_vec_addc (vui, vui);
+ VADDCUW VADDCUW_VUI
+ vsq __builtin_vec_addc (vsq, vsq);
+ VADDCUQ VADDCUQ_VSQ
+ vuq __builtin_vec_addc (vuq, vuq);
+ VADDCUQ VADDCUQ_VUQ
+
+; TODO: Note that the entry for VEC_ADDEC currently gets ignored in
+; altivec_resolve_overloaded_builtin. There are also forms for
+; vsi and vui arguments, but rather than building a define_expand
+; for the instruction sequence generated for those, we do some RTL
+; hackery. Revisit whether we can remove that. For now, keep this
+; much of the entry here to generate the #define, at least.
+[VEC_ADDEC, vec_addec, __builtin_vec_addec]
+ vsq __builtin_vec_addec (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ
+ vuq __builtin_vec_addec (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ
+
+[VEC_ADDS, vec_adds, __builtin_vec_adds]
+ vuc __builtin_vec_adds (vuc, vuc);
+ VADDUBS
+ vsc __builtin_vec_adds (vsc, vsc);
+ VADDSBS
+ vus __builtin_vec_adds (vus, vus);
+ VADDUHS
+ vss __builtin_vec_adds (vss, vss);
+ VADDSHS
+ vui __builtin_vec_adds (vui, vui);
+ VADDUWS
+ vsi __builtin_vec_adds (vsi, vsi);
+ VADDSWS
+
+[VEC_ANDC, vec_andc, __builtin_vec_andc]
+ vbc __builtin_vec_andc (vbc, vbc);
+ VANDC_V16QI_UNS VANDC_VBC
+ vsc __builtin_vec_andc (vsc, vsc);
+ VANDC_V16QI
+ vuc __builtin_vec_andc (vuc, vuc);
+ VANDC_V16QI_UNS VANDC_VUC
+ vbs __builtin_vec_andc (vbs, vbs);
+ VANDC_V8HI_UNS VANDC_VBS
+ vss __builtin_vec_andc (vss, vss);
+ VANDC_V8HI
+ vus __builtin_vec_andc (vus, vus);
+ VANDC_V8HI_UNS VANDC_VUS
+ vbi __builtin_vec_andc (vbi, vbi);
+ VANDC_V4SI_UNS VANDC_VBI
+ vsi __builtin_vec_andc (vsi, vsi);
+ VANDC_V4SI
+ vui __builtin_vec_andc (vui, vui);
+ VANDC_V4SI_UNS VANDC_VUI
+ vbll __builtin_vec_andc (vbll, vbll);
+ VANDC_V4SI_UNS VANDC_VBLL
+ vsll __builtin_vec_andc (vsll, vsll);
+ VANDC_V2DI
+ vull __builtin_vec_andc (vull, vull);
+ VANDC_V2DI_UNS VANDC_VULL
+ vf __builtin_vec_andc (vf, vf);
+ VANDC_V4SF
+ vd __builtin_vec_andc (vd, vd);
+ VANDC_V2DF
+
+[VEC_AVG, vec_avg, __builtin_vec_avg]
+ vsc __builtin_vec_avg (vsc, vsc);
+ VAVGSB
+ vuc __builtin_vec_avg (vuc, vuc);
+ VAVGUB
+ vss __builtin_vec_avg (vss, vss);
+ VAVGSH
+ vus __builtin_vec_avg (vus, vus);
+ VAVGUH
+ vsi __builtin_vec_avg (vsi, vsi);
+ VAVGSW
+ vui __builtin_vec_avg (vui, vui);
+ VAVGUW
+
+[VEC_BLENDV, vec_blendv, __builtin_vec_xxblend, _ARCH_PWR10]
+ vsc __builtin_vec_xxblend (vsc, vsc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VSC
+ vuc __builtin_vec_xxblend (vuc, vuc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VUC
+ vss __builtin_vec_xxblend (vss, vss, vus);
+ VXXBLEND_V8HI VXXBLEND_VSS
+ vus __builtin_vec_xxblend (vus, vus, vus);
+ VXXBLEND_V8HI VXXBLEND_VUS
+ vsi __builtin_vec_xxblend (vsi, vsi, vui);
+ VXXBLEND_V4SI VXXBLEND_VSI
+ vui __builtin_vec_xxblend (vui, vui, vui);
+ VXXBLEND_V4SI VXXBLEND_VUI
+ vsll __builtin_vec_xxblend (vsll, vsll, vull);
+ VXXBLEND_V2DI VXXBLEND_VSLL
+ vull __builtin_vec_xxblend (vull, vull, vull);
+ VXXBLEND_V2DI VXXBLEND_VULL
+ vf __builtin_vec_xxblend (vf, vf, vui);
+ VXXBLEND_V4SF
+ vd __builtin_vec_xxblend (vd, vd, vull);
+ VXXBLEND_V2DF
+
+[VEC_BPERM, vec_bperm, __builtin_vec_vbperm_api, _ARCH_PWR8]
+ vull __builtin_vec_vbperm_api (vull, vuc);
+ VBPERMD VBPERMD_VULL
+ vull __builtin_vec_vbperm_api (vuq, vuc);
+ VBPERMD VBPERMD_VUQ
+ vuc __builtin_vec_vbperm_api (vuc, vuc);
+ VBPERMQ2
+
+[VEC_CEIL, vec_ceil, __builtin_vec_ceil]
+ vf __builtin_vec_ceil (vf);
+ XVRSPIP
+ vd __builtin_vec_ceil (vd);
+ XVRDPIP
+
+[VEC_CFUGE, vec_cfuge, __builtin_vec_cfuge, _ARCH_PWR10]
+ vull __builtin_vec_cfuge (vull, vull);
+ VCFUGED
+
+[VEC_CIPHER_BE, vec_cipher_be, __builtin_vec_vcipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipher_be (vuc, vuc);
+ VCIPHER_BE
+
+[VEC_CIPHERLAST_BE, vec_cipherlast_be, __builtin_vec_vcipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipherlast_be (vuc, vuc);
+ VCIPHERLAST_BE
+
+[VEC_CLRL, vec_clrl, __builtin_vec_clrl, _ARCH_PWR10]
+ vsc __builtin_vec_clrl (vsc, unsigned int);
+ VCLRLB VCLRLB_S
+ vuc __builtin_vec_clrl (vuc, unsigned int);
+ VCLRLB VCLRLB_U
+
+[VEC_CLRR, vec_clrr, __builtin_vec_clrr, _ARCH_PWR10]
+ vsc __builtin_vec_clrr (vsc, unsigned int);
+ VCLRRB VCLRRB_S
+ vuc __builtin_vec_clrr (vuc, unsigned int);
+ VCLRRB VCLRRB_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPAE_P, SKIP, __builtin_vec_vcmpae_p]
+ signed int __builtin_vec_vcmpae_p (vsc, vsc);
+ VCMPAEB_P VCMPAEB_VSC_P
+ signed int __builtin_vec_vcmpae_p (vuc, vuc);
+ VCMPAEB_P VCMPAEB_VUC_P
+ signed int __builtin_vec_vcmpae_p (vbc, vbc);
+ VCMPAEB_P VCMPAEB_VBC_P
+ signed int __builtin_vec_vcmpae_p (vss, vss);
+ VCMPAEH_P VCMPAEH_VSS_P
+ signed int __builtin_vec_vcmpae_p (vus, vus);
+ VCMPAEH_P VCMPAEH_VUS_P
+ signed int __builtin_vec_vcmpae_p (vbs, vbs);
+ VCMPAEH_P VCMPAEH_VBS_P
+ signed int __builtin_vec_vcmpae_p (vp, vp);
+ VCMPAEH_P VCMPAEH_VP_P
+ signed int __builtin_vec_vcmpae_p (vsi, vsi);
+ VCMPAEW_P VCMPAEW_VSI_P
+ signed int __builtin_vec_vcmpae_p (vui, vui);
+ VCMPAEW_P VCMPAEW_VUI_P
+ signed int __builtin_vec_vcmpae_p (vbi, vbi);
+ VCMPAEW_P VCMPAEW_VBI_P
+ signed int __builtin_vec_vcmpae_p (vsll, vsll);
+ VCMPAED_P VCMPAED_VSLL_P
+ signed int __builtin_vec_vcmpae_p (vull, vull);
+ VCMPAED_P VCMPAED_VULL_P
+ signed int __builtin_vec_vcmpae_p (vbll, vbll);
+ VCMPAED_P VCMPAED_VBLL_P
+ signed int __builtin_vec_vcmpae_p (vf, vf);
+ VCMPAEFP_P
+ signed int __builtin_vec_vcmpae_p (vd, vd);
+ VCMPAEDP_P
+
+[VEC_CMPB, vec_cmpb, __builtin_vec_cmpb]
+ vsi __builtin_vec_cmpb (vf, vf);
+ VCMPBFP
+
+[VEC_CMPEQ, vec_cmpeq, __builtin_vec_cmpeq]
+ vbc __builtin_vec_cmpeq (vsc, vsc);
+ VCMPEQUB VCMPEQUB_VSC
+ vbc __builtin_vec_cmpeq (vuc, vuc);
+ VCMPEQUB VCMPEQUB_VUC
+ vbc __builtin_vec_cmpeq (vbc, vbc);
+ VCMPEQUB VCMPEQUB_VBC
+ vbs __builtin_vec_cmpeq (vss, vss);
+ VCMPEQUH VCMPEQUH_VSS
+ vbs __builtin_vec_cmpeq (vus, vus);
+ VCMPEQUH VCMPEQUH_VUS
+ vbs __builtin_vec_cmpeq (vbs, vbs);
+ VCMPEQUH VCMPEQUH_VBS
+ vbi __builtin_vec_cmpeq (vsi, vsi);
+ VCMPEQUW VCMPEQUW_VSI
+ vbi __builtin_vec_cmpeq (vui, vui);
+ VCMPEQUW VCMPEQUW_VUI
+ vbi __builtin_vec_cmpeq (vbi, vbi);
+ VCMPEQUW VCMPEQUW_VBI
+ vbll __builtin_vec_cmpeq (vsll, vsll);
+ VCMPEQUD VCMPEQUD_VSLL
+ vbll __builtin_vec_cmpeq (vull, vull);
+ VCMPEQUD VCMPEQUD_VULL
+ vbll __builtin_vec_cmpeq (vbll, vbll);
+ VCMPEQUD VCMPEQUD_VBLL
+ vbi __builtin_vec_cmpeq (vf, vf);
+ XVCMPEQSP
+ vbll __builtin_vec_cmpeq (vd, vd);
+ XVCMPEQDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPEQ_P, SKIP, __builtin_vec_vcmpeq_p]
+ signed int __builtin_vec_vcmpeq_p (signed int, vuc, vuc);
+ VCMPEQUB_P VCMPEQUB_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsc, vsc);
+ VCMPEQUB_P VCMPEQUB_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbc, vbc);
+ VCMPEQUB_P VCMPEQUB_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vus, vus);
+ VCMPEQUH_P VCMPEQUH_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vss, vss);
+ VCMPEQUH_P VCMPEQUH_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbs, vbs);
+ VCMPEQUH_P VCMPEQUH_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vp, vp);
+ VCMPEQUH_P VCMPEQUH_PP
+ signed int __builtin_vec_vcmpeq_p (signed int, vui, vui);
+ VCMPEQUW_P VCMPEQUW_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsi, vsi);
+ VCMPEQUW_P VCMPEQUW_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbi, vbi);
+ VCMPEQUW_P VCMPEQUW_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vull, vull);
+ VCMPEQUD_P VCMPEQUD_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsll, vsll);
+ VCMPEQUD_P VCMPEQUD_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbll, vbll);
+ VCMPEQUD_P VCMPEQUD_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vf, vf);
+ XVCMPEQSP_P
+ signed int __builtin_vec_vcmpeq_p (signed int, vd, vd);
+ XVCMPEQDP_P
+
+[VEC_CMPEQB, SKIP, __builtin_byte_in_set]
+ signed int __builtin_byte_in_set (unsigned char, unsigned long long);
+ CMPEQB
+
+[VEC_CMPGE, vec_cmpge, __builtin_vec_cmpge]
+ vbc __builtin_vec_cmpge (vsc, vsc);
+ CMPGE_16QI CMPGE_16QI_VSC
+ vbc __builtin_vec_cmpge (vuc, vuc);
+ CMPGE_16QI CMPGE_16QI_VUC
+ vbs __builtin_vec_cmpge (vss, vss);
+ CMPGE_8HI CMPGE_8HI_VSS
+ vbs __builtin_vec_cmpge (vus, vus);
+ CMPGE_8HI CMPGE_8HI_VUS
+ vbi __builtin_vec_cmpge (vsi, vsi);
+ CMPGE_4SI CMPGE_4SI_VSI
+ vbi __builtin_vec_cmpge (vui, vui);
+ CMPGE_4SI CMPGE_4SI_VUI
+ vbll __builtin_vec_cmpge (vsll, vsll);
+ CMPGE_2DI CMPGE_2DI_VSLL
+ vbll __builtin_vec_cmpge (vull, vull);
+ CMPGE_2DI CMPGE_2DI_VULL
+ vbi __builtin_vec_cmpge (vf, vf);
+ XVCMPGESP
+ vbll __builtin_vec_cmpge (vd, vd);
+ XVCMPGEDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; Also, cmpge is the same as cmpgt for all cases except floating point.
+; There is further code to deal with this special case in
+; altivec_build_resolved_builtin. TODO: Make sure this is still true.
+[VEC_CMPGE_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P VCMPGTUB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P VCMPGTSB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P VCMPGTUH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P VCMPGTSH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P VCMPGTUW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P VCMPGTSW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P VCMPGTUD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P VCMPGTSD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGESP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGEDP_P
+
+[VEC_CMPGT, vec_cmpgt, __builtin_vec_cmpgt]
+ vbc __builtin_vec_cmpgt (vsc, vsc);
+ VCMPGTSB
+ vbc __builtin_vec_cmpgt (vuc, vuc);
+ VCMPGTUB
+ vbs __builtin_vec_cmpgt (vss, vss);
+ VCMPGTSH
+ vbs __builtin_vec_cmpgt (vus, vus);
+ VCMPGTUH
+ vbi __builtin_vec_cmpgt (vsi, vsi);
+ VCMPGTSW
+ vbi __builtin_vec_cmpgt (vui, vui);
+ VCMPGTUW
+ vbll __builtin_vec_cmpgt (vsll, vsll);
+ VCMPGTSD
+ vbll __builtin_vec_cmpgt (vull, vull);
+ VCMPGTUD
+ vbi __builtin_vec_cmpgt (vf, vf);
+ XVCMPGTSP
+ vbll __builtin_vec_cmpgt (vd, vd);
+ XVCMPGTDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPGT_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGTSP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGTDP_P
+
+; Note that there is no entry for VEC_CMPLE. VEC_CMPLE is implemented
+; using VEC_CMPGE with reversed arguments in altivec.h.
+
+; Note that there is no entry for VEC_CMPLT. VEC_CMPLT is implemented
+; using VEC_CMPGT with reversed arguments in altivec.h.
+
+[VEC_CMPNE, vec_cmpne, __builtin_vec_cmpne]
+ vbc __builtin_vec_cmpne (vbc, vbc);
+ VCMPNEB VCMPNEB_VBC
+ vbc __builtin_vec_cmpne (vsc, vsc);
+ VCMPNEB VCMPNEB_VSC
+ vbc __builtin_vec_cmpne (vuc, vuc);
+ VCMPNEB VCMPNEB_VUC
+ vbs __builtin_vec_cmpne (vbs, vbs);
+ VCMPNEH VCMPNEH_VBS
+ vbs __builtin_vec_cmpne (vss, vss);
+ VCMPNEH VCMPNEH_VSS
+ vbs __builtin_vec_cmpne (vus, vus);
+ VCMPNEH VCMPNEH_VUS
+ vbi __builtin_vec_cmpne (vbi, vbi);
+ VCMPNEW VCMPNEW_VBI
+ vbi __builtin_vec_cmpne (vsi, vsi);
+ VCMPNEW VCMPNEW_VSI
+ vbi __builtin_vec_cmpne (vui, vui);
+ VCMPNEW VCMPNEW_VUI
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNE_P, SKIP, __builtin_vec_vcmpne_p]
+ signed int __builtin_vec_vcmpne_p (vsc, vsc);
+ VCMPNEB_P VCMPNEB_VSC_P
+ signed int __builtin_vec_vcmpne_p (vuc, vuc);
+ VCMPNEB_P VCMPNEB_VUC_P
+ signed int __builtin_vec_vcmpne_p (vbc, vbc);
+ VCMPNEB_P VCMPNEB_VBC_P
+ signed int __builtin_vec_vcmpne_p (vss, vss);
+ VCMPNEH_P VCMPNEH_VSS_P
+ signed int __builtin_vec_vcmpne_p (vus, vus);
+ VCMPNEH_P VCMPNEH_VUS_P
+ signed int __builtin_vec_vcmpne_p (vbs, vbs);
+ VCMPNEH_P VCMPNEH_VBS_P
+ signed int __builtin_vec_vcmpne_p (vp, vp);
+ VCMPNEH_P VCMPNEH_VP_P
+ signed int __builtin_vec_vcmpne_p (vsi, vsi);
+ VCMPNEW_P VCMPNEW_VSI_P
+ signed int __builtin_vec_vcmpne_p (vui, vui);
+ VCMPNEW_P VCMPNEW_VUI_P
+ signed int __builtin_vec_vcmpne_p (vbi, vbi);
+ VCMPNEW_P VCMPNEW_VBI_P
+ signed int __builtin_vec_vcmpne_p (vsll, vsll);
+ VCMPNED_P VCMPNED_VSLL_P
+ signed int __builtin_vec_vcmpne_p (vull, vull);
+ VCMPNED_P VCMPNED_VULL_P
+ signed int __builtin_vec_vcmpne_p (vbll, vbll);
+ VCMPNED_P VCMPNED_VBLL_P
+ signed int __builtin_vec_vcmpne_p (vf, vf);
+ VCMPNEFP_P
+ signed int __builtin_vec_vcmpne_p (vd, vd);
+ VCMPNEDP_P
+
+[VEC_CMPNEZ, vec_cmpnez, __builtin_vec_cmpnez, _ARCH_PWR9]
+ vbc __builtin_vec_cmpnez (vsc, vsc);
+ CMPNEZB CMPNEZB_S
+ vbc __builtin_vec_cmpnez (vuc, vuc);
+ CMPNEZB CMPNEZB_U
+ vbs __builtin_vec_cmpnez (vss, vss);
+ CMPNEZH CMPNEZH_S
+ vbs __builtin_vec_cmpnez (vus, vus);
+ CMPNEZH CMPNEZH_U
+ vbi __builtin_vec_cmpnez (vsi, vsi);
+ CMPNEZW CMPNEZW_S
+ vbi __builtin_vec_cmpnez (vui, vui);
+ CMPNEZW CMPNEZW_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNEZ_P, SKIP, __builtin_vec_vcmpnez_p]
+ signed int __builtin_vec_vcmpnez_p (signed int, vsc, vsc);
+ VCMPNEZB_P VCMPNEZB_VSC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vuc, vuc);
+ VCMPNEZB_P VCMPNEZB_VUC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vss, vss);
+ VCMPNEZH_P VCMPNEZH_VSS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vus, vus);
+ VCMPNEZH_P VCMPNEZH_VUS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vsi, vsi);
+ VCMPNEZW_P VCMPNEZW_VSI_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vui, vui);
+ VCMPNEZW_P VCMPNEZW_VUI_P
+
+[VEC_CMPRB, SKIP, __builtin_byte_in_range]
+ signed int __builtin_byte_in_range (unsigned char, unsigned int);
+ CMPRB
+
+[VEC_CMPRB2, SKIP, __builtin_byte_in_either_range]
+ signed int __builtin_byte_in_either_range (unsigned char, unsigned int);
+ CMPRB2
+
+[VEC_CNTLZ, vec_cntlz, __builtin_vec_vclz, _ARCH_PWR8]
+ vsc __builtin_vec_vclz (vsc);
+ VCLZB VCLZB_S
+ vuc __builtin_vec_vclz (vuc);
+ VCLZB VCLZB_U
+ vss __builtin_vec_vclz (vss);
+ VCLZH VCLZH_S
+ vus __builtin_vec_vclz (vus);
+ VCLZH VCLZH_U
+ vsi __builtin_vec_vclz (vsi);
+ VCLZW VCLZW_S
+ vui __builtin_vec_vclz (vui);
+ VCLZW VCLZW_U
+ vsll __builtin_vec_vclz (vsll);
+ VCLZD VCLZD_S
+ vull __builtin_vec_vclz (vull);
+ VCLZD VCLZD_U
+
+[VEC_CNTLZM, vec_cntlzm, __builtin_vec_vclzdm, _ARCH_PWR10]
+ vull __builtin_vec_vclzdm (vull, vull);
+ VCLZDM
+
+[VEC_CNTTZM, vec_cnttzm, __builtin_vec_vctzdm, _ARCH_PWR10]
+ vull __builtin_vec_vctzdm (vull, vull);
+ CNTTZDM
+
+[VEC_CNTLZ_LSBB, vec_cntlz_lsbb, __builtin_vec_vclzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vclzlsbb (vsc);
+ VCLZLSBB_V16QI VCLZLSBB_VSC
+ signed int __builtin_vec_vclzlsbb (vuc);
+ VCLZLSBB_V16QI VCLZLSBB_VUC
+ signed int __builtin_vec_vclzlsbb (vss);
+ VCLZLSBB_V8HI VCLZLSBB_VSS
+ signed int __builtin_vec_vclzlsbb (vus);
+ VCLZLSBB_V8HI VCLZLSBB_VUS
+ signed int __builtin_vec_vclzlsbb (vsi);
+ VCLZLSBB_V4SI VCLZLSBB_VSI
+ signed int __builtin_vec_vclzlsbb (vui);
+ VCLZLSBB_V4SI VCLZLSBB_VUI
+
+[VEC_CNTM, vec_cntm, __builtin_vec_cntm, _ARCH_PWR10]
+ unsigned long long __builtin_vec_cntm (vuc, const int);
+ VCNTMBB
+ unsigned long long __builtin_vec_cntm (vus, const int);
+ VCNTMBH
+ unsigned long long __builtin_vec_cntm (vui, const int);
+ VCNTMBW
+ unsigned long long __builtin_vec_cntm (vull, const int);
+ VCNTMBD
+
+[VEC_CNTTZ, vec_cnttz, __builtin_vec_vctz, _ARCH_PWR9]
+ vsc __builtin_vec_vctz (vsc);
+ VCTZB VCTZB_S
+ vuc __builtin_vec_vctz (vuc);
+ VCTZB VCTZB_U
+ vss __builtin_vec_vctz (vss);
+ VCTZH VCTZH_S
+ vus __builtin_vec_vctz (vus);
+ VCTZH VCTZH_U
+ vsi __builtin_vec_vctz (vsi);
+ VCTZW VCTZW_S
+ vui __builtin_vec_vctz (vui);
+ VCTZW VCTZW_U
+ vsll __builtin_vec_vctz (vsll);
+ VCTZD VCTZD_S
+ vull __builtin_vec_vctz (vull);
+ VCTZD VCTZD_U
+
+[VEC_CNTTZ_LSBB, vec_cnttz_lsbb, __builtin_vec_vctzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vctzlsbb (vsc);
+ VCTZLSBB_V16QI VCTZLSBB_VSC
+ signed int __builtin_vec_vctzlsbb (vuc);
+ VCTZLSBB_V16QI VCTZLSBB_VUC
+ signed int __builtin_vec_vctzlsbb (vss);
+ VCTZLSBB_V8HI VCTZLSBB_VSS
+ signed int __builtin_vec_vctzlsbb (vus);
+ VCTZLSBB_V8HI VCTZLSBB_VUS
+ signed int __builtin_vec_vctzlsbb (vsi);
+ VCTZLSBB_V4SI VCTZLSBB_VSI
+ signed int __builtin_vec_vctzlsbb (vui);
+ VCTZLSBB_V4SI VCTZLSBB_VUI
+
+[VEC_CONVERT_4F32_8I16, SKIP, __builtin_vec_convert_4f32_8i16]
+ vus __builtin_vec_convert_4f32_8i16 (vf, vf);
+ CONVERT_4F32_8I16
+
+[VEC_CONVERT_4F32_8F16, vec_pack_to_short_fp32, __builtin_vec_convert_4f32_8f16, _ARCH_PWR9]
+ vus __builtin_vec_convert_4f32_8f16 (vf, vf);
+ CONVERT_4F32_8F16
+
+[VEC_COPYSIGN, vec_cpsgn, __builtin_vec_copysign]
+ vf __builtin_vec_copysign (vf, vf);
+ CPSGNSP
+ vd __builtin_vec_copysign (vd, vd);
+ CPSGNDP
+
+[VEC_CTF, vec_ctf, __builtin_vec_ctf]
+ vf __builtin_vec_ctf (vsi, const int);
+ VCFSX
+ vf __builtin_vec_ctf (vui, const int);
+ VCFUX
+ vd __builtin_vec_ctf (vsll, const int);
+ XVCVSXDDP_SCALE
+ vd __builtin_vec_ctf (vull, const int);
+ XVCVUXDDP_SCALE
+
+[VEC_CTS, vec_cts, __builtin_vec_cts]
+ vsi __builtin_vec_cts (vf, const int);
+ VCTSXS
+ vsll __builtin_vec_cts (vd, const int);
+ XVCVDPSXDS_SCALE
+
+[VEC_CTU, vec_ctu, __builtin_vec_ctu]
+ vui __builtin_vec_ctu (vf, const int);
+ VCTUXS
+ vull __builtin_vec_ctu (vd, const int);
+ XVCVDPUXDS_SCALE
+
+[VEC_DIV, vec_div, __builtin_vec_div, __VSX__]
+ vsll __builtin_vec_div (vsll, vsll);
+ DIV_V2DI
+ vull __builtin_vec_div (vull, vull);
+ UDIV_V2DI
+ vf __builtin_vec_div (vf, vf);
+ XVDIVSP
+ vd __builtin_vec_div (vd, vd);
+ XVDIVDP
+
+[VEC_DOUBLE, vec_double, __builtin_vec_double]
+ vd __builtin_vec_double (vsll);
+ XVCVSXDDP
+ vd __builtin_vec_double (vull);
+ XVCVUXDDP
+
+[VEC_DOUBLEE, vec_doublee, __builtin_vec_doublee]
+ vd __builtin_vec_doublee (vsi);
+ DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vui);
+ UNS_DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vf);
+ DOUBLEE_V4SF
+
+[VEC_DOUBLEH, vec_doubleh, __builtin_vec_doubleh]
+ vd __builtin_vec_doubleh (vsi);
+ DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vui);
+ UNS_DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vf);
+ DOUBLEH_V4SF
+
+[VEC_DOUBLEL, vec_doublel, __builtin_vec_doublel]
+ vd __builtin_vec_doublel (vsi);
+ DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vui);
+ UNS_DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vf);
+ DOUBLEL_V4SF
+
+[VEC_DOUBLEO, vec_doubleo, __builtin_vec_doubleo]
+ vd __builtin_vec_doubleo (vsi);
+ DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vui);
+ UNS_DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vf);
+ DOUBLEO_V4SF
+
+[VEC_DST, vec_dst, __builtin_vec_dst]
+ void __builtin_vec_dst (unsigned char *, const int, const int);
+ DST DST_UC
+ void __builtin_vec_dst (signed char *, const int, const int);
+ DST DST_SC
+ void __builtin_vec_dst (unsigned short *, const int, const int);
+ DST DST_US
+ void __builtin_vec_dst (signed short *, const int, const int);
+ DST DST_SS
+ void __builtin_vec_dst (unsigned int *, const int, const int);
+ DST DST_UI
+ void __builtin_vec_dst (signed int *, const int, const int);
+ DST DST_SI
+ void __builtin_vec_dst (unsigned long long *, const int, const int);
+ DST DST_ULL
+ void __builtin_vec_dst (signed long long *, const int, const int);
+ DST DST_SLL
+ void __builtin_vec_dst (float *, const int, const int);
+ DST DST_F
+ void __builtin_vec_dst (vuc *, const int, const int);
+ DST DST_VUC
+ void __builtin_vec_dst (vsc *, const int, const int);
+ DST DST_VSC
+ void __builtin_vec_dst (vbc *, const int, const int);
+ DST DST_VBC
+ void __builtin_vec_dst (vus *, const int, const int);
+ DST DST_VUS
+ void __builtin_vec_dst (vss *, const int, const int);
+ DST DST_VSS
+ void __builtin_vec_dst (vbs *, const int, const int);
+ DST DST_VBS
+ void __builtin_vec_dst (vp *, const int, const int);
+ DST DST_VP
+ void __builtin_vec_dst (vui *, const int, const int);
+ DST DST_VUI
+ void __builtin_vec_dst (vsi *, const int, const int);
+ DST DST_VSI
+ void __builtin_vec_dst (vbi *, const int, const int);
+ DST DST_VBI
+ void __builtin_vec_dst (vf *, const int, const int);
+ DST DST_VF
+
+[VEC_DSTST, vec_dstst, __builtin_vec_dstst]
+ void __builtin_vec_dstst (unsigned char *, const int, const int);
+ DSTST DSTST_UC
+ void __builtin_vec_dstst (signed char *, const int, const int);
+ DSTST DSTST_SC
+ void __builtin_vec_dstst (unsigned short *, const int, const int);
+ DSTST DSTST_US
+ void __builtin_vec_dstst (signed short *, const int, const int);
+ DSTST DSTST_SS
+ void __builtin_vec_dstst (unsigned int *, const int, const int);
+ DSTST DSTST_UI
+ void __builtin_vec_dstst (signed int *, const int, const int);
+ DSTST DSTST_SI
+ void __builtin_vec_dstst (unsigned long long *, const int, const int);
+ DSTST DSTST_ULL
+ void __builtin_vec_dstst (signed long long *, const int, const int);
+ DSTST DSTST_SLL
+ void __builtin_vec_dstst (float *, const int, const int);
+ DSTST DSTST_F
+ void __builtin_vec_dstst (vuc *, const int, const int);
+ DSTST DSTST_VUC
+ void __builtin_vec_dstst (vsc *, const int, const int);
+ DSTST DSTST_VSC
+ void __builtin_vec_dstst (vbc *, const int, const int);
+ DSTST DSTST_VBC
+ void __builtin_vec_dstst (vus *, const int, const int);
+ DSTST DSTST_VUS
+ void __builtin_vec_dstst (vss *, const int, const int);
+ DSTST DSTST_VSS
+ void __builtin_vec_dstst (vbs *, const int, const int);
+ DSTST DSTST_VBS
+ void __builtin_vec_dstst (vp *, const int, const int);
+ DSTST DSTST_VP
+ void __builtin_vec_dstst (vui *, const int, const int);
+ DSTST DSTST_VUI
+ void __builtin_vec_dstst (vsi *, const int, const int);
+ DSTST DSTST_VSI
+ void __builtin_vec_dstst (vbi *, const int, const int);
+ DSTST DSTST_VBI
+ void __builtin_vec_dstst (vf *, const int, const int);
+ DSTST DSTST_VF
+
+[VEC_DSTSTT, vec_dststt, __builtin_vec_dststt]
+ void __builtin_vec_dststt (unsigned char *, const int, const int);
+ DSTSTT DSTSTT_UC
+ void __builtin_vec_dststt (signed char *, const int, const int);
+ DSTSTT DSTSTT_SC
+ void __builtin_vec_dststt (unsigned short *, const int, const int);
+ DSTSTT DSTSTT_US
+ void __builtin_vec_dststt (signed short *, const int, const int);
+ DSTSTT DSTSTT_SS
+ void __builtin_vec_dststt (unsigned int *, const int, const int);
+ DSTSTT DSTSTT_UI
+ void __builtin_vec_dststt (signed int *, const int, const int);
+ DSTSTT DSTSTT_SI
+ void __builtin_vec_dststt (unsigned long long *, const int, const int);
+ DSTSTT DSTSTT_ULL
+ void __builtin_vec_dststt (signed long long *, const int, const int);
+ DSTSTT DSTSTT_SLL
+ void __builtin_vec_dststt (float *, const int, const int);
+ DSTSTT DSTSTT_F
+ void __builtin_vec_dststt (vuc *, const int, const int);
+ DSTSTT DSTSTT_VUC
+ void __builtin_vec_dststt (vsc *, const int, const int);
+ DSTSTT DSTSTT_VSC
+ void __builtin_vec_dststt (vbc *, const int, const int);
+ DSTSTT DSTSTT_VBC
+ void __builtin_vec_dststt (vus *, const int, const int);
+ DSTSTT DSTSTT_VUS
+ void __builtin_vec_dststt (vss *, const int, const int);
+ DSTSTT DSTSTT_VSS
+ void __builtin_vec_dststt (vbs *, const int, const int);
+ DSTSTT DSTSTT_VBS
+ void __builtin_vec_dststt (vp *, const int, const int);
+ DSTSTT DSTSTT_VP
+ void __builtin_vec_dststt (vui *, const int, const int);
+ DSTSTT DSTSTT_VUI
+ void __builtin_vec_dststt (vsi *, const int, const int);
+ DSTSTT DSTSTT_VSI
+ void __builtin_vec_dststt (vbi *, const int, const int);
+ DSTSTT DSTSTT_VBI
+ void __builtin_vec_dststt (vf *, const int, const int);
+ DSTSTT DSTSTT_VF
+
+[VEC_DSTT, vec_dstt, __builtin_vec_dstt]
+ void __builtin_vec_dstt (unsigned char *, const int, const int);
+ DSTT DSTT_UC
+ void __builtin_vec_dstt (signed char *, const int, const int);
+ DSTT DSTT_SC
+ void __builtin_vec_dstt (unsigned short *, const int, const int);
+ DSTT DSTT_US
+ void __builtin_vec_dstt (signed short *, const int, const int);
+ DSTT DSTT_SS
+ void __builtin_vec_dstt (unsigned int *, const int, const int);
+ DSTT DSTT_UI
+ void __builtin_vec_dstt (signed int *, const int, const int);
+ DSTT DSTT_SI
+ void __builtin_vec_dstt (unsigned long long *, const int, const int);
+ DSTT DSTT_ULL
+ void __builtin_vec_dstt (signed long long *, const int, const int);
+ DSTT DSTT_SLL
+ void __builtin_vec_dstt (float *, const int, const int);
+ DSTT DSTT_F
+ void __builtin_vec_dstt (vuc *, const int, const int);
+ DSTT DSTT_VUC
+ void __builtin_vec_dstt (vsc *, const int, const int);
+ DSTT DSTT_VSC
+ void __builtin_vec_dstt (vbc *, const int, const int);
+ DSTT DSTT_VBC
+ void __builtin_vec_dstt (vus *, const int, const int);
+ DSTT DSTT_VUS
+ void __builtin_vec_dstt (vss *, const int, const int);
+ DSTT DSTT_VSS
+ void __builtin_vec_dstt (vbs *, const int, const int);
+ DSTT DSTT_VBS
+ void __builtin_vec_dstt (vp *, const int, const int);
+ DSTT DSTT_VP
+ void __builtin_vec_dstt (vui *, const int, const int);
+ DSTT DSTT_VUI
+ void __builtin_vec_dstt (vsi *, const int, const int);
+ DSTT DSTT_VSI
+ void __builtin_vec_dstt (vbi *, const int, const int);
+ DSTT DSTT_VBI
+ void __builtin_vec_dstt (vf *, const int, const int);
+ DSTT DSTT_VF
+
+[VEC_EQV, vec_eqv, __builtin_vec_eqv, _ARCH_PWR8]
+ vsc __builtin_vec_eqv (vsc, vsc);
+ EQV_V16QI
+ vuc __builtin_vec_eqv (vuc, vuc);
+ EQV_V16QI_UNS EQV_V16QI_VUC
+ vbc __builtin_vec_eqv (vbc, vbc);
+ EQV_V16QI_UNS EQV_V16QI_VBC
+ vss __builtin_vec_eqv (vss, vss);
+ EQV_V8HI
+ vus __builtin_vec_eqv (vus, vus);
+ EQV_V8HI_UNS EQV_V8HI_VUS
+ vbs __builtin_vec_eqv (vbs, vbs);
+ EQV_V8HI_UNS EQV_V8HI_VBS
+ vsi __builtin_vec_eqv (vsi, vsi);
+ EQV_V4SI
+ vui __builtin_vec_eqv (vui, vui);
+ EQV_V4SI_UNS EQV_V4SI_VUI
+ vbi __builtin_vec_eqv (vbi, vbi);
+ EQV_V4SI_UNS EQV_V4SI_VBI
+ vsll __builtin_vec_eqv (vsll, vsll);
+ EQV_V2DI
+ vull __builtin_vec_eqv (vull, vull);
+ EQV_V2DI_UNS EQV_V2DI_VULL
+ vbll __builtin_vec_eqv (vbll, vbll);
+ EQV_V2DI_UNS EQV_V2DI_VBLL
+ vf __builtin_vec_eqv (vf, vf);
+ EQV_V4SF
+ vd __builtin_vec_eqv (vd, vd);
+ EQV_V2DF
+
+[VEC_EXPANDM, vec_expandm, __builtin_vec_vexpandm, _ARCH_PWR10]
+ vuc __builtin_vec_vexpandm (vuc);
+ VEXPANDMB
+ vus __builtin_vec_vexpandm (vus);
+ VEXPANDMH
+ vui __builtin_vec_vexpandm (vui);
+ VEXPANDMW
+ vull __builtin_vec_vexpandm (vull);
+ VEXPANDMD
+ vuq __builtin_vec_vexpandm (vuq);
+ VEXPANDMQ
+
+[VEC_EXPTE, vec_expte, __builtin_vec_expte]
+ vf __builtin_vec_expte (vf);
+ VEXPTEFP
+
+[VEC_EXTRACTM, vec_extractm, __builtin_vec_vextractm, _ARCH_PWR10]
+ signed int __builtin_vec_vextractm (vuc);
+ VEXTRACTMB
+ signed int __builtin_vec_vextractm (vus);
+ VEXTRACTMH
+ signed int __builtin_vec_vextractm (vui);
+ VEXTRACTMW
+ signed int __builtin_vec_vextractm (vull);
+ VEXTRACTMD
+ signed int __builtin_vec_vextractm (vuq);
+ VEXTRACTMQ
+
+[VEC_EXTRACT_FP_FROM_SHORTH, vec_extract_fp32_from_shorth, __builtin_vec_vextract_fp_from_shorth, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shorth (vus);
+ VEXTRACT_FP_FROM_SHORTH
+
+[VEC_EXTRACT_FP_FROM_SHORTL, vec_extract_fp32_from_shortl, __builtin_vec_vextract_fp_from_shortl, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shortl (vus);
+ VEXTRACT_FP_FROM_SHORTL
+
+[VEC_EXTRACTH, vec_extracth, __builtin_vec_extracth, _ARCH_PWR10]
+ vull __builtin_vec_extracth (vuc, vuc, unsigned char);
+ VEXTRACTBR
+ vull __builtin_vec_extracth (vus, vus, unsigned char);
+ VEXTRACTHR
+ vull __builtin_vec_extracth (vui, vui, unsigned char);
+ VEXTRACTWR
+ vull __builtin_vec_extracth (vull, vull, unsigned char);
+ VEXTRACTDR
+
+[VEC_EXTRACTL, vec_extractl, __builtin_vec_extractl, _ARCH_PWR10]
+ vull __builtin_vec_extractl (vuc, vuc, unsigned char);
+ VEXTRACTBL
+ vull __builtin_vec_extractl (vus, vus, unsigned char);
+ VEXTRACTHL
+ vull __builtin_vec_extractl (vui, vui, unsigned char);
+ VEXTRACTWL
+ vull __builtin_vec_extractl (vull, vull, unsigned char);
+ VEXTRACTDL
+
+[VEC_EXTRACT4B, vec_extract4b, __builtin_vec_extract4b, _ARCH_PWR9]
+ vull __builtin_vec_extract4b (vuc, const int);
+ EXTRACT4B
+
+; TODO: These look like invalid return type conversions (also present
+; in the previous implementation).
+; Need versions of __builtin_altivec_vextu*[lr]x that return signed values
+; and floats.
+[VEC_EXTULX, vec_xlx, __builtin_vec_vextulx, _ARCH_PWR9]
+ signed char __builtin_vec_vextulx (unsigned int, vsc);
+ VEXTUBLX VEXTUBLX_S
+ unsigned char __builtin_vec_vextulx (unsigned int, vuc);
+ VEXTUBLX VEXTUBLX_U
+ signed short __builtin_vec_vextulx (unsigned int, vss);
+ VEXTUHLX VEXTUHLX_S
+ unsigned short __builtin_vec_vextulx (unsigned int, vus);
+ VEXTUHLX VEXTUHLX_U
+ signed int __builtin_vec_vextulx (unsigned int, vsi);
+ VEXTUWLX VEXTUWLX_S
+ unsigned int __builtin_vec_vextulx (unsigned int, vui);
+ VEXTUWLX VEXTUWLX_U
+ float __builtin_vec_vextulx (unsigned int, vf);
+ VEXTUWLX VEXTUWLX_F
+
+; TODO: These look like invalid return type conversions (also present
+; in the previous implementation).
+; Need versions of __builtin_altivec_vextu*[lr]x that return signed values
+; and floats.
+[VEC_EXTURX, vec_xrx, __builtin_vec_vexturx, _ARCH_PWR9]
+ signed char __builtin_vec_vexturx (unsigned int, vsc);
+ VEXTUBRX VEXTUBRX_S
+ unsigned char __builtin_vec_vexturx (unsigned int, vuc);
+ VEXTUBRX VEXTUBRX_U
+ signed short __builtin_vec_vexturx (unsigned int, vss);
+ VEXTUHRX VEXTUHRX_S
+ unsigned short __builtin_vec_vexturx (unsigned int, vus);
+ VEXTUHRX VEXTUHRX_U
+ signed int __builtin_vec_vexturx (unsigned int, vsi);
+ VEXTUWRX VEXTUWRX_S
+ unsigned int __builtin_vec_vexturx (unsigned int, vui);
+ VEXTUWRX VEXTUWRX_U
+ float __builtin_vec_vexturx (unsigned int, vf);
+ VEXTUWRX VEXTUWRX_F
+
+; String-match index overloads (ISA 3.0): each pair of a signed and an
+; unsigned prototype shares one instance per element width.
+[VEC_FIRSTMATCHINDEX, vec_first_match_index, __builtin_vec_first_match_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_index (vsc, vsc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_match_index (vuc, vuc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_match_index (vss, vss);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_match_index (vus, vus);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_match_index (vsi, vsi);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_match_index (vui, vui);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VUI
+
+[VEC_FIRSTMATCHOREOSINDEX, vec_first_match_or_eos_index, __builtin_vec_first_match_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_or_eos_index (vsc, vsc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_match_or_eos_index (vuc, vuc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_match_or_eos_index (vss, vss);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_match_or_eos_index (vus, vus);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_match_or_eos_index (vsi, vsi);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_match_or_eos_index (vui, vui);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VUI
+
+[VEC_FIRSTMISMATCHINDEX, vec_first_mismatch_index, __builtin_vec_first_mismatch_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_index (vsc, vsc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_index (vuc, vuc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_index (vss, vss);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_index (vus, vus);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_index (vsi, vsi);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_index (vui, vui);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VUI
+
+[VEC_FIRSTMISMATCHOREOSINDEX, vec_first_mismatch_or_eos_index, __builtin_vec_first_mismatch_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsc, vsc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vuc, vuc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vss, vss);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vus, vus);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsi, vsi);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vui, vui);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VUI
+
+; Integer-to-float conversion and rounding overloads.  The float2/floate/
+; floato variants narrow 64-bit inputs into a V4SF result.
+[VEC_FLOAT, vec_float, __builtin_vec_float]
+ vf __builtin_vec_float (vsi);
+ XVCVSXWSP_V4SF
+ vf __builtin_vec_float (vui);
+ XVCVUXWSP_V4SF
+
+[VEC_FLOAT2, vec_float2, __builtin_vec_float2]
+ vf __builtin_vec_float2 (vsll, vsll);
+ FLOAT2_V2DI
+ vf __builtin_vec_float2 (vull, vull);
+ UNS_FLOAT2_V2DI
+ vf __builtin_vec_float2 (vd, vd);
+ FLOAT2_V2DF
+
+[VEC_FLOATE, vec_floate, __builtin_vec_floate]
+ vf __builtin_vec_floate (vsll);
+ FLOATE_V2DI
+ vf __builtin_vec_floate (vull);
+ UNS_FLOATE_V2DI
+ vf __builtin_vec_floate (vd);
+ FLOATE_V2DF
+
+[VEC_FLOATO, vec_floato, __builtin_vec_floato]
+ vf __builtin_vec_floato (vsll);
+ FLOATO_V2DI
+ vf __builtin_vec_floato (vull);
+ UNS_FLOATO_V2DI
+ vf __builtin_vec_floato (vd);
+ FLOATO_V2DF
+
+[VEC_FLOOR, vec_floor, __builtin_vec_floor]
+ vf __builtin_vec_floor (vf);
+ XVRSPIM
+ vd __builtin_vec_floor (vd);
+ XVRDPIM
+
+; Gather-bits and mask-generation overloads.  The mtvsr*m forms all take
+; an unsigned long long mask and produce a vuc-typed result vector.
+[VEC_GB, vec_gb, __builtin_vec_vgbbd, _ARCH_PWR8]
+ vsc __builtin_vec_vgbbd (vsc);
+ VGBBD VGBBD_S
+ vuc __builtin_vec_vgbbd (vuc);
+ VGBBD VGBBD_U
+
+[VEC_GENBM, vec_genbm, __builtin_vec_mtvsrbm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrbm (unsigned long long);
+ MTVSRBM
+
+[VEC_GENHM, vec_genhm, __builtin_vec_mtvsrhm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrhm (unsigned long long);
+ MTVSRHM
+
+[VEC_GENWM, vec_genwm, __builtin_vec_mtvsrwm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrwm (unsigned long long);
+ MTVSRWM
+
+[VEC_GENDM, vec_gendm, __builtin_vec_mtvsrdm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrdm (unsigned long long);
+ MTVSRDM
+
+[VEC_GENQM, vec_genqm, __builtin_vec_mtvsrqm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrqm (unsigned long long);
+ MTVSRQM
+
+[VEC_GENPCVM, vec_genpcvm, __builtin_vec_xxgenpcvm, _ARCH_PWR10]
+ vuc __builtin_vec_xxgenpcvm (vuc, const int);
+ XXGENPCVM_V16QI
+ vus __builtin_vec_xxgenpcvm (vus, const int);
+ XXGENPCVM_V8HI
+ vui __builtin_vec_xxgenpcvm (vui, const int);
+ XXGENPCVM_V4SI
+ vull __builtin_vec_xxgenpcvm (vull, const int);
+ XXGENPCVM_V2DI
+
+[VEC_GNB, vec_gnb, __builtin_vec_gnb, _ARCH_PWR10]
+ vull __builtin_vec_gnb (vuq, unsigned char);
+ VGNB
+
+; Element-insertion overloads (PWR10/PWR9).  Each element width has a
+; GPR-source and a VR-source instance; insert4b patches a 4-byte field.
+[VEC_INSERTH, vec_inserth, __builtin_vec_inserth, _ARCH_PWR10]
+ vuc __builtin_vec_inserth (unsigned char, vuc, unsigned int);
+ VINSERTGPRBR
+ vuc __builtin_vec_inserth (vuc, vuc, unsigned int);
+ VINSERTVPRBR
+ vus __builtin_vec_inserth (unsigned short, vus, unsigned int);
+ VINSERTGPRHR
+ vus __builtin_vec_inserth (vus, vus, unsigned int);
+ VINSERTVPRHR
+ vui __builtin_vec_inserth (unsigned int, vui, unsigned int);
+ VINSERTGPRWR
+ vui __builtin_vec_inserth (vui, vui, unsigned int);
+ VINSERTVPRWR
+ vull __builtin_vec_inserth (unsigned long long, vull, unsigned int);
+ VINSERTGPRDR
+
+[VEC_INSERTL, vec_insertl, __builtin_vec_insertl, _ARCH_PWR10]
+ vuc __builtin_vec_insertl (unsigned char, vuc, unsigned int);
+ VINSERTGPRBL
+ vuc __builtin_vec_insertl (vuc, vuc, unsigned int);
+ VINSERTVPRBL
+ vus __builtin_vec_insertl (unsigned short, vus, unsigned int);
+ VINSERTGPRHL
+ vus __builtin_vec_insertl (vus, vus, unsigned int);
+ VINSERTVPRHL
+ vui __builtin_vec_insertl (unsigned int, vui, unsigned int);
+ VINSERTGPRWL
+ vui __builtin_vec_insertl (vui, vui, unsigned int);
+ VINSERTVPRWL
+ vull __builtin_vec_insertl (unsigned long long, vull, unsigned int);
+ VINSERTGPRDL
+
+[VEC_INSERT4B, vec_insert4b, __builtin_vec_insert4b, _ARCH_PWR9]
+ vuc __builtin_vec_insert4b (vsi, vuc, const int);
+ INSERT4B INSERT4B_S
+ vuc __builtin_vec_insert4b (vui, vuc, const int);
+ INSERT4B INSERT4B_U
+
+; AltiVec vector loads: vec_ld maps every element type to the matching
+; lvx instance; vec_lde loads a single element via lve{b,h,w}x.
+; NOTE(review): only the vsq/vuq pointer operands are const-qualified
+; here -- looks inconsistent with the other pointer types; confirm intent.
+[VEC_LD, vec_ld, __builtin_vec_ld]
+ vsc __builtin_vec_ld (signed long long, vsc *);
+ LVX_V16QI LVX_V16QI_VSC
+ vsc __builtin_vec_ld (signed long long, signed char *);
+ LVX_V16QI LVX_V16QI_SC
+ vuc __builtin_vec_ld (signed long long, vuc *);
+ LVX_V16QI LVX_V16QI_VUC
+ vuc __builtin_vec_ld (signed long long, unsigned char *);
+ LVX_V16QI LVX_V16QI_UC
+ vbc __builtin_vec_ld (signed long long, vbc *);
+ LVX_V16QI LVX_V16QI_VBC
+ vss __builtin_vec_ld (signed long long, vss *);
+ LVX_V8HI LVX_V8HI_VSS
+ vss __builtin_vec_ld (signed long long, signed short *);
+ LVX_V8HI LVX_V8HI_SS
+ vus __builtin_vec_ld (signed long long, vus *);
+ LVX_V8HI LVX_V8HI_VUS
+ vus __builtin_vec_ld (signed long long, unsigned short *);
+ LVX_V8HI LVX_V8HI_US
+ vbs __builtin_vec_ld (signed long long, vbs *);
+ LVX_V8HI LVX_V8HI_VBS
+ vp __builtin_vec_ld (signed long long, vp *);
+ LVX_V8HI LVX_V8HI_VP
+ vsi __builtin_vec_ld (signed long long, vsi *);
+ LVX_V4SI LVX_V4SI_VSI
+ vsi __builtin_vec_ld (signed long long, signed int *);
+ LVX_V4SI LVX_V4SI_SI
+ vui __builtin_vec_ld (signed long long, vui *);
+ LVX_V4SI LVX_V4SI_VUI
+ vui __builtin_vec_ld (signed long long, unsigned int *);
+ LVX_V4SI LVX_V4SI_UI
+ vbi __builtin_vec_ld (signed long long, vbi *);
+ LVX_V4SI LVX_V4SI_VBI
+ vsll __builtin_vec_ld (signed long long, vsll *);
+ LVX_V2DI LVX_V2DI_VSLL
+ vsll __builtin_vec_ld (signed long long, signed long long *);
+ LVX_V2DI LVX_V2DI_SLL
+ vull __builtin_vec_ld (signed long long, vull *);
+ LVX_V2DI LVX_V2DI_VULL
+ vull __builtin_vec_ld (signed long long, unsigned long long *);
+ LVX_V2DI LVX_V2DI_ULL
+ vbll __builtin_vec_ld (signed long long, vbll *);
+ LVX_V2DI LVX_V2DI_VBLL
+ vsq __builtin_vec_ld (signed long long, const vsq *);
+ LVX_V1TI LVX_V1TI_VSQ
+ vuq __builtin_vec_ld (signed long long, const vuq *);
+ LVX_V1TI LVX_V1TI_VUQ
+ vsq __builtin_vec_ld (signed long long, __int128 *);
+ LVX_V1TI LVX_V1TI_TI
+ vuq __builtin_vec_ld (signed long long, unsigned __int128 *);
+ LVX_V1TI LVX_V1TI_UTI
+ vf __builtin_vec_ld (signed long long, vf *);
+ LVX_V4SF LVX_V4SF_VF
+ vf __builtin_vec_ld (signed long long, float *);
+ LVX_V4SF LVX_V4SF_F
+ vd __builtin_vec_ld (signed long long, vd *);
+ LVX_V2DF LVX_V2DF_VD
+ vd __builtin_vec_ld (signed long long, double *);
+ LVX_V2DF LVX_V2DF_D
+
+[VEC_LDE, vec_lde, __builtin_vec_lde]
+ vsc __builtin_vec_lde (signed long long, signed char *);
+ LVEBX LVEBX_SC
+ vuc __builtin_vec_lde (signed long long, unsigned char *);
+ LVEBX LVEBX_UC
+ vss __builtin_vec_lde (signed long long, signed short *);
+ LVEHX LVEHX_SS
+ vus __builtin_vec_lde (signed long long, unsigned short *);
+ LVEHX LVEHX_US
+ vsi __builtin_vec_lde (signed long long, signed int *);
+ LVEWX LVEWX_SI
+ vui __builtin_vec_lde (signed long long, unsigned int *);
+ LVEWX LVEWX_UI
+ vf __builtin_vec_lde (signed long long, float *);
+ LVEWX LVEWX_F
+
+; vec_ldl (load vector, least-recently-used hint): mirrors the VEC_LD
+; table entry-for-entry, using the lvxl instances instead of lvx.
+[VEC_LDL, vec_ldl, __builtin_vec_ldl]
+ vsc __builtin_vec_ldl (signed long long, vsc *);
+ LVXL_V16QI LVXL_V16QI_VSC
+ vsc __builtin_vec_ldl (signed long long, signed char *);
+ LVXL_V16QI LVXL_V16QI_SC
+ vuc __builtin_vec_ldl (signed long long, vuc *);
+ LVXL_V16QI LVXL_V16QI_VUC
+ vuc __builtin_vec_ldl (signed long long, unsigned char *);
+ LVXL_V16QI LVXL_V16QI_UC
+ vbc __builtin_vec_ldl (signed long long, vbc *);
+ LVXL_V16QI LVXL_V16QI_VBC
+ vss __builtin_vec_ldl (signed long long, vss *);
+ LVXL_V8HI LVXL_V8HI_VSS
+ vss __builtin_vec_ldl (signed long long, signed short *);
+ LVXL_V8HI LVXL_V8HI_SS
+ vus __builtin_vec_ldl (signed long long, vus *);
+ LVXL_V8HI LVXL_V8HI_VUS
+ vus __builtin_vec_ldl (signed long long, unsigned short *);
+ LVXL_V8HI LVXL_V8HI_US
+ vbs __builtin_vec_ldl (signed long long, vbs *);
+ LVXL_V8HI LVXL_V8HI_VBS
+ vp __builtin_vec_ldl (signed long long, vp *);
+ LVXL_V8HI LVXL_V8HI_VP
+ vsi __builtin_vec_ldl (signed long long, vsi *);
+ LVXL_V4SI LVXL_V4SI_VSI
+ vsi __builtin_vec_ldl (signed long long, signed int *);
+ LVXL_V4SI LVXL_V4SI_SI
+ vui __builtin_vec_ldl (signed long long, vui *);
+ LVXL_V4SI LVXL_V4SI_VUI
+ vui __builtin_vec_ldl (signed long long, unsigned int *);
+ LVXL_V4SI LVXL_V4SI_UI
+ vbi __builtin_vec_ldl (signed long long, vbi *);
+ LVXL_V4SI LVXL_V4SI_VBI
+ vsll __builtin_vec_ldl (signed long long, vsll *);
+ LVXL_V2DI LVXL_V2DI_VSLL
+ vsll __builtin_vec_ldl (signed long long, signed long long *);
+ LVXL_V2DI LVXL_V2DI_SLL
+ vull __builtin_vec_ldl (signed long long, vull *);
+ LVXL_V2DI LVXL_V2DI_VULL
+ vull __builtin_vec_ldl (signed long long, unsigned long long *);
+ LVXL_V2DI LVXL_V2DI_ULL
+ vbll __builtin_vec_ldl (signed long long, vbll *);
+ LVXL_V2DI LVXL_V2DI_VBLL
+ vsq __builtin_vec_ldl (signed long long, const vsq *);
+ LVXL_V1TI LVXL_V1TI_VSQ
+ vuq __builtin_vec_ldl (signed long long, const vuq *);
+ LVXL_V1TI LVXL_V1TI_VUQ
+ vsq __builtin_vec_ldl (signed long long, __int128 *);
+ LVXL_V1TI LVXL_V1TI_TI
+ vuq __builtin_vec_ldl (signed long long, unsigned __int128 *);
+ LVXL_V1TI LVXL_V1TI_UTI
+ vf __builtin_vec_ldl (signed long long, vf *);
+ LVXL_V4SF LVXL_V4SF_VF
+ vf __builtin_vec_ldl (signed long long, float *);
+ LVXL_V4SF LVXL_V4SF_F
+ vd __builtin_vec_ldl (signed long long, vd *);
+ LVXL_V2DF LVXL_V2DF_VD
+ vd __builtin_vec_ldl (signed long long, double *);
+ LVXL_V2DF LVXL_V2DF_D
+
+[VEC_LOGE, vec_loge, __builtin_vec_loge]
+ vf __builtin_vec_loge (vf);
+ VLOGEFP
+
+; Unaligned left/right load helpers.  The __PPU__ predicate restricts the
+; lvlx/lvlxl/lvrx/lvrxl stanzas (presumably the Cell PPU -- confirm);
+; every element type resolves to the single LVLX/LVRX(-L) instance.
+[VEC_LVLX, vec_lvlx, __builtin_vec_lvlx, __PPU__]
+ vbc __builtin_vec_lvlx (signed long long, vbc *);
+ LVLX LVLX_VBC
+ vsc __builtin_vec_lvlx (signed long long, vsc *);
+ LVLX LVLX_VSC
+ vsc __builtin_vec_lvlx (signed long long, signed char *);
+ LVLX LVLX_SC
+ vuc __builtin_vec_lvlx (signed long long, vuc *);
+ LVLX LVLX_VUC
+ vuc __builtin_vec_lvlx (signed long long, unsigned char *);
+ LVLX LVLX_UC
+ vbs __builtin_vec_lvlx (signed long long, vbs *);
+ LVLX LVLX_VBS
+ vss __builtin_vec_lvlx (signed long long, vss *);
+ LVLX LVLX_VSS
+ vss __builtin_vec_lvlx (signed long long, signed short *);
+ LVLX LVLX_SS
+ vus __builtin_vec_lvlx (signed long long, vus *);
+ LVLX LVLX_VUS
+ vus __builtin_vec_lvlx (signed long long, unsigned short *);
+ LVLX LVLX_US
+ vp __builtin_vec_lvlx (signed long long, vp *);
+ LVLX LVLX_VP
+ vbi __builtin_vec_lvlx (signed long long, vbi *);
+ LVLX LVLX_VBI
+ vsi __builtin_vec_lvlx (signed long long, vsi *);
+ LVLX LVLX_VSI
+ vsi __builtin_vec_lvlx (signed long long, signed int *);
+ LVLX LVLX_SI
+ vui __builtin_vec_lvlx (signed long long, vui *);
+ LVLX LVLX_VUI
+ vui __builtin_vec_lvlx (signed long long, unsigned int *);
+ LVLX LVLX_UI
+ vf __builtin_vec_lvlx (signed long long, vf *);
+ LVLX LVLX_VF
+ vf __builtin_vec_lvlx (signed long long, float *);
+ LVLX LVLX_F
+
+[VEC_LVLXL, vec_lvlxl, __builtin_vec_lvlxl, __PPU__]
+ vbc __builtin_vec_lvlxl (signed long long, vbc *);
+ LVLXL LVLXL_VBC
+ vsc __builtin_vec_lvlxl (signed long long, vsc *);
+ LVLXL LVLXL_VSC
+ vsc __builtin_vec_lvlxl (signed long long, signed char *);
+ LVLXL LVLXL_SC
+ vuc __builtin_vec_lvlxl (signed long long, vuc *);
+ LVLXL LVLXL_VUC
+ vuc __builtin_vec_lvlxl (signed long long, unsigned char *);
+ LVLXL LVLXL_UC
+ vbs __builtin_vec_lvlxl (signed long long, vbs *);
+ LVLXL LVLXL_VBS
+ vss __builtin_vec_lvlxl (signed long long, vss *);
+ LVLXL LVLXL_VSS
+ vss __builtin_vec_lvlxl (signed long long, signed short *);
+ LVLXL LVLXL_SS
+ vus __builtin_vec_lvlxl (signed long long, vus *);
+ LVLXL LVLXL_VUS
+ vus __builtin_vec_lvlxl (signed long long, unsigned short *);
+ LVLXL LVLXL_US
+ vp __builtin_vec_lvlxl (signed long long, vp *);
+ LVLXL LVLXL_VP
+ vbi __builtin_vec_lvlxl (signed long long, vbi *);
+ LVLXL LVLXL_VBI
+ vsi __builtin_vec_lvlxl (signed long long, vsi *);
+ LVLXL LVLXL_VSI
+ vsi __builtin_vec_lvlxl (signed long long, signed int *);
+ LVLXL LVLXL_SI
+ vui __builtin_vec_lvlxl (signed long long, vui *);
+ LVLXL LVLXL_VUI
+ vui __builtin_vec_lvlxl (signed long long, unsigned int *);
+ LVLXL LVLXL_UI
+ vf __builtin_vec_lvlxl (signed long long, vf *);
+ LVLXL LVLXL_VF
+ vf __builtin_vec_lvlxl (signed long long, float *);
+ LVLXL LVLXL_F
+
+[VEC_LVRX, vec_lvrx, __builtin_vec_lvrx, __PPU__]
+ vbc __builtin_vec_lvrx (signed long long, vbc *);
+ LVRX LVRX_VBC
+ vsc __builtin_vec_lvrx (signed long long, vsc *);
+ LVRX LVRX_VSC
+ vsc __builtin_vec_lvrx (signed long long, signed char *);
+ LVRX LVRX_SC
+ vuc __builtin_vec_lvrx (signed long long, vuc *);
+ LVRX LVRX_VUC
+ vuc __builtin_vec_lvrx (signed long long, unsigned char *);
+ LVRX LVRX_UC
+ vbs __builtin_vec_lvrx (signed long long, vbs *);
+ LVRX LVRX_VBS
+ vss __builtin_vec_lvrx (signed long long, vss *);
+ LVRX LVRX_VSS
+ vss __builtin_vec_lvrx (signed long long, signed short *);
+ LVRX LVRX_SS
+ vus __builtin_vec_lvrx (signed long long, vus *);
+ LVRX LVRX_VUS
+ vus __builtin_vec_lvrx (signed long long, unsigned short *);
+ LVRX LVRX_US
+ vp __builtin_vec_lvrx (signed long long, vp *);
+ LVRX LVRX_VP
+ vbi __builtin_vec_lvrx (signed long long, vbi *);
+ LVRX LVRX_VBI
+ vsi __builtin_vec_lvrx (signed long long, vsi *);
+ LVRX LVRX_VSI
+ vsi __builtin_vec_lvrx (signed long long, signed int *);
+ LVRX LVRX_SI
+ vui __builtin_vec_lvrx (signed long long, vui *);
+ LVRX LVRX_VUI
+ vui __builtin_vec_lvrx (signed long long, unsigned int *);
+ LVRX LVRX_UI
+ vf __builtin_vec_lvrx (signed long long, vf *);
+ LVRX LVRX_VF
+ vf __builtin_vec_lvrx (signed long long, float *);
+ LVRX LVRX_F
+
+[VEC_LVRXL, vec_lvrxl, __builtin_vec_lvrxl, __PPU__]
+ vbc __builtin_vec_lvrxl (signed long long, vbc *);
+ LVRXL LVRXL_VBC
+ vsc __builtin_vec_lvrxl (signed long long, vsc *);
+ LVRXL LVRXL_VSC
+ vsc __builtin_vec_lvrxl (signed long long, signed char *);
+ LVRXL LVRXL_SC
+ vuc __builtin_vec_lvrxl (signed long long, vuc *);
+ LVRXL LVRXL_VUC
+ vuc __builtin_vec_lvrxl (signed long long, unsigned char *);
+ LVRXL LVRXL_UC
+ vbs __builtin_vec_lvrxl (signed long long, vbs *);
+ LVRXL LVRXL_VBS
+ vss __builtin_vec_lvrxl (signed long long, vss *);
+ LVRXL LVRXL_VSS
+ vss __builtin_vec_lvrxl (signed long long, signed short *);
+ LVRXL LVRXL_SS
+ vus __builtin_vec_lvrxl (signed long long, vus *);
+ LVRXL LVRXL_VUS
+ vus __builtin_vec_lvrxl (signed long long, unsigned short *);
+ LVRXL LVRXL_US
+ vp __builtin_vec_lvrxl (signed long long, vp *);
+ LVRXL LVRXL_VP
+ vbi __builtin_vec_lvrxl (signed long long, vbi *);
+ LVRXL LVRXL_VBI
+ vsi __builtin_vec_lvrxl (signed long long, vsi *);
+ LVRXL LVRXL_VSI
+ vsi __builtin_vec_lvrxl (signed long long, signed int *);
+ LVRXL LVRXL_SI
+ vui __builtin_vec_lvrxl (signed long long, vui *);
+ LVRXL LVRXL_VUI
+ vui __builtin_vec_lvrxl (signed long long, unsigned int *);
+ LVRXL LVRXL_UI
+ vf __builtin_vec_lvrxl (signed long long, vf *);
+ LVRXL LVRXL_VF
+ vf __builtin_vec_lvrxl (signed long long, float *);
+ LVRXL LVRXL_F
+
+[VEC_LVSL, vec_lvsl, __builtin_vec_lvsl]
+ vuc __builtin_vec_lvsl (signed long long, unsigned char *);
+ LVSL LVSL_UC
+ vuc __builtin_vec_lvsl (signed long long, signed char *);
+ LVSL LVSL_SC
+
+[VEC_LVSR, vec_lvsr, __builtin_vec_lvsr]
+ vuc __builtin_vec_lvsr (signed long long, unsigned char *);
+ LVSR LVSR_UC
+ vuc __builtin_vec_lvsr (signed long long, signed char *);
+ LVSR LVSR_SC
+
+; Length-controlled load (vec_xl_len): every element type resolves to the
+; single LXVL instance; requires 64-bit POWER9 (_ARCH_PPC64_PWR9).
+[VEC_LXVL, vec_xl_len, __builtin_vec_lxvl, _ARCH_PPC64_PWR9]
+ vsc __builtin_vec_lxvl (signed char *, unsigned long long);
+ LXVL LXVL_VSC
+ vuc __builtin_vec_lxvl (unsigned char *, unsigned long long);
+ LXVL LXVL_VUC
+ vss __builtin_vec_lxvl (signed short *, unsigned long long);
+ LXVL LXVL_VSS
+ vus __builtin_vec_lxvl (unsigned short *, unsigned long long);
+ LXVL LXVL_VUS
+ vsi __builtin_vec_lxvl (signed int *, unsigned long long);
+ LXVL LXVL_VSI
+ vui __builtin_vec_lxvl (unsigned int *, unsigned long long);
+ LXVL LXVL_VUI
+ vsll __builtin_vec_lxvl (signed long long *, unsigned long long);
+ LXVL LXVL_VSLL
+ vull __builtin_vec_lxvl (unsigned long long *, unsigned long long);
+ LXVL LXVL_VULL
+ vsq __builtin_vec_lxvl (signed __int128 *, unsigned long long);
+ LXVL LXVL_VSQ
+ vuq __builtin_vec_lxvl (unsigned __int128 *, unsigned long long);
+ LXVL LXVL_VUQ
+ vf __builtin_vec_lxvl (float *, unsigned long long);
+ LXVL LXVL_VF
+ vd __builtin_vec_lxvl (double *, unsigned long long);
+ LXVL LXVL_VD
+
+; vec_madd: multiply-add.  The integer forms all map to vmladduhm; the
+; result is signed unless every operand is unsigned.  The fourth entry is
+; the all-unsigned form (vus, vus, vus) -- it previously repeated the
+; (vss, vus, vus) prototype of the second entry with a conflicting vus
+; return type, making the overload ambiguous (compare the VEC_MLADD
+; stanza, whose VMLADDUHM_VUS2 instance takes all-unsigned operands).
+[VEC_MADD, vec_madd, __builtin_vec_madd]
+ vss __builtin_vec_madd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS
+ vss __builtin_vec_madd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS
+ vss __builtin_vec_madd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS
+ vus __builtin_vec_madd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS
+ vf __builtin_vec_madd (vf, vf, vf);
+ XVMADDSP
+ vd __builtin_vec_madd (vd, vd, vd);
+ XVMADDDP
+
+; Saturating multiply-add and element-wise maximum overloads.
+[VEC_MADDS, vec_madds, __builtin_vec_madds]
+ vss __builtin_vec_madds (vss, vss, vss);
+ VMHADDSHS
+
+[VEC_MAX, vec_max, __builtin_vec_max]
+ vsc __builtin_vec_max (vsc, vsc);
+ VMAXSB
+ vuc __builtin_vec_max (vuc, vuc);
+ VMAXUB
+ vss __builtin_vec_max (vss, vss);
+ VMAXSH
+ vus __builtin_vec_max (vus, vus);
+ VMAXUH
+ vsi __builtin_vec_max (vsi, vsi);
+ VMAXSW
+ vui __builtin_vec_max (vui, vui);
+ VMAXUW
+ vsll __builtin_vec_max (vsll, vsll);
+ VMAXSD
+ vull __builtin_vec_max (vull, vull);
+ VMAXUD
+ vf __builtin_vec_max (vf, vf);
+ XVMAXSP
+ vd __builtin_vec_max (vd, vd);
+ XVMAXDP
+
+; Merge overloads: mergee/mergeo interleave even/odd words or doublewords;
+; mergeh/mergel interleave high/low halves across all element widths.
+[VEC_MERGEE, vec_mergee, __builtin_vec_vmrgew]
+ vsi __builtin_vec_vmrgew (vsi, vsi);
+ VMRGEW_V4SI VMRGEW_VSI
+ vui __builtin_vec_vmrgew (vui, vui);
+ VMRGEW_V4SI VMRGEW_VUI
+ vbi __builtin_vec_vmrgew (vbi, vbi);
+ VMRGEW_V4SI VMRGEW_VBI
+ vsll __builtin_vec_vmrgew (vsll, vsll);
+ VMRGEW_V2DI VMRGEW_VSLL
+ vull __builtin_vec_vmrgew (vull, vull);
+ VMRGEW_V2DI VMRGEW_VULL
+ vbll __builtin_vec_vmrgew (vbll, vbll);
+ VMRGEW_V2DI VMRGEW_VBLL
+ vf __builtin_vec_vmrgew (vf, vf);
+ VMRGEW_V4SF
+ vd __builtin_vec_vmrgew (vd, vd);
+ VMRGEW_V2DF
+
+[VEC_MERGEH, vec_mergeh, __builtin_vec_mergeh]
+ vbc __builtin_vec_mergeh (vbc, vbc);
+ VMRGHB VMRGHB_VBC
+ vsc __builtin_vec_mergeh (vsc, vsc);
+ VMRGHB VMRGHB_VSC
+ vuc __builtin_vec_mergeh (vuc, vuc);
+ VMRGHB VMRGHB_VUC
+ vbs __builtin_vec_mergeh (vbs, vbs);
+ VMRGHH VMRGHH_VBS
+ vss __builtin_vec_mergeh (vss, vss);
+ VMRGHH VMRGHH_VSS
+ vus __builtin_vec_mergeh (vus, vus);
+ VMRGHH VMRGHH_VUS
+ vp __builtin_vec_mergeh (vp, vp);
+ VMRGHH VMRGHH_VP
+ vbi __builtin_vec_mergeh (vbi, vbi);
+ VMRGHW VMRGHW_VBI
+ vsi __builtin_vec_mergeh (vsi, vsi);
+ VMRGHW VMRGHW_VSI
+ vui __builtin_vec_mergeh (vui, vui);
+ VMRGHW VMRGHW_VUI
+ vbll __builtin_vec_mergeh (vbll, vbll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VBLL
+ vsll __builtin_vec_mergeh (vsll, vsll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VSLL
+ vull __builtin_vec_mergeh (vull, vull);
+ VEC_MERGEH_V2DI VEC_MERGEH_VULL
+ vf __builtin_vec_mergeh (vf, vf);
+ VMRGHW VMRGHW_VF
+ vd __builtin_vec_mergeh (vd, vd);
+ VEC_MERGEH_V2DF
+
+[VEC_MERGEL, vec_mergel, __builtin_vec_mergel]
+ vbc __builtin_vec_mergel (vbc, vbc);
+ VMRGLB VMRGLB_VBC
+ vsc __builtin_vec_mergel (vsc, vsc);
+ VMRGLB VMRGLB_VSC
+ vuc __builtin_vec_mergel (vuc, vuc);
+ VMRGLB VMRGLB_VUC
+ vbs __builtin_vec_mergel (vbs, vbs);
+ VMRGLH VMRGLH_VBS
+ vss __builtin_vec_mergel (vss, vss);
+ VMRGLH VMRGLH_VSS
+ vus __builtin_vec_mergel (vus, vus);
+ VMRGLH VMRGLH_VUS
+ vp __builtin_vec_mergel (vp, vp);
+ VMRGLH VMRGLH_VP
+ vbi __builtin_vec_mergel (vbi, vbi);
+ VMRGLW VMRGLW_VBI
+ vsi __builtin_vec_mergel (vsi, vsi);
+ VMRGLW VMRGLW_VSI
+ vui __builtin_vec_mergel (vui, vui);
+ VMRGLW VMRGLW_VUI
+ vbll __builtin_vec_mergel (vbll, vbll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VBLL
+ vsll __builtin_vec_mergel (vsll, vsll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VSLL
+ vull __builtin_vec_mergel (vull, vull);
+ VEC_MERGEL_V2DI VEC_MERGEL_VULL
+ vf __builtin_vec_mergel (vf, vf);
+ VMRGLW VMRGLW_VF
+ vd __builtin_vec_mergel (vd, vd);
+ VEC_MERGEL_V2DF
+
+[VEC_MERGEO, vec_mergeo, __builtin_vec_vmrgow]
+ vsi __builtin_vec_vmrgow (vsi, vsi);
+ VMRGOW_V4SI VMRGOW_VSI
+ vui __builtin_vec_vmrgow (vui, vui);
+ VMRGOW_V4SI VMRGOW_VUI
+ vbi __builtin_vec_vmrgow (vbi, vbi);
+ VMRGOW_V4SI VMRGOW_VBI
+ vsll __builtin_vec_vmrgow (vsll, vsll);
+ VMRGOW_V2DI VMRGOW_VSLL
+ vull __builtin_vec_vmrgow (vull, vull);
+ VMRGOW_V2DI VMRGOW_VULL
+ vbll __builtin_vec_vmrgow (vbll, vbll);
+ VMRGOW_V2DI VMRGOW_VBLL
+ vf __builtin_vec_vmrgow (vf, vf);
+ VMRGOW_V4SF
+ vd __builtin_vec_vmrgow (vd, vd);
+ VMRGOW_V2DF
+
+; Element-wise minimum, modular multiply-low-and-add, and saturating
+; rounded multiply-high-and-add overloads.
+[VEC_MIN, vec_min, __builtin_vec_min]
+ vsc __builtin_vec_min (vsc, vsc);
+ VMINSB
+ vuc __builtin_vec_min (vuc, vuc);
+ VMINUB
+ vss __builtin_vec_min (vss, vss);
+ VMINSH
+ vus __builtin_vec_min (vus, vus);
+ VMINUH
+ vsi __builtin_vec_min (vsi, vsi);
+ VMINSW
+ vui __builtin_vec_min (vui, vui);
+ VMINUW
+ vsll __builtin_vec_min (vsll, vsll);
+ VMINSD
+ vull __builtin_vec_min (vull, vull);
+ VMINUD
+ vf __builtin_vec_min (vf, vf);
+ XVMINSP
+ vd __builtin_vec_min (vd, vd);
+ XVMINDP
+
+[VEC_MLADD, vec_mladd, __builtin_vec_mladd]
+ vss __builtin_vec_mladd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS2
+ vss __builtin_vec_mladd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS2
+ vss __builtin_vec_mladd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS2
+ vus __builtin_vec_mladd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS2
+
+[VEC_MRADDS, vec_mradds, __builtin_vec_mradds]
+ vss __builtin_vec_mradds (vss, vss, vss);
+ VMHRADDSHS
+
+; Multiply-subtract and multiply-sum overloads.  Both 64-bit msum entries
+; resolve to the single vmsumudm instance (signed and unsigned variants).
+[VEC_MSUB, vec_msub, __builtin_vec_msub, __VSX__]
+ vf __builtin_vec_msub (vf, vf, vf);
+ XVMSUBSP
+ vd __builtin_vec_msub (vd, vd, vd);
+ XVMSUBDP
+
+[VEC_MSUM, vec_msum, __builtin_vec_msum]
+ vui __builtin_vec_msum (vuc, vuc, vui);
+ VMSUMUBM
+ vsi __builtin_vec_msum (vsc, vuc, vsi);
+ VMSUMMBM
+ vui __builtin_vec_msum (vus, vus, vui);
+ VMSUMUHM
+ vsi __builtin_vec_msum (vss, vss, vsi);
+ VMSUMSHM
+ vsq __builtin_vec_msum (vsll, vsll, vsq);
+ VMSUMUDM VMSUMUDM_S
+ vuq __builtin_vec_msum (vull, vull, vuq);
+ VMSUMUDM VMSUMUDM_U
+
+[VEC_MSUMS, vec_msums, __builtin_vec_msums]
+ vui __builtin_vec_msums (vus, vus, vui);
+ VMSUMUHS
+ vsi __builtin_vec_msums (vss, vss, vsi);
+ VMSUMSHS
+
+; vec_mtvscr: move to vector status and control register; accepts any
+; 8/16/32-bit element type, all resolving to the single MTVSCR instance.
+[VEC_MTVSCR, vec_mtvscr, __builtin_vec_mtvscr]
+ void __builtin_vec_mtvscr (vbc);
+ MTVSCR MTVSCR_VBC
+ void __builtin_vec_mtvscr (vsc);
+ MTVSCR MTVSCR_VSC
+ void __builtin_vec_mtvscr (vuc);
+ MTVSCR MTVSCR_VUC
+ void __builtin_vec_mtvscr (vbs);
+ MTVSCR MTVSCR_VBS
+ void __builtin_vec_mtvscr (vss);
+ MTVSCR MTVSCR_VSS
+ void __builtin_vec_mtvscr (vus);
+ MTVSCR MTVSCR_VUS
+ void __builtin_vec_mtvscr (vp);
+ MTVSCR MTVSCR_VP
+ void __builtin_vec_mtvscr (vbi);
+ MTVSCR MTVSCR_VBI
+ void __builtin_vec_mtvscr (vsi);
+ MTVSCR MTVSCR_VSI
+ void __builtin_vec_mtvscr (vui);
+ MTVSCR MTVSCR_VUI
+
+; Note that there is no entry for VEC_MUL. See rs6000-c.c:
+; altivec_resolve_overloaded_builtin, where there is special-case
+; code for VEC_MUL. TODO: Is this really necessary? Investigate.
+
+; Widening multiplies of even/odd elements: each result type is twice the
+; operand element width.
+[VEC_MULE, vec_mule, __builtin_vec_mule]
+ vss __builtin_vec_mule (vsc, vsc);
+ VMULESB
+ vus __builtin_vec_mule (vuc, vuc);
+ VMULEUB
+ vsi __builtin_vec_mule (vss, vss);
+ VMULESH
+ vui __builtin_vec_mule (vus, vus);
+ VMULEUH
+ vsll __builtin_vec_mule (vsi, vsi);
+ VMULESW
+ vull __builtin_vec_mule (vui, vui);
+ VMULEUW
+
+[VEC_MULO, vec_mulo, __builtin_vec_mulo]
+ vss __builtin_vec_mulo (vsc, vsc);
+ VMULOSB
+ vus __builtin_vec_mulo (vuc, vuc);
+ VMULOUB
+ vsi __builtin_vec_mulo (vss, vss);
+ VMULOSH
+ vui __builtin_vec_mulo (vus, vus);
+ VMULOUH
+ vsll __builtin_vec_mulo (vsi, vsi);
+ VMULOSW
+ vull __builtin_vec_mulo (vui, vui);
+ VMULOUW
+
+; Negated absolute value and bitwise NAND overloads.  Unsigned and bool
+; operand types share the *_UNS instances with distinct internal names.
+[VEC_NABS, vec_nabs, __builtin_vec_nabs]
+ vsc __builtin_vec_nabs (vsc);
+ NABS_V16QI
+ vss __builtin_vec_nabs (vss);
+ NABS_V8HI
+ vsi __builtin_vec_nabs (vsi);
+ NABS_V4SI
+ vsll __builtin_vec_nabs (vsll);
+ NABS_V2DI
+ vf __builtin_vec_nabs (vf);
+ NABS_V4SF
+ vd __builtin_vec_nabs (vd);
+ NABS_V2DF
+
+[VEC_NAND, vec_nand, __builtin_vec_nand, _ARCH_PWR8]
+ vsc __builtin_vec_nand (vsc, vsc);
+ NAND_V16QI
+ vuc __builtin_vec_nand (vuc, vuc);
+ NAND_V16QI_UNS NAND_VUC
+ vbc __builtin_vec_nand (vbc, vbc);
+ NAND_V16QI_UNS NAND_VBC
+ vss __builtin_vec_nand (vss, vss);
+ NAND_V8HI
+ vus __builtin_vec_nand (vus, vus);
+ NAND_V8HI_UNS NAND_VUS
+ vbs __builtin_vec_nand (vbs, vbs);
+ NAND_V8HI_UNS NAND_VBS
+ vsi __builtin_vec_nand (vsi, vsi);
+ NAND_V4SI
+ vui __builtin_vec_nand (vui, vui);
+ NAND_V4SI_UNS NAND_VUI
+ vbi __builtin_vec_nand (vbi, vbi);
+ NAND_V4SI_UNS NAND_VBI
+ vsll __builtin_vec_nand (vsll, vsll);
+ NAND_V2DI
+ vull __builtin_vec_nand (vull, vull);
+ NAND_V2DI_UNS NAND_VULL
+ vbll __builtin_vec_nand (vbll, vbll);
+ NAND_V2DI_UNS NAND_VBLL
+ vf __builtin_vec_nand (vf, vf);
+ NAND_V4SF
+ vd __builtin_vec_nand (vd, vd);
+ NAND_V2DF
+
+; AES inverse-cipher (big-endian forms) and round-to-nearby overloads.
+[VEC_NCIPHER_BE, vec_ncipher_be, __builtin_vec_vncipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipher_be (vuc, vuc);
+ VNCIPHER_BE
+
+[VEC_NCIPHERLAST_BE, vec_ncipherlast_be, __builtin_vec_vncipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipherlast_be (vuc, vuc);
+ VNCIPHERLAST_BE
+
+[VEC_NEARBYINT, vec_nearbyint, __builtin_vec_nearbyint, __VSX__]
+ vf __builtin_vec_nearbyint (vf);
+ XVRSPI XVRSPI_NBI
+ vd __builtin_vec_nearbyint (vd);
+ XVRDPI XVRDPI_NBI
+
+; Negation, negated fused multiply-add/subtract, and bitwise NOR overloads.
+[VEC_NEG, vec_neg, __builtin_vec_neg]
+ vsc __builtin_vec_neg (vsc);
+ NEG_V16QI
+ vss __builtin_vec_neg (vss);
+ NEG_V8HI
+ vsi __builtin_vec_neg (vsi);
+ NEG_V4SI
+ vsll __builtin_vec_neg (vsll);
+ NEG_V2DI
+ vf __builtin_vec_neg (vf);
+ NEG_V4SF
+ vd __builtin_vec_neg (vd);
+ NEG_V2DF
+
+[VEC_NMADD, vec_nmadd, __builtin_vec_nmadd, __VSX__]
+ vf __builtin_vec_nmadd (vf, vf, vf);
+ XVNMADDSP
+ vd __builtin_vec_nmadd (vd, vd, vd);
+ XVNMADDDP
+
+[VEC_NMSUB, vec_nmsub, __builtin_vec_nmsub]
+ vf __builtin_vec_nmsub (vf, vf, vf);
+ XVNMSUBSP
+ vd __builtin_vec_nmsub (vd, vd, vd);
+ XVNMSUBDP
+
+[VEC_NOR, vec_nor, __builtin_vec_nor]
+ vsc __builtin_vec_nor (vsc, vsc);
+ VNOR_V16QI
+ vuc __builtin_vec_nor (vuc, vuc);
+ VNOR_V16QI_UNS VNOR_V16QI_U
+ vbc __builtin_vec_nor (vbc, vbc);
+ VNOR_V16QI_UNS VNOR_V16QI_B
+ vss __builtin_vec_nor (vss, vss);
+ VNOR_V8HI
+ vus __builtin_vec_nor (vus, vus);
+ VNOR_V8HI_UNS VNOR_V8HI_U
+ vbs __builtin_vec_nor (vbs, vbs);
+ VNOR_V8HI_UNS VNOR_V8HI_B
+ vsi __builtin_vec_nor (vsi, vsi);
+ VNOR_V4SI
+ vui __builtin_vec_nor (vui, vui);
+ VNOR_V4SI_UNS VNOR_V4SI_U
+ vbi __builtin_vec_nor (vbi, vbi);
+ VNOR_V4SI_UNS VNOR_V4SI_B
+ vsll __builtin_vec_nor (vsll, vsll);
+ VNOR_V2DI
+ vull __builtin_vec_nor (vull, vull);
+ VNOR_V2DI_UNS VNOR_V2DI_U
+ vbll __builtin_vec_nor (vbll, vbll);
+ VNOR_V2DI_UNS VNOR_V2DI_B
+ vf __builtin_vec_nor (vf, vf);
+ VNOR_V4SF
+ vd __builtin_vec_nor (vd, vd);
+ VNOR_V2DF
+
+; Bitwise OR and OR-with-complement overloads, same *_UNS sharing scheme
+; as the NAND/NOR stanzas above.
+[VEC_OR, vec_or, __builtin_vec_or]
+ vsc __builtin_vec_or (vsc, vsc);
+ VOR_V16QI
+ vuc __builtin_vec_or (vuc, vuc);
+ VOR_V16QI_UNS VOR_V16QI_U
+ vbc __builtin_vec_or (vbc, vbc);
+ VOR_V16QI_UNS VOR_V16QI_B
+ vss __builtin_vec_or (vss, vss);
+ VOR_V8HI
+ vus __builtin_vec_or (vus, vus);
+ VOR_V8HI_UNS VOR_V8HI_U
+ vbs __builtin_vec_or (vbs, vbs);
+ VOR_V8HI_UNS VOR_V8HI_B
+ vsi __builtin_vec_or (vsi, vsi);
+ VOR_V4SI
+ vui __builtin_vec_or (vui, vui);
+ VOR_V4SI_UNS VOR_V4SI_U
+ vbi __builtin_vec_or (vbi, vbi);
+ VOR_V4SI_UNS VOR_V4SI_B
+ vsll __builtin_vec_or (vsll, vsll);
+ VOR_V2DI
+ vull __builtin_vec_or (vull, vull);
+ VOR_V2DI_UNS VOR_V2DI_U
+ vbll __builtin_vec_or (vbll, vbll);
+ VOR_V2DI_UNS VOR_V2DI_B
+ vf __builtin_vec_or (vf, vf);
+ VOR_V4SF
+ vd __builtin_vec_or (vd, vd);
+ VOR_V2DF
+
+[VEC_ORC, vec_orc, __builtin_vec_orc, _ARCH_PWR8]
+ vsc __builtin_vec_orc (vsc, vsc);
+ ORC_V16QI
+ vuc __builtin_vec_orc (vuc, vuc);
+ ORC_V16QI_UNS ORC_VUC
+ vbc __builtin_vec_orc (vbc, vbc);
+ ORC_V16QI_UNS ORC_VBC
+ vss __builtin_vec_orc (vss, vss);
+ ORC_V8HI
+ vus __builtin_vec_orc (vus, vus);
+ ORC_V8HI_UNS ORC_VUS
+ vbs __builtin_vec_orc (vbs, vbs);
+ ORC_V8HI_UNS ORC_VBS
+ vsi __builtin_vec_orc (vsi, vsi);
+ ORC_V4SI
+ vui __builtin_vec_orc (vui, vui);
+ ORC_V4SI_UNS ORC_VUI
+ vbi __builtin_vec_orc (vbi, vbi);
+ ORC_V4SI_UNS ORC_VBI
+ vsll __builtin_vec_orc (vsll, vsll);
+ ORC_V2DI
+ vull __builtin_vec_orc (vull, vull);
+ ORC_V2DI_UNS ORC_VULL
+ vbll __builtin_vec_orc (vbll, vbll);
+ ORC_V2DI_UNS ORC_VBLL
+ vf __builtin_vec_orc (vf, vf);
+ ORC_V4SF
+ vd __builtin_vec_orc (vd, vd);
+ ORC_V2DF
+
+; Narrowing pack overloads: modular (vec_pack), pixel (vec_packpx),
+; saturating (vec_packs), and parallel-bit-deposit (vec_pdep).
+[VEC_PACK, vec_pack, __builtin_vec_pack]
+ vsc __builtin_vec_pack (vss, vss);
+ VPKUHUM VPKUHUM_VSS
+ vuc __builtin_vec_pack (vus, vus);
+ VPKUHUM VPKUHUM_VUS
+ vbc __builtin_vec_pack (vbs, vbs);
+ VPKUHUM VPKUHUM_VBS
+ vss __builtin_vec_pack (vsi, vsi);
+ VPKUWUM VPKUWUM_VSI
+ vus __builtin_vec_pack (vui, vui);
+ VPKUWUM VPKUWUM_VUI
+ vbs __builtin_vec_pack (vbi, vbi);
+ VPKUWUM VPKUWUM_VBI
+ vsi __builtin_vec_pack (vsll, vsll);
+ VPKUDUM VPKUDUM_VSLL
+ vui __builtin_vec_pack (vull, vull);
+ VPKUDUM VPKUDUM_VULL
+ vbi __builtin_vec_pack (vbll, vbll);
+ VPKUDUM VPKUDUM_VBLL
+ vf __builtin_vec_pack (vd, vd);
+ FLOAT2_V2DF FLOAT2_V2DF_PACK
+
+[VEC_PACKPX, vec_packpx, __builtin_vec_packpx]
+ vp __builtin_vec_packpx (vui, vui);
+ VPKPX
+
+[VEC_PACKS, vec_packs, __builtin_vec_packs]
+ vuc __builtin_vec_packs (vus, vus);
+ VPKUHUS
+ vsc __builtin_vec_packs (vss, vss);
+ VPKSHSS
+ vus __builtin_vec_packs (vui, vui);
+ VPKUWUS
+ vss __builtin_vec_packs (vsi, vsi);
+ VPKSWSS
+ vui __builtin_vec_packs (vull, vull);
+ VPKUDUS
+ vsi __builtin_vec_packs (vsll, vsll);
+ VPKSDSS
+
+[VEC_PDEP, vec_pdep, __builtin_vec_vpdepd, _ARCH_PWR10]
+ vull __builtin_vec_vpdepd (vull, vull);
+ VPDEPD
+
+[VEC_PERM, vec_perm, __builtin_vec_perm]
+ vsc __builtin_vec_perm (vsc, vsc, vuc);
+ VPERM_16QI
+ vuc __builtin_vec_perm (vuc, vuc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VUC
+ vbc __builtin_vec_perm (vbc, vbc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VBC
+ vss __builtin_vec_perm (vss, vss, vuc);
+ VPERM_8HI
+ vus __builtin_vec_perm (vus, vus, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VUS
+ vbs __builtin_vec_perm (vbs, vbs, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VBS
+ vp __builtin_vec_perm (vp, vp, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VP
+ vsi __builtin_vec_perm (vsi, vsi, vuc);
+ VPERM_4SI
+ vui __builtin_vec_perm (vui, vui, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VUI
+ vbi __builtin_vec_perm (vbi, vbi, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VBI
+ vsll __builtin_vec_perm (vsll, vsll, vuc);
+ VPERM_2DI
+ vull __builtin_vec_perm (vull, vull, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VULL
+ vbll __builtin_vec_perm (vbll, vbll, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VBLL
+ vf __builtin_vec_perm (vf, vf, vuc);
+ VPERM_4SF
+ vd __builtin_vec_perm (vd, vd, vuc);
+ VPERM_2DF
+
+[VEC_PERMX, vec_permx, __builtin_vec_xxpermx, _ARCH_PWR10]
+ vsc __builtin_vec_xxpermx (vsc, vsc, vuc, const int);
+ XXPERMX_V16QI
+ vuc __builtin_vec_xxpermx (vuc, vuc, vuc, const int);
+ XXPERMX_UV16QI
+ vss __builtin_vec_xxpermx (vss, vss, vuc, const int);
+ XXPERMX_V8HI
+ vus __builtin_vec_xxpermx (vus, vus, vuc, const int);
+ XXPERMX_UV8HI
+ vsi __builtin_vec_xxpermx (vsi, vsi, vuc, const int);
+ XXPERMX_V4SI
+ vui __builtin_vec_xxpermx (vui, vui, vuc, const int);
+ XXPERMX_UV4SI
+ vsll __builtin_vec_xxpermx (vsll, vsll, vuc, const int);
+ XXPERMX_V2DI
+ vull __builtin_vec_xxpermx (vull, vull, vuc, const int);
+ XXPERMX_UV2DI
+ vf __builtin_vec_xxpermx (vf, vf, vuc, const int);
+ XXPERMX_V4SF
+ vd __builtin_vec_xxpermx (vd, vd, vuc, const int);
+ XXPERMX_V2DF
+
+[VEC_PERMXOR, vec_permxor, __builtin_vec_vpermxor]
+ vsc __builtin_vec_vpermxor (vsc, vsc, vsc);
+ VPERMXOR VPERMXOR_VSC
+ vuc __builtin_vec_vpermxor (vuc, vuc, vuc);
+ VPERMXOR VPERMXOR_VUC
+ vbc __builtin_vec_vpermxor (vbc, vbc, vbc);
+ VPERMXOR VPERMXOR_VBC
+
+[VEC_PEXT, vec_pext, __builtin_vec_vpextd, _ARCH_PWR10]
+ vull __builtin_vec_vpextd (vull, vull);
+ VPEXTD
+
+[VEC_PMSUM, vec_pmsum_be, __builtin_vec_vpmsum]
+ vus __builtin_vec_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_V
+ vui __builtin_vec_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_V
+ vull __builtin_vec_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_V
+ vuq __builtin_vec_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_V
+
+[VEC_POPCNT, vec_popcnt, __builtin_vec_vpopcnt, _ARCH_PWR8]
+ vsc __builtin_vec_vpopcnt (vsc);
+ VPOPCNTB
+ vuc __builtin_vec_vpopcnt (vuc);
+ VPOPCNTUB
+ vss __builtin_vec_vpopcnt (vss);
+ VPOPCNTH
+ vus __builtin_vec_vpopcnt (vus);
+ VPOPCNTUH
+ vsi __builtin_vec_vpopcnt (vsi);
+ VPOPCNTW
+ vui __builtin_vec_vpopcnt (vui);
+ VPOPCNTUW
+ vsll __builtin_vec_vpopcnt (vsll);
+ VPOPCNTD
+ vull __builtin_vec_vpopcnt (vull);
+ VPOPCNTUD
+
+[VEC_PARITY_LSBB, vec_parity_lsbb, __builtin_vec_vparity_lsbb, _ARCH_PWR9]
+ vui __builtin_vec_vparity_lsbb (vsi);
+ VPRTYBW VPRTYBW_S
+ vui __builtin_vec_vparity_lsbb (vui);
+ VPRTYBW VPRTYBW_U
+ vull __builtin_vec_vparity_lsbb (vsll);
+ VPRTYBD VPRTYBD_S
+ vull __builtin_vec_vparity_lsbb (vull);
+ VPRTYBD VPRTYBD_U
+ vuq __builtin_vec_vparity_lsbb (vsq);
+ VPRTYBQ VPRTYBQ_S
+ vuq __builtin_vec_vparity_lsbb (vuq);
+ VPRTYBQ VPRTYBQ_U
+
+[VEC_RE, vec_re, __builtin_vec_re]
+ vf __builtin_vec_re (vf);
+ XVRESP
+ vd __builtin_vec_re (vd);
+ XVREDP
+
+[VEC_RECIP, vec_recipdiv, __builtin_vec_recipdiv]
+ vf __builtin_vec_recipdiv (vf, vf);
+ RECIP_V4SF
+ vd __builtin_vec_recipdiv (vd, vd);
+ RECIP_V2DF
+
+[VEC_REPLACE_ELT, vec_replace_elt, __builtin_vec_replace_elt, _ARCH_PWR10]
+ vui __builtin_vec_replace_elt (vui, unsigned int, const int);
+ VREPLACE_ELT_UV4SI
+ vsi __builtin_vec_replace_elt (vsi, signed int, const int);
+ VREPLACE_ELT_V4SI
+ vull __builtin_vec_replace_elt (vull, unsigned long long, const int);
+ VREPLACE_ELT_UV2DI
+ vsll __builtin_vec_replace_elt (vsll, signed long long, const int);
+ VREPLACE_ELT_V2DI
+ vf __builtin_vec_replace_elt (vf, float, const int);
+ VREPLACE_ELT_V4SF
+ vd __builtin_vec_replace_elt (vd, double, const int);
+ VREPLACE_ELT_V2DF
+
+[VEC_REPLACE_UN, vec_replace_unaligned, __builtin_vec_replace_un, _ARCH_PWR10]
+ vui __builtin_vec_replace_un (vui, unsigned int, const int);
+ VREPLACE_UN_UV4SI
+ vsi __builtin_vec_replace_un (vsi, signed int, const int);
+ VREPLACE_UN_V4SI
+ vull __builtin_vec_replace_un (vull, unsigned long long, const int);
+ VREPLACE_UN_UV2DI
+ vsll __builtin_vec_replace_un (vsll, signed long long, const int);
+ VREPLACE_UN_V2DI
+ vf __builtin_vec_replace_un (vf, float, const int);
+ VREPLACE_UN_V4SF
+ vd __builtin_vec_replace_un (vd, double, const int);
+ VREPLACE_UN_V2DF
+
+[VEC_REVB, vec_revb, __builtin_vec_revb, _ARCH_PWR8]
+ vss __builtin_vec_revb (vss);
+ REVB_V8HI REVB_VSS
+ vus __builtin_vec_revb (vus);
+ REVB_V8HI REVB_VUS
+ vsi __builtin_vec_revb (vsi);
+ REVB_V4SI REVB_VSI
+ vui __builtin_vec_revb (vui);
+ REVB_V4SI REVB_VUI
+ vsll __builtin_vec_revb (vsll);
+ REVB_V2DI REVB_VSLL
+ vull __builtin_vec_revb (vull);
+ REVB_V2DI REVB_VULL
+ vsq __builtin_vec_revb (vsq);
+ REVB_V1TI REVB_VSQ
+ vuq __builtin_vec_revb (vuq);
+ REVB_V1TI REVB_VUQ
+ vf __builtin_vec_revb (vf);
+ REVB_V4SF
+ vd __builtin_vec_revb (vd);
+ REVB_V2DF
+
+[VEC_REVE, vec_reve, __builtin_vec_vreve]
+ vsc __builtin_vec_vreve (vsc);
+ VREVE_V16QI VREVE_VSC
+ vuc __builtin_vec_vreve (vuc);
+ VREVE_V16QI VREVE_VUC
+ vbc __builtin_vec_vreve (vbc);
+ VREVE_V16QI VREVE_VBC
+ vss __builtin_vec_vreve (vss);
+ VREVE_V8HI VREVE_VSS
+ vus __builtin_vec_vreve (vus);
+ VREVE_V8HI VREVE_VUS
+ vbs __builtin_vec_vreve (vbs);
+ VREVE_V8HI VREVE_VBS
+ vsi __builtin_vec_vreve (vsi);
+ VREVE_V4SI VREVE_VSI
+ vui __builtin_vec_vreve (vui);
+ VREVE_V4SI VREVE_VUI
+ vbi __builtin_vec_vreve (vbi);
+ VREVE_V4SI VREVE_VBI
+ vsll __builtin_vec_vreve (vsll);
+ VREVE_V2DI VREVE_VSLL
+ vull __builtin_vec_vreve (vull);
+ VREVE_V2DI VREVE_VULL
+ vbll __builtin_vec_vreve (vbll);
+ VREVE_V2DI VREVE_VBLL
+ vf __builtin_vec_vreve (vf);
+ VREVE_V4SF
+ vd __builtin_vec_vreve (vd);
+ VREVE_V2DF
+
+[VEC_RINT, vec_rint, __builtin_vec_rint, __VSX__]
+ vf __builtin_vec_rint (vf);
+ XVRSPIC
+ vd __builtin_vec_rint (vd);
+ XVRDPIC
+
+[VEC_RL, vec_rl, __builtin_vec_rl]
+ vsc __builtin_vec_rl (vsc, vuc);
+ VRLB VRLB_VSC
+ vuc __builtin_vec_rl (vuc, vuc);
+ VRLB VRLB_VUC
+ vss __builtin_vec_rl (vss, vus);
+ VRLH VRLH_VSS
+ vus __builtin_vec_rl (vus, vus);
+ VRLH VRLH_VUS
+ vsi __builtin_vec_rl (vsi, vui);
+ VRLW VRLW_VSI
+ vui __builtin_vec_rl (vui, vui);
+ VRLW VRLW_VUI
+ vsll __builtin_vec_rl (vsll, vull);
+ VRLD VRLD_VSLL
+ vull __builtin_vec_rl (vull, vull);
+ VRLD VRLD_VULL
+
+[VEC_RLMI, vec_rlmi, __builtin_vec_rlmi]
+ vui __builtin_vec_rlmi (vui, vui, vui);
+ VRLWMI
+ vull __builtin_vec_rlmi (vull, vull, vull);
+ VRLDMI
+
+[VEC_RLNM, vec_rlnm, __builtin_vec_rlnm]
+ vui __builtin_vec_rlnm (vui, vui);
+ VRLWNM
+ vull __builtin_vec_rlnm (vull, vull);
+ VRLDNM
+
+[VEC_ROUND, vec_round, __builtin_vec_round]
+ vf __builtin_vec_round (vf);
+ XVRSPI
+ vd __builtin_vec_round (vd);
+ XVRDPI
+
+[VEC_RSQRT, vec_rsqrt, __builtin_vec_rsqrt]
+ vf __builtin_vec_rsqrt (vf);
+ RSQRT_4SF
+ vd __builtin_vec_rsqrt (vd);
+ RSQRT_2DF
+
+[VEC_RSQRTE, vec_rsqrte, __builtin_vec_rsqrte]
+ vf __builtin_vec_rsqrte (vf);
+ XVRSQRTESP
+ vd __builtin_vec_rsqrte (vd);
+ XVRSQRTEDP
+
+[VEC_SBOX_BE, vec_sbox_be, __builtin_vec_sbox_be, _ARCH_PWR8]
+ vuc __builtin_vec_sbox_be (vuc);
+ VSBOX_BE
+
+[VEC_SEL, vec_sel, __builtin_vec_sel]
+ vsc __builtin_vec_sel (vsc, vsc, vbc);
+ VSEL_16QI VSEL_16QI_B
+ vsc __builtin_vec_sel (vsc, vsc, vuc);
+ VSEL_16QI VSEL_16QI_U
+ vuc __builtin_vec_sel (vuc, vuc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_UB
+ vuc __builtin_vec_sel (vuc, vuc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_UU
+ vbc __builtin_vec_sel (vbc, vbc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_BB
+ vbc __builtin_vec_sel (vbc, vbc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_BU
+ vss __builtin_vec_sel (vss, vss, vbs);
+ VSEL_8HI VSEL_8HI_B
+ vss __builtin_vec_sel (vss, vss, vus);
+ VSEL_8HI VSEL_8HI_U
+ vus __builtin_vec_sel (vus, vus, vbs);
+ VSEL_8HI_UNS VSEL_8HI_UB
+ vus __builtin_vec_sel (vus, vus, vus);
+ VSEL_8HI_UNS VSEL_8HI_UU
+ vbs __builtin_vec_sel (vbs, vbs, vbs);
+ VSEL_8HI_UNS VSEL_8HI_BB
+ vbs __builtin_vec_sel (vbs, vbs, vus);
+ VSEL_8HI_UNS VSEL_8HI_BU
+ vsi __builtin_vec_sel (vsi, vsi, vbi);
+ VSEL_4SI VSEL_4SI_B
+ vsi __builtin_vec_sel (vsi, vsi, vui);
+ VSEL_4SI VSEL_4SI_U
+ vui __builtin_vec_sel (vui, vui, vbi);
+ VSEL_4SI_UNS VSEL_4SI_UB
+ vui __builtin_vec_sel (vui, vui, vui);
+ VSEL_4SI_UNS VSEL_4SI_UU
+ vbi __builtin_vec_sel (vbi, vbi, vbi);
+ VSEL_4SI_UNS VSEL_4SI_BB
+ vbi __builtin_vec_sel (vbi, vbi, vui);
+ VSEL_4SI_UNS VSEL_4SI_BU
+ vsll __builtin_vec_sel (vsll, vsll, vbll);
+ VSEL_2DI_B VSEL_2DI_B
+ vsll __builtin_vec_sel (vsll, vsll, vull);
+ VSEL_2DI_B VSEL_2DI_U
+ vull __builtin_vec_sel (vull, vull, vbll);
+ VSEL_2DI_UNS VSEL_2DI_UB
+ vull __builtin_vec_sel (vull, vull, vull);
+ VSEL_2DI_UNS VSEL_2DI_UU
+ vbll __builtin_vec_sel (vbll, vbll, vbll);
+ VSEL_2DI_UNS VSEL_2DI_BB
+ vbll __builtin_vec_sel (vbll, vbll, vull);
+ VSEL_2DI_UNS VSEL_2DI_BU
+ vf __builtin_vec_sel (vf, vf, vbi);
+ VSEL_4SF VSEL_4SF_B
+ vf __builtin_vec_sel (vf, vf, vui);
+ VSEL_4SF VSEL_4SF_U
+ vd __builtin_vec_sel (vd, vd, vbll);
+ VSEL_2DF VSEL_2DF_B
+ vd __builtin_vec_sel (vd, vd, vull);
+ VSEL_2DF VSEL_2DF_U
+
+[VEC_SHASIGMA_BE, vec_shasigma_be, __builtin_crypto_vshasigma]
+ vui __builtin_crypto_vshasigma (vui, const int, const int);
+ VSHASIGMAW
+ vull __builtin_crypto_vshasigma (vull, const int, const int);
+ VSHASIGMAD
+
+[VEC_SIGNED, vec_signed, __builtin_vec_vsigned]
+ vsi __builtin_vec_vsigned (vf);
+ VEC_VSIGNED_V4SF
+ vsll __builtin_vec_vsigned (vd);
+ VEC_VSIGNED_V2DF
+
+[VEC_SIGNED2, vec_signed2, __builtin_vec_vsigned2]
+ vsi __builtin_vec_vsigned2 (vd, vd);
+ VEC_VSIGNED2_V2DF
+
+[VEC_SIGNEDO, vec_signedo, __builtin_vec_vsignedo]
+ vui __builtin_vec_vsignedo (vd);
+ VEC_VSIGNEDO_V2DF
+
+[VEC_SL, vec_sl, __builtin_vec_sl]
+ vsc __builtin_vec_sl (vsc, vuc);
+ VSLB VSLB_VSC
+ vuc __builtin_vec_sl (vuc, vuc);
+ VSLB VSLB_VUC
+ vss __builtin_vec_sl (vss, vus);
+ VSLH VSLH_VSS
+ vus __builtin_vec_sl (vus, vus);
+ VSLH VSLH_VUS
+ vsi __builtin_vec_sl (vsi, vui);
+ VSLW VSLW_VSI
+ vui __builtin_vec_sl (vui, vui);
+ VSLW VSLW_VUI
+ vsll __builtin_vec_sl (vsll, vull);
+ VSLD VSLD_VSLL
+ vull __builtin_vec_sl (vull, vull);
+ VSLD VSLD_VULL
+
+[VEC_SLD, vec_sld, __builtin_vec_sld]
+ vsc __builtin_vec_sld (vsc, vsc, const int);
+ VSLDOI_16QI VSLDOI_VSC
+ vbc __builtin_vec_sld (vbc, vbc, const int);
+ VSLDOI_16QI VSLDOI_VBC
+ vuc __builtin_vec_sld (vuc, vuc, const int);
+ VSLDOI_16QI VSLDOI_VUC
+ vss __builtin_vec_sld (vss, vss, const int);
+ VSLDOI_8HI VSLDOI_VSS
+ vbs __builtin_vec_sld (vbs, vbs, const int);
+ VSLDOI_8HI VSLDOI_VBS
+ vus __builtin_vec_sld (vus, vus, const int);
+ VSLDOI_8HI VSLDOI_VUS
+ vp __builtin_vec_sld (vp, vp, const int);
+ VSLDOI_8HI VSLDOI_VP
+ vsi __builtin_vec_sld (vsi, vsi, const int);
+ VSLDOI_4SI VSLDOI_VSI
+ vbi __builtin_vec_sld (vbi, vbi, const int);
+ VSLDOI_4SI VSLDOI_VBI
+ vui __builtin_vec_sld (vui, vui, const int);
+ VSLDOI_4SI VSLDOI_VUI
+ vsll __builtin_vec_sld (vsll, vsll, const int);
+ VSLDOI_2DI VSLDOI_VSLL
+ vbll __builtin_vec_sld (vbll, vbll, const int);
+ VSLDOI_2DI VSLDOI_VBLL
+ vull __builtin_vec_sld (vull, vull, const int);
+ VSLDOI_2DI VSLDOI_VULL
+ vf __builtin_vec_sld (vf, vf, const int);
+ VSLDOI_4SF
+ vd __builtin_vec_sld (vd, vd, const int);
+ VSLDOI_2DF
+
+[VEC_SLDB, vec_sldb, __builtin_vec_sldb, _ARCH_PWR10]
+ vsc __builtin_vec_sldb (vsc, vsc, const int);
+ VSLDB_V16QI VSLDB_VSC
+ vuc __builtin_vec_sldb (vuc, vuc, const int);
+ VSLDB_V16QI VSLDB_VUC
+ vss __builtin_vec_sldb (vss, vss, const int);
+ VSLDB_V8HI VSLDB_VSS
+ vus __builtin_vec_sldb (vus, vus, const int);
+ VSLDB_V8HI VSLDB_VUS
+ vsi __builtin_vec_sldb (vsi, vsi, const int);
+ VSLDB_V4SI VSLDB_VSI
+ vui __builtin_vec_sldb (vui, vui, const int);
+ VSLDB_V4SI VSLDB_VUI
+ vsll __builtin_vec_sldb (vsll, vsll, const int);
+ VSLDB_V2DI VSLDB_VSLL
+ vull __builtin_vec_sldb (vull, vull, const int);
+ VSLDB_V2DI VSLDB_VULL
+
+[VEC_SLDW, vec_sldw, __builtin_vec_sldw]
+ vsc __builtin_vec_sldw (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC
+ vuc __builtin_vec_sldw (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC
+ vss __builtin_vec_sldw (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS
+ vus __builtin_vec_sldw (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS
+ vsi __builtin_vec_sldw (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI
+ vui __builtin_vec_sldw (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI
+ vsll __builtin_vec_sldw (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL
+ vull __builtin_vec_sldw (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL
+
+[VEC_SLL, vec_sll, __builtin_vec_sll]
+ vsc __builtin_vec_sll (vsc, vuc);
+ VSL VSL_VSC
+ vuc __builtin_vec_sll (vuc, vuc);
+ VSL VSL_VUC
+ vss __builtin_vec_sll (vss, vuc);
+ VSL VSL_VSS
+ vus __builtin_vec_sll (vus, vuc);
+ VSL VSL_VUS
+ vp __builtin_vec_sll (vp, vuc);
+ VSL VSL_VP
+ vsi __builtin_vec_sll (vsi, vuc);
+ VSL VSL_VSI
+ vui __builtin_vec_sll (vui, vuc);
+ VSL VSL_VUI
+ vsll __builtin_vec_sll (vsll, vuc);
+ VSL VSL_VSLL
+ vull __builtin_vec_sll (vull, vuc);
+ VSL VSL_VULL
+
+[VEC_SLO, vec_slo, __builtin_vec_slo]
+ vsc __builtin_vec_slo (vsc, vsc);
+ VSLO VSLO_VSCS
+ vsc __builtin_vec_slo (vsc, vuc);
+ VSLO VSLO_VSCU
+ vuc __builtin_vec_slo (vuc, vsc);
+ VSLO VSLO_VUCS
+ vuc __builtin_vec_slo (vuc, vuc);
+ VSLO VSLO_VUCU
+ vss __builtin_vec_slo (vss, vsc);
+ VSLO VSLO_VSSS
+ vss __builtin_vec_slo (vss, vuc);
+ VSLO VSLO_VSSU
+ vus __builtin_vec_slo (vus, vsc);
+ VSLO VSLO_VUSS
+ vus __builtin_vec_slo (vus, vuc);
+ VSLO VSLO_VUSU
+ vp __builtin_vec_slo (vp, vsc);
+ VSLO VSLO_VPS
+ vp __builtin_vec_slo (vp, vuc);
+ VSLO VSLO_VPU
+ vsi __builtin_vec_slo (vsi, vsc);
+ VSLO VSLO_VSIS
+ vsi __builtin_vec_slo (vsi, vuc);
+ VSLO VSLO_VSIU
+ vui __builtin_vec_slo (vui, vsc);
+ VSLO VSLO_VUIS
+ vui __builtin_vec_slo (vui, vuc);
+ VSLO VSLO_VUIU
+ vsll __builtin_vec_slo (vsll, vsc);
+ VSLO VSLO_VSLLS
+ vsll __builtin_vec_slo (vsll, vuc);
+ VSLO VSLO_VSLLU
+ vull __builtin_vec_slo (vull, vsc);
+ VSLO VSLO_VULLS
+ vull __builtin_vec_slo (vull, vuc);
+ VSLO VSLO_VULLU
+ vf __builtin_vec_slo (vf, vsc);
+ VSLO VSLO_VFS
+ vf __builtin_vec_slo (vf, vuc);
+ VSLO VSLO_VFU
+
+[VEC_SLV, vec_slv, __builtin_vec_vslv, _ARCH_PWR9]
+ vuc __builtin_vec_vslv (vuc, vuc);
+ VSLV
+
+[VEC_SPLAT, vec_splat, __builtin_vec_splat]
+ vsc __builtin_vec_splat (vsc, signed int);
+ VSPLTB VSPLTB_VSC
+ vuc __builtin_vec_splat (vuc, signed int);
+ VSPLTB VSPLTB_VUC
+ vbc __builtin_vec_splat (vbc, signed int);
+ VSPLTB VSPLTB_VBC
+ vss __builtin_vec_splat (vss, signed int);
+ VSPLTH VSPLTH_VSS
+ vus __builtin_vec_splat (vus, signed int);
+ VSPLTH VSPLTH_VUS
+ vbs __builtin_vec_splat (vbs, signed int);
+ VSPLTH VSPLTH_VBS
+ vp __builtin_vec_splat (vp, signed int);
+ VSPLTH VSPLTH_VP
+ vf __builtin_vec_splat (vf, signed int);
+ VSPLTW VSPLTW_VF
+ vsi __builtin_vec_splat (vsi, signed int);
+ VSPLTW VSPLTW_VSI
+ vui __builtin_vec_splat (vui, signed int);
+ VSPLTW VSPLTW_VUI
+ vbi __builtin_vec_splat (vbi, signed int);
+ VSPLTW VSPLTW_VBI
+ vd __builtin_vec_splat (vd, signed int);
+ XXSPLTD_V2DF
+ vsll __builtin_vec_splat (vsll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VSLL
+ vull __builtin_vec_splat (vull, signed int);
+ XXSPLTD_V2DI XXSPLTD_VULL
+ vbll __builtin_vec_splat (vbll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VBLL
+
+[VEC_SPLATI, vec_splati, __builtin_vec_xxspltiw, _ARCH_PWR10]
+ vsi __builtin_vec_xxspltiw (signed int);
+ VXXSPLTIW_V4SI
+ vf __builtin_vec_xxspltiw (float);
+ VXXSPLTIW_V4SF
+
+[VEC_SPLATID, vec_splatid, __builtin_vec_xxspltid, _ARCH_PWR10]
+ vd __builtin_vec_xxspltid (float);
+ VXXSPLTIDP
+
+[VEC_SPLATI_INS, vec_splati_ins, __builtin_vec_xxsplti32dx, _ARCH_PWR10]
+ vsi __builtin_vec_xxsplti32dx (vsi, const int, signed int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VSI
+ vui __builtin_vec_xxsplti32dx (vui, const int, unsigned int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VUI
+ vf __builtin_vec_xxsplti32dx (vf, const int, float);
+ VXXSPLTI32DX_V4SF
+
+[VEC_SQRT, vec_sqrt, __builtin_vec_sqrt, __VSX__]
+ vf __builtin_vec_sqrt (vf);
+ XVSQRTSP
+ vd __builtin_vec_sqrt (vd);
+ XVSQRTDP
+
+[VEC_SR, vec_sr, __builtin_vec_sr]
+ vsc __builtin_vec_sr (vsc, vuc);
+ VSRB VSRB_VSC
+ vuc __builtin_vec_sr (vuc, vuc);
+ VSRB VSRB_VUC
+ vss __builtin_vec_sr (vss, vus);
+ VSRH VSRH_VSS
+ vus __builtin_vec_sr (vus, vus);
+ VSRH VSRH_VUS
+ vsi __builtin_vec_sr (vsi, vui);
+ VSRW VSRW_VSI
+ vui __builtin_vec_sr (vui, vui);
+ VSRW VSRW_VUI
+ vsll __builtin_vec_sr (vsll, vull);
+ VSRD VSRD_VSLL
+ vull __builtin_vec_sr (vull, vull);
+ VSRD VSRD_VULL
+
+[VEC_SRA, vec_sra, __builtin_vec_sra]
+ vsc __builtin_vec_sra (vsc, vuc);
+ VSRAB VSRAB_VSC
+ vuc __builtin_vec_sra (vuc, vuc);
+ VSRAB VSRAB_VUC
+ vss __builtin_vec_sra (vss, vus);
+ VSRAH VSRAH_VSS
+ vus __builtin_vec_sra (vus, vus);
+ VSRAH VSRAH_VUS
+ vsi __builtin_vec_sra (vsi, vui);
+ VSRAW VSRAW_VSI
+ vui __builtin_vec_sra (vui, vui);
+ VSRAW VSRAW_VUI
+ vsll __builtin_vec_sra (vsll, vull);
+ VSRAD VSRAD_VSLL
+ vull __builtin_vec_sra (vull, vull);
+ VSRAD VSRAD_VULL
+
+[VEC_SRDB, vec_srdb, __builtin_vec_srdb, _ARCH_PWR10]
+ vsc __builtin_vec_srdb (vsc, vsc, const int);
+ VSRDB_V16QI VSRDB_VSC
+ vuc __builtin_vec_srdb (vuc, vuc, const int);
+ VSRDB_V16QI VSRDB_VUC
+ vss __builtin_vec_srdb (vss, vss, const int);
+ VSRDB_V8HI VSRDB_VSS
+ vus __builtin_vec_srdb (vus, vus, const int);
+ VSRDB_V8HI VSRDB_VUS
+ vsi __builtin_vec_srdb (vsi, vsi, const int);
+ VSRDB_V4SI VSRDB_VSI
+ vui __builtin_vec_srdb (vui, vui, const int);
+ VSRDB_V4SI VSRDB_VUI
+ vsll __builtin_vec_srdb (vsll, vsll, const int);
+ VSRDB_V2DI VSRDB_VSLL
+ vull __builtin_vec_srdb (vull, vull, const int);
+ VSRDB_V2DI VSRDB_VULL
+
+[VEC_SRL, vec_srl, __builtin_vec_srl]
+ vsc __builtin_vec_srl (vsc, vuc);
+ VSR VSR_VSC
+ vuc __builtin_vec_srl (vuc, vuc);
+ VSR VSR_VUC
+ vss __builtin_vec_srl (vss, vuc);
+ VSR VSR_VSS
+ vus __builtin_vec_srl (vus, vuc);
+ VSR VSR_VUS
+ vp __builtin_vec_srl (vp, vuc);
+ VSR VSR_VP
+ vsi __builtin_vec_srl (vsi, vuc);
+ VSR VSR_VSI
+ vui __builtin_vec_srl (vui, vuc);
+ VSR VSR_VUI
+ vsll __builtin_vec_srl (vsll, vuc);
+ VSR VSR_VSLL
+ vull __builtin_vec_srl (vull, vuc);
+ VSR VSR_VULL
+
+[VEC_SRO, vec_sro, __builtin_vec_sro]
+ vsc __builtin_vec_sro (vsc, vsc);
+ VSRO VSRO_VSCS
+ vsc __builtin_vec_sro (vsc, vuc);
+ VSRO VSRO_VSCU
+ vuc __builtin_vec_sro (vuc, vsc);
+ VSRO VSRO_VUCS
+ vuc __builtin_vec_sro (vuc, vuc);
+ VSRO VSRO_VUCU
+ vss __builtin_vec_sro (vss, vsc);
+ VSRO VSRO_VSSS
+ vss __builtin_vec_sro (vss, vuc);
+ VSRO VSRO_VSSU
+ vus __builtin_vec_sro (vus, vsc);
+ VSRO VSRO_VUSS
+ vus __builtin_vec_sro (vus, vuc);
+ VSRO VSRO_VUSU
+ vp __builtin_vec_sro (vp, vsc);
+ VSRO VSRO_VPS
+ vp __builtin_vec_sro (vp, vuc);
+ VSRO VSRO_VPU
+ vsi __builtin_vec_sro (vsi, vsc);
+ VSRO VSRO_VSIS
+ vsi __builtin_vec_sro (vsi, vuc);
+ VSRO VSRO_VSIU
+ vui __builtin_vec_sro (vui, vsc);
+ VSRO VSRO_VUIS
+ vui __builtin_vec_sro (vui, vuc);
+ VSRO VSRO_VUIU
+ vsll __builtin_vec_sro (vsll, vsc);
+ VSRO VSRO_VSLLS
+ vsll __builtin_vec_sro (vsll, vuc);
+ VSRO VSRO_VSLLU
+ vull __builtin_vec_sro (vull, vsc);
+ VSRO VSRO_VULLS
+ vull __builtin_vec_sro (vull, vuc);
+ VSRO VSRO_VULLU
+ vf __builtin_vec_sro (vf, vsc);
+ VSRO VSRO_VFS
+ vf __builtin_vec_sro (vf, vuc);
+ VSRO VSRO_VFU
+
+[VEC_SRV, vec_srv, __builtin_vec_vsrv, _ARCH_PWR9]
+ vuc __builtin_vec_vsrv (vuc, vuc);
+ VSRV
+
+[VEC_ST, vec_st, __builtin_vec_st]
+ void __builtin_vec_st (vsc, signed long long, vsc *);
+ STVX_V16QI STVX_VSC
+ void __builtin_vec_st (vsc, signed long long, signed char *);
+ STVX_V16QI STVX_SC
+ void __builtin_vec_st (vuc, signed long long, vuc *);
+ STVX_V16QI STVX_VUC
+ void __builtin_vec_st (vuc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC
+ void __builtin_vec_st (vbc, signed long long, vbc *);
+ STVX_V16QI STVX_VBC
+ void __builtin_vec_st (vbc, signed long long, signed char *);
+ STVX_V16QI STVX_SC_B
+ void __builtin_vec_st (vbc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC_B
+ void __builtin_vec_st (vss, signed long long, vss *);
+ STVX_V8HI STVX_VSS
+ void __builtin_vec_st (vss, signed long long, signed short *);
+ STVX_V8HI STVX_SS
+ void __builtin_vec_st (vus, signed long long, vus *);
+ STVX_V8HI STVX_VUS
+ void __builtin_vec_st (vus, signed long long, unsigned short *);
+ STVX_V8HI STVX_US
+ void __builtin_vec_st (vbs, signed long long, vbs *);
+ STVX_V8HI STVX_VBS
+ void __builtin_vec_st (vbs, signed long long, signed short *);
+ STVX_V8HI STVX_SS_B
+ void __builtin_vec_st (vbs, signed long long, unsigned short *);
+ STVX_V8HI STVX_US_B
+ void __builtin_vec_st (vp, signed long long, vp *);
+ STVX_V8HI STVX_P
+ void __builtin_vec_st (vsi, signed long long, vsi *);
+ STVX_V4SI STVX_VSI
+ void __builtin_vec_st (vsi, signed long long, signed int *);
+ STVX_V4SI STVX_SI
+ void __builtin_vec_st (vui, signed long long, vui *);
+ STVX_V4SI STVX_VUI
+ void __builtin_vec_st (vui, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI
+ void __builtin_vec_st (vbi, signed long long, vbi *);
+ STVX_V4SI STVX_VBI
+ void __builtin_vec_st (vbi, signed long long, signed int *);
+ STVX_V4SI STVX_SI_B
+ void __builtin_vec_st (vbi, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI_B
+ void __builtin_vec_st (vsll, signed long long, vsll *);
+ STVX_V2DI STVX_VSLL
+ void __builtin_vec_st (vsll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL
+ void __builtin_vec_st (vull, signed long long, vull *);
+ STVX_V2DI STVX_VULL
+ void __builtin_vec_st (vull, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL
+ void __builtin_vec_st (vbll, signed long long, vbll *);
+ STVX_V2DI STVX_VBLL
+ void __builtin_vec_st (vbll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL_B
+ void __builtin_vec_st (vbll, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL_B
+ void __builtin_vec_st (vf, signed long long, vf *);
+ STVX_V4SF STVX_VF
+ void __builtin_vec_st (vf, signed long long, float *);
+ STVX_V4SF STVX_F
+ void __builtin_vec_st (vd, signed long long, vd *);
+ STVX_V2DF STVX_VD
+ void __builtin_vec_st (vd, signed long long, double *);
+ STVX_V2DF STVX_D
+
+[VEC_STE, vec_ste, __builtin_vec_ste]
+ void __builtin_vec_ste (vsc, signed long long, signed char *);
+ STVEBX STVEBX_S
+ void __builtin_vec_ste (vuc, signed long long, unsigned char *);
+ STVEBX STVEBX_U
+ void __builtin_vec_ste (vbc, signed long long, signed char *);
+ STVEBX STVEBX_BS
+ void __builtin_vec_ste (vbc, signed long long, unsigned char *);
+ STVEBX STVEBX_BU
+ void __builtin_vec_ste (vss, signed long long, signed short *);
+ STVEHX STVEHX_S
+ void __builtin_vec_ste (vus, signed long long, unsigned short *);
+ STVEHX STVEHX_U
+ void __builtin_vec_ste (vbs, signed long long, signed short *);
+ STVEHX STVEHX_BS
+ void __builtin_vec_ste (vbs, signed long long, unsigned short *);
+ STVEHX STVEHX_BU
+ void __builtin_vec_ste (vp, signed long long, signed short *);
+ STVEHX STVEHX_PS
+ void __builtin_vec_ste (vp, signed long long, unsigned short *);
+ STVEHX STVEHX_PU
+ void __builtin_vec_ste (vsi, signed long long, signed int *);
+ STVEWX STVEWX_S
+ void __builtin_vec_ste (vui, signed long long, unsigned int *);
+ STVEWX STVEWX_U
+ void __builtin_vec_ste (vbi, signed long long, signed int *);
+ STVEWX STVEWX_BS
+ void __builtin_vec_ste (vbi, signed long long, unsigned int *);
+ STVEWX STVEWX_BU
+ void __builtin_vec_ste (vf, signed long long, float *);
+ STVEWX STVEWX_F
+
+[VEC_STL, vec_stl, __builtin_vec_stl]
+ void __builtin_vec_stl (vsc, signed long long, vsc *);
+ STVXL_V16QI STVXL_VSC
+ void __builtin_vec_stl (vsc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC
+ void __builtin_vec_stl (vuc, signed long long, vuc *);
+ STVXL_V16QI STVXL_VUC
+ void __builtin_vec_stl (vuc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC
+ void __builtin_vec_stl (vbc, signed long long, vbc *);
+ STVXL_V16QI STVXL_VBC
+ void __builtin_vec_stl (vbc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC_B
+ void __builtin_vec_stl (vbc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC_B
+ void __builtin_vec_stl (vss, signed long long, vss *);
+ STVXL_V8HI STVXL_VSS
+ void __builtin_vec_stl (vss, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS
+ void __builtin_vec_stl (vus, signed long long, vus *);
+ STVXL_V8HI STVXL_VUS
+ void __builtin_vec_stl (vus, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US
+ void __builtin_vec_stl (vbs, signed long long, vbs *);
+ STVXL_V8HI STVXL_VBS
+ void __builtin_vec_stl (vbs, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS_B
+ void __builtin_vec_stl (vbs, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US_B
+ void __builtin_vec_stl (vp, signed long long, vp *);
+ STVXL_V8HI STVXL_P
+ void __builtin_vec_stl (vsi, signed long long, vsi *);
+ STVXL_V4SI STVXL_VSI
+ void __builtin_vec_stl (vsi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI
+ void __builtin_vec_stl (vui, signed long long, vui *);
+ STVXL_V4SI STVXL_VUI
+ void __builtin_vec_stl (vui, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI
+ void __builtin_vec_stl (vbi, signed long long, vbi *);
+ STVXL_V4SI STVXL_VBI
+ void __builtin_vec_stl (vbi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI_B
+ void __builtin_vec_stl (vbi, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI_B
+ void __builtin_vec_stl (vsll, signed long long, vsll *);
+ STVXL_V2DI STVXL_VSLL
+ void __builtin_vec_stl (vsll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL
+ void __builtin_vec_stl (vull, signed long long, vull *);
+ STVXL_V2DI STVXL_VULL
+ void __builtin_vec_stl (vull, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL
+ void __builtin_vec_stl (vbll, signed long long, vbll *);
+ STVXL_V2DI STVXL_VBLL
+ void __builtin_vec_stl (vbll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL_B
+ void __builtin_vec_stl (vbll, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL_B
+ void __builtin_vec_stl (vf, signed long long, vf *);
+ STVXL_V4SF STVXL_VF
+ void __builtin_vec_stl (vf, signed long long, float *);
+ STVXL_V4SF STVXL_F
+ void __builtin_vec_stl (vd, signed long long, vd *);
+ STVXL_V2DF STVXL_VD
+ void __builtin_vec_stl (vd, signed long long, double *);
+ STVXL_V2DF STVXL_D
+
+[VEC_STRIL, vec_stril, __builtin_vec_stril, _ARCH_PWR10]
+ vuc __builtin_vec_stril (vuc);
+ VSTRIBL VSTRIBL_U
+ vsc __builtin_vec_stril (vsc);
+ VSTRIBL VSTRIBL_S
+ vus __builtin_vec_stril (vus);
+ VSTRIHL VSTRIHL_U
+ vss __builtin_vec_stril (vss);
+ VSTRIHL VSTRIHL_S
+
+[VEC_STRIL_P, vec_stril_p, __builtin_vec_stril_p, _ARCH_PWR10]
+ signed int __builtin_vec_stril_p (vuc);
+ VSTRIBL_P VSTRIBL_PU
+ signed int __builtin_vec_stril_p (vsc);
+ VSTRIBL_P VSTRIBL_PS
+ signed int __builtin_vec_stril_p (vus);
+ VSTRIHL_P VSTRIHL_PU
+ signed int __builtin_vec_stril_p (vss);
+ VSTRIHL_P VSTRIHL_PS
+
+[VEC_STRIR, vec_strir, __builtin_vec_strir, _ARCH_PWR10]
+ vuc __builtin_vec_strir (vuc);
+ VSTRIBR VSTRIBR_U
+ vsc __builtin_vec_strir (vsc);
+ VSTRIBR VSTRIBR_S
+ vus __builtin_vec_strir (vus);
+ VSTRIHR VSTRIHR_U
+ vss __builtin_vec_strir (vss);
+ VSTRIHR VSTRIHR_S
+
+[VEC_STRIR_P, vec_strir_p, __builtin_vec_strir_p, _ARCH_PWR10]
+ signed int __builtin_vec_strir_p (vuc);
+ VSTRIBR_P VSTRIBR_PU
+ signed int __builtin_vec_strir_p (vsc);
+ VSTRIBR_P VSTRIBR_PS
+ signed int __builtin_vec_strir_p (vus);
+ VSTRIHR_P VSTRIHR_PU
+ signed int __builtin_vec_strir_p (vss);
+ VSTRIHR_P VSTRIHR_PS
+
+[VEC_STVLX, vec_stvlx, __builtin_vec_stvlx, __PPU__]
+ void __builtin_vec_stvlx (vbc, signed long long, vbc *);
+ STVLX STVLX_VBC
+ void __builtin_vec_stvlx (vsc, signed long long, vsc *);
+ STVLX STVLX_VSC
+ void __builtin_vec_stvlx (vsc, signed long long, signed char *);
+ STVLX STVLX_SC
+ void __builtin_vec_stvlx (vuc, signed long long, vuc *);
+ STVLX STVLX_VUC
+ void __builtin_vec_stvlx (vuc, signed long long, unsigned char *);
+ STVLX STVLX_UC
+ void __builtin_vec_stvlx (vbs, signed long long, vbs *);
+ STVLX STVLX_VBS
+ void __builtin_vec_stvlx (vss, signed long long, vss *);
+ STVLX STVLX_VSS
+ void __builtin_vec_stvlx (vss, signed long long, signed short *);
+ STVLX STVLX_SS
+ void __builtin_vec_stvlx (vus, signed long long, vus *);
+ STVLX STVLX_VUS
+ void __builtin_vec_stvlx (vus, signed long long, unsigned short *);
+ STVLX STVLX_US
+ void __builtin_vec_stvlx (vp, signed long long, vp *);
+ STVLX STVLX_VP
+ void __builtin_vec_stvlx (vbi, signed long long, vbi *);
+ STVLX STVLX_VBI
+ void __builtin_vec_stvlx (vsi, signed long long, vsi *);
+ STVLX STVLX_VSI
+ void __builtin_vec_stvlx (vsi, signed long long, signed int *);
+ STVLX STVLX_SI
+ void __builtin_vec_stvlx (vui, signed long long, vui *);
+ STVLX STVLX_VUI
+ void __builtin_vec_stvlx (vui, signed long long, unsigned int *);
+ STVLX STVLX_UI
+ void __builtin_vec_stvlx (vf, signed long long, vf *);
+ STVLX STVLX_VF
+ void __builtin_vec_stvlx (vf, signed long long, float *);
+ STVLX STVLX_F
+
+[VEC_STVLXL, vec_stvlxl, __builtin_vec_stvlxl, __PPU__]
+ void __builtin_vec_stvlxl (vbc, signed long long, vbc *);
+ STVLXL STVLXL_VBC
+ void __builtin_vec_stvlxl (vsc, signed long long, vsc *);
+ STVLXL STVLXL_VSC
+ void __builtin_vec_stvlxl (vsc, signed long long, signed char *);
+ STVLXL STVLXL_SC
+ void __builtin_vec_stvlxl (vuc, signed long long, vuc *);
+ STVLXL STVLXL_VUC
+ void __builtin_vec_stvlxl (vuc, signed long long, unsigned char *);
+ STVLXL STVLXL_UC
+ void __builtin_vec_stvlxl (vbs, signed long long, vbs *);
+ STVLXL STVLXL_VBS
+ void __builtin_vec_stvlxl (vss, signed long long, vss *);
+ STVLXL STVLXL_VSS
+ void __builtin_vec_stvlxl (vss, signed long long, signed short *);
+ STVLXL STVLXL_SS
+ void __builtin_vec_stvlxl (vus, signed long long, vus *);
+ STVLXL STVLXL_VUS
+ void __builtin_vec_stvlxl (vus, signed long long, unsigned short *);
+ STVLXL STVLXL_US
+ void __builtin_vec_stvlxl (vp, signed long long, vp *);
+ STVLXL STVLXL_VP
+ void __builtin_vec_stvlxl (vbi, signed long long, vbi *);
+ STVLXL STVLXL_VBI
+ void __builtin_vec_stvlxl (vsi, signed long long, vsi *);
+ STVLXL STVLXL_VSI
+ void __builtin_vec_stvlxl (vsi, signed long long, signed int *);
+ STVLXL STVLXL_SI
+ void __builtin_vec_stvlxl (vui, signed long long, vui *);
+ STVLXL STVLXL_VUI
+ void __builtin_vec_stvlxl (vui, signed long long, unsigned int *);
+ STVLXL STVLXL_UI
+ void __builtin_vec_stvlxl (vf, signed long long, vf *);
+ STVLXL STVLXL_VF
+ void __builtin_vec_stvlxl (vf, signed long long, float *);
+ STVLXL STVLXL_F
+
+[VEC_STVRX, vec_stvrx, __builtin_vec_stvrx, __PPU__]
+ void __builtin_vec_stvrx (vbc, signed long long, vbc *);
+ STVRX STVRX_VBC
+ void __builtin_vec_stvrx (vsc, signed long long, vsc *);
+ STVRX STVRX_VSC
+ void __builtin_vec_stvrx (vsc, signed long long, signed char *);
+ STVRX STVRX_SC
+ void __builtin_vec_stvrx (vuc, signed long long, vuc *);
+ STVRX STVRX_VUC
+ void __builtin_vec_stvrx (vuc, signed long long, unsigned char *);
+ STVRX STVRX_UC
+ void __builtin_vec_stvrx (vbs, signed long long, vbs *);
+ STVRX STVRX_VBS
+ void __builtin_vec_stvrx (vss, signed long long, vss *);
+ STVRX STVRX_VSS
+ void __builtin_vec_stvrx (vss, signed long long, signed short *);
+ STVRX STVRX_SS
+ void __builtin_vec_stvrx (vus, signed long long, vus *);
+ STVRX STVRX_VUS
+ void __builtin_vec_stvrx (vus, signed long long, unsigned short *);
+ STVRX STVRX_US
+ void __builtin_vec_stvrx (vp, signed long long, vp *);
+ STVRX STVRX_VP
+ void __builtin_vec_stvrx (vbi, signed long long, vbi *);
+ STVRX STVRX_VBI
+ void __builtin_vec_stvrx (vsi, signed long long, vsi *);
+ STVRX STVRX_VSI
+ void __builtin_vec_stvrx (vsi, signed long long, signed int *);
+ STVRX STVRX_SI
+ void __builtin_vec_stvrx (vui, signed long long, vui *);
+ STVRX STVRX_VUI
+ void __builtin_vec_stvrx (vui, signed long long, unsigned int *);
+ STVRX STVRX_UI
+ void __builtin_vec_stvrx (vf, signed long long, vf *);
+ STVRX STVRX_VF
+ void __builtin_vec_stvrx (vf, signed long long, float *);
+ STVRX STVRX_F
+
+[VEC_STVRXL, vec_stvrxl, __builtin_vec_stvrxl, __PPU__]
+ void __builtin_vec_stvrxl (vbc, signed long long, vbc *);
+ STVRXL STVRXL_VBC
+ void __builtin_vec_stvrxl (vsc, signed long long, vsc *);
+ STVRXL STVRXL_VSC
+ void __builtin_vec_stvrxl (vsc, signed long long, signed char *);
+ STVRXL STVRXL_SC
+ void __builtin_vec_stvrxl (vuc, signed long long, vuc *);
+ STVRXL STVRXL_VUC
+ void __builtin_vec_stvrxl (vuc, signed long long, unsigned char *);
+ STVRXL STVRXL_UC
+ void __builtin_vec_stvrxl (vbs, signed long long, vbs *);
+ STVRXL STVRXL_VBS
+ void __builtin_vec_stvrxl (vss, signed long long, vss *);
+ STVRXL STVRXL_VSS
+ void __builtin_vec_stvrxl (vss, signed long long, signed short *);
+ STVRXL STVRXL_SS
+ void __builtin_vec_stvrxl (vus, signed long long, vus *);
+ STVRXL STVRXL_VUS
+ void __builtin_vec_stvrxl (vus, signed long long, unsigned short *);
+ STVRXL STVRXL_US
+ void __builtin_vec_stvrxl (vp, signed long long, vp *);
+ STVRXL STVRXL_VP
+ void __builtin_vec_stvrxl (vbi, signed long long, vbi *);
+ STVRXL STVRXL_VBI
+ void __builtin_vec_stvrxl (vsi, signed long long, vsi *);
+ STVRXL STVRXL_VSI
+ void __builtin_vec_stvrxl (vsi, signed long long, signed int *);
+ STVRXL STVRXL_SI
+ void __builtin_vec_stvrxl (vui, signed long long, vui *);
+ STVRXL STVRXL_VUI
+ void __builtin_vec_stvrxl (vui, signed long long, unsigned int *);
+ STVRXL STVRXL_UI
+ void __builtin_vec_stvrxl (vf, signed long long, vf *);
+ STVRXL STVRXL_VF
+ void __builtin_vec_stvrxl (vf, signed long long, float *);
+ STVRXL STVRXL_F
+
+[VEC_STXVL, vec_xst_len, __builtin_vec_stxvl, _ARCH_PPC64_PWR9]
+ void __builtin_vec_stxvl (vsc, signed char *, unsigned long long);
+ STXVL STXVL_VSC
+ void __builtin_vec_stxvl (vuc, unsigned char *, unsigned long long);
+ STXVL STXVL_VUC
+ void __builtin_vec_stxvl (vss, signed short *, unsigned long long);
+ STXVL STXVL_VSS
+ void __builtin_vec_stxvl (vus, unsigned short *, unsigned long long);
+ STXVL STXVL_VUS
+ void __builtin_vec_stxvl (vsi, signed int *, unsigned long long);
+ STXVL STXVL_VSI
+ void __builtin_vec_stxvl (vui, unsigned int *, unsigned long long);
+ STXVL STXVL_VUI
+ void __builtin_vec_stxvl (vsll, signed long long *, unsigned long long);
+ STXVL STXVL_VSLL
+ void __builtin_vec_stxvl (vull, unsigned long long *, unsigned long long);
+ STXVL STXVL_VULL
+ void __builtin_vec_stxvl (vsq, signed __int128 *, unsigned long long);
+ STXVL STXVL_VSQ
+ void __builtin_vec_stxvl (vuq, unsigned __int128 *, unsigned long long);
+ STXVL STXVL_VUQ
+ void __builtin_vec_stxvl (vf, float *, unsigned long long);
+ STXVL STXVL_VF
+ void __builtin_vec_stxvl (vd, double *, unsigned long long);
+ STXVL STXVL_VD
+
+[VEC_SUB, vec_sub, __builtin_vec_sub]
+ vsc __builtin_vec_sub (vsc, vsc);
+ VSUBUBM VSUBUBM_VSC
+ vuc __builtin_vec_sub (vuc, vuc);
+ VSUBUBM VSUBUBM_VUC
+ vss __builtin_vec_sub (vss, vss);
+ VSUBUHM VSUBUHM_VSS
+ vus __builtin_vec_sub (vus, vus);
+ VSUBUHM VSUBUHM_VUS
+ vsi __builtin_vec_sub (vsi, vsi);
+ VSUBUWM VSUBUWM_VSI
+ vui __builtin_vec_sub (vui, vui);
+ VSUBUWM VSUBUWM_VUI
+ vsll __builtin_vec_sub (vsll, vsll);
+ VSUBUDM VSUBUDM_VSLL
+ vull __builtin_vec_sub (vull, vull);
+ VSUBUDM VSUBUDM_VULL
+ vsq __builtin_vec_sub (vsq, vsq);
+ VSUBUQM VSUBUQM_VSQ
+ vuq __builtin_vec_sub (vuq, vuq);
+ VSUBUQM VSUBUQM_VUQ
+ vf __builtin_vec_sub (vf, vf);
+ XVSUBSP
+ vd __builtin_vec_sub (vd, vd);
+ XVSUBDP
+
+[VEC_SUBC, vec_subc, __builtin_vec_subc]
+ vsi __builtin_vec_subc (vsi, vsi);
+ VSUBCUW VSUBCUW_VSI
+ vui __builtin_vec_subc (vui, vui);
+ VSUBCUW VSUBCUW_VUI
+ vsq __builtin_vec_subc (vsq, vsq);
+ VSUBCUQ VSUBCUQ_VSQ
+ vuq __builtin_vec_subc (vuq, vuq);
+ VSUBCUQ VSUBCUQ_VUQ
+
+[VEC_SUBS, vec_subs, __builtin_vec_subs]
+ vuc __builtin_vec_subs (vuc, vuc);
+ VSUBUBS
+ vsc __builtin_vec_subs (vsc, vsc);
+ VSUBSBS
+ vus __builtin_vec_subs (vus, vus);
+ VSUBUHS
+ vss __builtin_vec_subs (vss, vss);
+ VSUBSHS
+ vui __builtin_vec_subs (vui, vui);
+ VSUBUWS
+ vsi __builtin_vec_subs (vsi, vsi);
+ VSUBSWS
+
+[VEC_SUM2S, vec_sum2s, __builtin_vec_sum2s]
+ vsi __builtin_vec_sum2s (vsi, vsi);
+ VSUM2SWS
+
+[VEC_SUM4S, vec_sum4s, __builtin_vec_sum4s]
+ vui __builtin_vec_sum4s (vuc, vui);
+ VSUM4UBS
+ vsi __builtin_vec_sum4s (vsc, vui);
+ VSUM4SBS
+ vsi __builtin_vec_sum4s (vss, vsi);
+ VSUM4SHS
+
+[VEC_SUMS, vec_sums, __builtin_vec_sums]
+ vsi __builtin_vec_sums (vsi, vsi);
+ VSUMSWS
+
+[VEC_TERNARYLOGIC, vec_ternarylogic, __builtin_vec_xxeval, _ARCH_PWR10]
+ vuc __builtin_vec_xxeval (vuc, vuc, vuc, const int);
+ XXEVAL XXEVAL_VUC
+ vus __builtin_vec_xxeval (vus, vus, vus, const int);
+ XXEVAL XXEVAL_VUS
+ vui __builtin_vec_xxeval (vui, vui, vui, const int);
+ XXEVAL XXEVAL_VUI
+ vull __builtin_vec_xxeval (vull, vull, vull, const int);
+ XXEVAL XXEVAL_VULL
+ vuq __builtin_vec_xxeval (vuq, vuq, vuq, const int);
+ XXEVAL XXEVAL_VUQ
+
+[VEC_TEST_LSBB_ALL_ONES, vec_test_lsbb_all_ones, __builtin_vec_xvtlsbb_all_ones, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_ones (vuc);
+ XVTLSBB_ONES
+
+[VEC_TEST_LSBB_ALL_ZEROS, vec_test_lsbb_all_zeros, __builtin_vec_xvtlsbb_all_zeros, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_zeros (vuc);
+ XVTLSBB_ZEROS
+
+[VEC_TRUNC, vec_trunc, __builtin_vec_trunc]
+ vf __builtin_vec_trunc (vf);
+ XVRSPIZ
+ vd __builtin_vec_trunc (vd);
+ XVRDPIZ
+
+[VEC_TSTSFI_GT, SKIP, __builtin_dfp_dtstsfi_gt]
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal64);
+ TSTSFI_GT_DD
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal128);
+ TSTSFI_GT_TD
+
+[VEC_TSTSFI_EQ, SKIP, __builtin_dfp_dtstsfi_eq]
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal64);
+ TSTSFI_EQ_DD
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal128);
+ TSTSFI_EQ_TD
+
+[VEC_TSTSFI_LT, SKIP, __builtin_dfp_dtstsfi_lt]
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal64);
+ TSTSFI_LT_DD
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal128);
+ TSTSFI_LT_TD
+
+[VEC_TSTSFI_OV, SKIP, __builtin_dfp_dtstsfi_ov]
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal64);
+ TSTSFI_OV_DD
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal128);
+ TSTSFI_OV_TD
+
+[VEC_UNPACKH, vec_unpackh, __builtin_vec_unpackh]
+ vss __builtin_vec_unpackh (vsc);
+ VUPKHSB VUPKHSB_VSC
+ vbs __builtin_vec_unpackh (vbc);
+ VUPKHSB VUPKHSB_VBC
+ vsi __builtin_vec_unpackh (vss);
+ VUPKHSH VUPKHSH_VSS
+ vbi __builtin_vec_unpackh (vbs);
+ VUPKHSH VUPKHSH_VBS
+ vui __builtin_vec_unpackh (vp);
+ VUPKHPX
+ vsll __builtin_vec_unpackh (vsi);
+ VUPKHSW VUPKHSW_VSI
+ vbll __builtin_vec_unpackh (vbi);
+ VUPKHSW VUPKHSW_VBI
+ vd __builtin_vec_unpackh (vf);
+ DOUBLEH_V4SF VUPKHF
+
+[VEC_UNPACKL, vec_unpackl, __builtin_vec_unpackl]
+ vss __builtin_vec_unpackl (vsc);
+ VUPKLSB VUPKLSB_VSC
+ vbs __builtin_vec_unpackl (vbc);
+ VUPKLSB VUPKLSB_VBC
+ vsi __builtin_vec_unpackl (vss);
+ VUPKLSH VUPKLSH_VSS
+ vbi __builtin_vec_unpackl (vbs);
+ VUPKLSH VUPKLSH_VBS
+ vui __builtin_vec_unpackl (vp);
+ VUPKLPX
+ vsll __builtin_vec_unpackl (vsi);
+ VUPKLSW VUPKLSW_VSI
+ vbll __builtin_vec_unpackl (vbi);
+ VUPKLSW VUPKLSW_VBI
+ vd __builtin_vec_unpackl (vf);
+ DOUBLEL_V4SF VUPKLF
+
+[VEC_UNSIGNED, vec_unsigned, __builtin_vec_vunsigned]
+ vsi __builtin_vec_vunsigned (vf);
+ VEC_VUNSIGNED_V4SF
+ vsll __builtin_vec_vunsigned (vd);
+ VEC_VUNSIGNED_V2DF
+
+[VEC_UNSIGNED2, vec_unsigned2, __builtin_vec_vunsigned2]
+ vsi __builtin_vec_vunsigned2 (vd, vd);
+ VEC_VUNSIGNED2_V2DF
+
+[VEC_UNSIGNEDO, vec_unsignedo, __builtin_vec_vunsignedo]
+ vui __builtin_vec_vunsignedo (vd);
+ VEC_VUNSIGNEDO_V2DF
+
+; Not sure this should exist, but it does. This group is redundant with
+; vec_addec, but the next three don't have an alias.
+[VEC_VADDECUQ, vec_vaddecuq, __builtin_vec_vaddecuq]
+ vsq __builtin_vec_vaddecuq (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ2
+ vuq __builtin_vec_vaddecuq (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ2
+
+; Not sure this should exist, but it does.
+[VEC_VADDEUQM, vec_vaddeuqm, __builtin_vec_vaddeuqm]
+ vsq __builtin_vec_vaddeuqm (vsq, vsq, vsq);
+ VADDEUQM VADDEUQM_VSQ
+ vuq __builtin_vec_vaddeuqm (vuq, vuq, vuq);
+ VADDEUQM VADDEUQM_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBECUQ, vec_vsubecuq, __builtin_vec_vsubecuq]
+ vsq __builtin_vec_vsubecuq (vsq, vsq, vsq);
+ VSUBECUQ VSUBECUQ_VSQ
+ vuq __builtin_vec_vsubecuq (vuq, vuq, vuq);
+ VSUBECUQ VSUBECUQ_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBEUQM, vec_vsubeuqm, __builtin_vec_vsubeuqm]
+ vsq __builtin_vec_vsubeuqm (vsq, vsq, vsq);
+ VSUBEUQM VSUBEUQM_VSQ
+ vuq __builtin_vec_vsubeuqm (vuq, vuq, vuq);
+ VSUBEUQM VSUBEUQM_VUQ
+
+[VEC_VEE, vec_extract_exp, __builtin_vec_extract_exp, _ARCH_PWR9]
+ vui __builtin_vec_extract_exp (vf);
+ VEESP
+ vull __builtin_vec_extract_exp (vd);
+ VEEDP
+
+[VEC_VES, vec_extract_sig, __builtin_vec_extract_sig, _ARCH_PWR9]
+ vui __builtin_vec_extract_sig (vf);
+ VESSP
+ vull __builtin_vec_extract_sig (vd);
+ VESDP
+
+[VEC_VIE, vec_insert_exp, __builtin_vec_insert_exp, _ARCH_PWR9]
+ vf __builtin_vec_insert_exp (vf, vui);
+ VIESP VIESP_VF
+ vf __builtin_vec_insert_exp (vui, vui);
+ VIESP VIESP_VUI
+ vd __builtin_vec_insert_exp (vd, vull);
+ VIEDP VIEDP_VD
+ vd __builtin_vec_insert_exp (vull, vull);
+ VIEDP VIEDP_VULL
+
+[VEC_VSCEEQ, scalar_cmp_exp_eq, __builtin_vec_scalar_cmp_exp_eq, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_eq (double, double);
+ VSCEDPEQ
+ signed int __builtin_vec_scalar_cmp_exp_eq (_Float128, _Float128);
+ VSCEQPEQ
+
+[VEC_VSCEGT, scalar_cmp_exp_gt, __builtin_vec_scalar_cmp_exp_gt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_gt (double, double);
+ VSCEDPGT
+ signed int __builtin_vec_scalar_cmp_exp_gt (_Float128, _Float128);
+ VSCEQPGT
+
+[VEC_VSCELT, scalar_cmp_exp_lt, __builtin_vec_scalar_cmp_exp_lt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_lt (double, double);
+ VSCEDPLT
+ signed int __builtin_vec_scalar_cmp_exp_lt (_Float128, _Float128);
+ VSCEQPLT
+
+[VEC_VSCEUO, scalar_cmp_exp_unordered, __builtin_vec_scalar_cmp_exp_unordered, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_unordered (double, double);
+ VSCEDPUO
+ signed int __builtin_vec_scalar_cmp_exp_unordered (_Float128, _Float128);
+ VSCEQPUO
+
+[VEC_VSEE, scalar_extract_exp, __builtin_vec_scalar_extract_exp, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_exp (double);
+ VSEEDP
+ unsigned int __builtin_vec_scalar_extract_exp (_Float128);
+ VSEEQP
+
+[VEC_VSES, scalar_extract_sig, __builtin_vec_scalar_extract_sig, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_sig (double);
+ VSESDP
+ unsigned int __builtin_vec_scalar_extract_sig (_Float128);
+ VSESQP
+
+[VEC_VSIE, scalar_insert_exp, __builtin_vec_scalar_insert_exp, _ARCH_PWR9]
+ double __builtin_vec_scalar_insert_exp (unsigned int, unsigned int);
+ VSIEDP VSIEDP_UI
+ double __builtin_vec_scalar_insert_exp (double, unsigned int);
+ VSIEDP VSIEDP_D
+ _Float128 __builtin_vec_scalar_insert_exp (unsigned long long, unsigned long long);
+ VSIEQP VSIEQP_ULL
+ _Float128 __builtin_vec_scalar_insert_exp (_Float128, unsigned long long);
+ VSIEQP VSIEQP_F128
+
+[VEC_VSTDC, scalar_test_data_class, __builtin_vec_scalar_test_data_class, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_data_class (float, signed int);
+ VSTDCSP
+ bool __builtin_vec_scalar_test_data_class (double, signed int);
+ VSTDCDP
+ bool __builtin_vec_scalar_test_data_class (_Float128, signed int);
+ VSTDCQP
+
+[VEC_VSTDCN, scalar_test_neg, __builtin_vec_scalar_test_neg, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_neg (float);
+ VSTDCNSP
+ bool __builtin_vec_scalar_test_neg (double);
+ VSTDCNDP
+ bool __builtin_vec_scalar_test_neg (_Float128);
+ VSTDCNQP
+
+[VEC_VTDC, vec_test_data_class, __builtin_vec_test_data_class, _ARCH_PWR9]
+ vbi __builtin_vec_test_data_class (vf, signed int);
+ VTDCSP
+ vbll __builtin_vec_test_data_class (vd, signed int);
+ VTDCDP
+
+[VEC_XL, vec_xl, __builtin_vec_vsx_ld, __VSX__]
+ vsc __builtin_vec_vsx_ld (signed long long, vsc *);
+ LXVW4X_V4SI LXVW4X_VSC
+ vsc __builtin_vec_vsx_ld (signed long long, signed char *);
+ LXVW4X_V4SI LXVW4X_SC
+ vuc __builtin_vec_vsx_ld (signed long long, vuc *);
+ LXVW4X_V4SI LXVW4X_VUC
+ vuc __builtin_vec_vsx_ld (signed long long, unsigned char *);
+ LXVW4X_V4SI LXVW4X_UC
+ vss __builtin_vec_vsx_ld (signed long long, vss *);
+ LXVW4X_V4SI LXVW4X_VSS
+ vss __builtin_vec_vsx_ld (signed long long, signed short *);
+ LXVW4X_V4SI LXVW4X_SS
+ vus __builtin_vec_vsx_ld (signed long long, vus *);
+ LXVW4X_V4SI LXVW4X_VUS
+ vus __builtin_vec_vsx_ld (signed long long, unsigned short *);
+ LXVW4X_V4SI LXVW4X_US
+ vsi __builtin_vec_vsx_ld (signed long long, vsi *);
+ LXVW4X_V4SI LXVW4X_VSI
+ vsi __builtin_vec_vsx_ld (signed long long, signed int *);
+ LXVW4X_V4SI LXVW4X_SI
+ vui __builtin_vec_vsx_ld (signed long long, vui *);
+ LXVW4X_V4SI LXVW4X_VUI
+ vui __builtin_vec_vsx_ld (signed long long, unsigned int *);
+ LXVW4X_V4SI LXVW4X_UI
+ vsll __builtin_vec_vsx_ld (signed long long, vsll *);
+ LXVD2X_V2DI LXVD2X_VSLL
+ vsll __builtin_vec_vsx_ld (signed long long, signed long long *);
+ LXVD2X_V2DI LXVD2X_SLL
+ vull __builtin_vec_vsx_ld (signed long long, vull *);
+ LXVD2X_V2DI LXVD2X_VULL
+ vull __builtin_vec_vsx_ld (signed long long, unsigned long long *);
+ LXVD2X_V2DI LXVD2X_ULL
+ vsq __builtin_vec_vsx_ld (signed long long, vsq *);
+ LXVD2X_V1TI LXVD2X_VSQ
+ vsq __builtin_vec_vsx_ld (signed long long, signed __int128 *);
+ LXVD2X_V1TI LXVD2X_SQ
+ vuq __builtin_vec_vsx_ld (signed long long, unsigned __int128 *);
+ LXVD2X_V1TI LXVD2X_UQ
+ vf __builtin_vec_vsx_ld (signed long long, vf *);
+ LXVW4X_V4SF LXVW4X_VF
+ vf __builtin_vec_vsx_ld (signed long long, float *);
+ LXVW4X_V4SF LXVW4X_F
+ vd __builtin_vec_vsx_ld (signed long long, vd *);
+ LXVD2X_V2DF LXVD2X_VD
+ vd __builtin_vec_vsx_ld (signed long long, double *);
+ LXVD2X_V2DF LXVD2X_D
+
+[VEC_XL_BE, vec_xl_be, __builtin_vec_xl_be, __VSX__]
+ vsc __builtin_vec_xl_be (signed long long, vsc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VSC
+ vsc __builtin_vec_xl_be (signed long long, signed char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_SC
+ vuc __builtin_vec_xl_be (signed long long, vuc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VUC
+ vuc __builtin_vec_xl_be (signed long long, unsigned char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_UC
+ vss __builtin_vec_xl_be (signed long long, vss *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VSS
+ vss __builtin_vec_xl_be (signed long long, signed short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_SS
+ vus __builtin_vec_xl_be (signed long long, vus *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VUS
+ vus __builtin_vec_xl_be (signed long long, unsigned short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_US
+ vsi __builtin_vec_xl_be (signed long long, vsi *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VSI
+ vsi __builtin_vec_xl_be (signed long long, signed int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_SI
+ vui __builtin_vec_xl_be (signed long long, vui *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VUI
+ vui __builtin_vec_xl_be (signed long long, unsigned int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_UI
+ vsll __builtin_vec_xl_be (signed long long, vsll *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VSLL
+ vsll __builtin_vec_xl_be (signed long long, signed long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_SLL
+ vull __builtin_vec_xl_be (signed long long, vull *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VULL
+ vull __builtin_vec_xl_be (signed long long, unsigned long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_ULL
+ vsq __builtin_vec_xl_be (signed long long, signed __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_SQ
+ vuq __builtin_vec_xl_be (signed long long, unsigned __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_UQ
+ vf __builtin_vec_xl_be (signed long long, vf *);
+ LD_ELEMREV_V4SF LD_ELEMREV_VF
+ vf __builtin_vec_xl_be (signed long long, float *);
+ LD_ELEMREV_V4SF LD_ELEMREV_F
+ vd __builtin_vec_xl_be (signed long long, vd *);
+ LD_ELEMREV_V2DF LD_ELEMREV_VD
+ vd __builtin_vec_xl_be (signed long long, double *);
+ LD_ELEMREV_V2DF LD_ELEMREV_DD
+
+[VEC_XL_LEN_R, vec_xl_len_r, __builtin_vec_xl_len_r, _ARCH_PPC64_PWR9]
+  vuc __builtin_vec_xl_len_r (unsigned char *, unsigned long long);
+ XL_LEN_R
+
+[VEC_XL_SEXT, vec_xl_sext, __builtin_vec_xl_sext, _ARCH_PWR10]
+ vsq __builtin_vec_xl_sext (signed long long, signed char *);
+ SE_LXVRBX
+ vsq __builtin_vec_xl_sext (signed long long, signed short *);
+ SE_LXVRHX
+ vsq __builtin_vec_xl_sext (signed long long, signed int *);
+ SE_LXVRWX
+ vsq __builtin_vec_xl_sext (signed long long, signed long long *);
+ SE_LXVRDX
+
+[VEC_XL_ZEXT, vec_xl_zext, __builtin_vec_xl_zext, _ARCH_PWR10]
+  vuq __builtin_vec_xl_zext (signed long long, unsigned char *);
+    ZE_LXVRBX
+  vuq __builtin_vec_xl_zext (signed long long, unsigned short *);
+    ZE_LXVRHX
+  vuq __builtin_vec_xl_zext (signed long long, unsigned int *);
+    ZE_LXVRWX
+  vuq __builtin_vec_xl_zext (signed long long, unsigned long long *);
+    ZE_LXVRDX
+
+[VEC_XOR, vec_xor, __builtin_vec_xor]
+ vsc __builtin_vec_xor (vsc, vsc);
+ VXOR_V16QI
+ vuc __builtin_vec_xor (vuc, vuc);
+ VXOR_V16QI_UNS VXOR_VUC
+ vbc __builtin_vec_xor (vbc, vbc);
+ VXOR_V16QI_UNS VXOR_VBC
+ vss __builtin_vec_xor (vss, vss);
+ VXOR_V8HI
+ vus __builtin_vec_xor (vus, vus);
+ VXOR_V8HI_UNS VXOR_VUS
+ vbs __builtin_vec_xor (vbs, vbs);
+ VXOR_V8HI_UNS VXOR_VBS
+ vsi __builtin_vec_xor (vsi, vsi);
+ VXOR_V4SI
+ vui __builtin_vec_xor (vui, vui);
+ VXOR_V4SI_UNS VXOR_VUI
+ vbi __builtin_vec_xor (vbi, vbi);
+ VXOR_V4SI_UNS VXOR_VBI
+ vsll __builtin_vec_xor (vsll, vsll);
+ VXOR_V2DI
+ vull __builtin_vec_xor (vull, vull);
+ VXOR_V2DI_UNS VXOR_VULL
+ vbll __builtin_vec_xor (vbll, vbll);
+ VXOR_V2DI_UNS VXOR_VBLL
+ vf __builtin_vec_xor (vf, vf);
+ VXOR_V4SF
+ vd __builtin_vec_xor (vd, vd);
+ VXOR_V2DF
+
+[VEC_XST, vec_xst, __builtin_vec_vsx_st, __VSX__]
+ void __builtin_vec_vsx_st (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC
+ void __builtin_vec_vsx_st (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC
+ void __builtin_vec_vsx_st (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC
+ void __builtin_vec_vsx_st (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC
+ void __builtin_vec_vsx_st (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC
+ void __builtin_vec_vsx_st (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S
+ void __builtin_vec_vsx_st (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U
+ void __builtin_vec_vsx_st (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS
+ void __builtin_vec_vsx_st (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS
+ void __builtin_vec_vsx_st (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS
+ void __builtin_vec_vsx_st (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US
+ void __builtin_vec_vsx_st (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS
+ void __builtin_vec_vsx_st (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S
+ void __builtin_vec_vsx_st (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U
+ void __builtin_vec_vsx_st (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP
+ void __builtin_vec_vsx_st (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI
+ void __builtin_vec_vsx_st (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI
+ void __builtin_vec_vsx_st (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI
+ void __builtin_vec_vsx_st (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI
+ void __builtin_vec_vsx_st (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI
+ void __builtin_vec_vsx_st (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S
+ void __builtin_vec_vsx_st (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U
+ void __builtin_vec_vsx_st (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL
+ void __builtin_vec_vsx_st (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL
+ void __builtin_vec_vsx_st (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL
+ void __builtin_vec_vsx_st (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL
+ void __builtin_vec_vsx_st (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL
+ void __builtin_vec_vsx_st (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF
+ void __builtin_vec_vsx_st (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F
+ void __builtin_vec_vsx_st (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD
+ void __builtin_vec_vsx_st (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D
+
+[VEC_XST_BE, vec_xst_be, __builtin_vec_xst_be, __VSX__]
+ void __builtin_vec_xst_be (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC_BE
+ void __builtin_vec_xst_be (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S_BE
+ void __builtin_vec_xst_be (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U_BE
+ void __builtin_vec_xst_be (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS_BE
+ void __builtin_vec_xst_be (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS_BE
+ void __builtin_vec_xst_be (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS_BE
+ void __builtin_vec_xst_be (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US_BE
+ void __builtin_vec_xst_be (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS_BE
+ void __builtin_vec_xst_be (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S_BE
+ void __builtin_vec_xst_be (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U_BE
+ void __builtin_vec_xst_be (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP_BE
+ void __builtin_vec_xst_be (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI_BE
+ void __builtin_vec_xst_be (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI_BE
+ void __builtin_vec_xst_be (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI_BE
+ void __builtin_vec_xst_be (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S_BE
+ void __builtin_vec_xst_be (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U_BE
+ void __builtin_vec_xst_be (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL_BE
+ void __builtin_vec_xst_be (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL_BE
+ void __builtin_vec_xst_be (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL_BE
+ void __builtin_vec_xst_be (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL_BE
+ void __builtin_vec_xst_be (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL_BE
+ void __builtin_vec_xst_be (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF_BE
+ void __builtin_vec_xst_be (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F_BE
+ void __builtin_vec_xst_be (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD_BE
+ void __builtin_vec_xst_be (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D_BE
+
+[VEC_XST_LEN_R, vec_xst_len_r, __builtin_vec_xst_len_r, _ARCH_PPC64_PWR9]
+  void __builtin_vec_xst_len_r (vuc, unsigned char *, unsigned long long);
+ XST_LEN_R
+
+[VEC_XST_TRUNC, vec_xst_trunc, __builtin_vec_xst_trunc, _ARCH_PWR10]
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed char *);
+ TR_STXVRBX TR_STXVRBX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned char *);
+ TR_STXVRBX TR_STXVRBX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed short *);
+ TR_STXVRHX TR_STXVRHX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned short *);
+ TR_STXVRHX TR_STXVRHX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed int *);
+ TR_STXVRWX TR_STXVRWX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned int *);
+ TR_STXVRWX TR_STXVRWX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed long long *);
+ TR_STXVRDX TR_STXVRDX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned long long *);
+ TR_STXVRDX TR_STXVRDX_U
+
+[VEC_XXPERMDI, vec_xxpermdi, __builtin_vsx_xxpermdi, __VSX__]
+ vsc __builtin_vsx_xxpermdi (vsc, vsc, const int);
+ XXPERMDI_16QI XXPERMDI_VSC
+ vuc __builtin_vsx_xxpermdi (vuc, vuc, const int);
+ XXPERMDI_16QI XXPERMDI_VUC
+ vss __builtin_vsx_xxpermdi (vss, vss, const int);
+ XXPERMDI_8HI XXPERMDI_VSS
+ vus __builtin_vsx_xxpermdi (vus, vus, const int);
+ XXPERMDI_8HI XXPERMDI_VUS
+ vsi __builtin_vsx_xxpermdi (vsi, vsi, const int);
+ XXPERMDI_4SI XXPERMDI_VSI
+ vui __builtin_vsx_xxpermdi (vui, vui, const int);
+ XXPERMDI_4SI XXPERMDI_VUI
+ vsll __builtin_vsx_xxpermdi (vsll, vsll, const int);
+ XXPERMDI_2DI XXPERMDI_VSLL
+ vull __builtin_vsx_xxpermdi (vull, vull, const int);
+ XXPERMDI_2DI XXPERMDI_VULL
+ vf __builtin_vsx_xxpermdi (vf, vf, const int);
+ XXPERMDI_4SF XXPERMDI_VF
+ vd __builtin_vsx_xxpermdi (vd, vd, const int);
+ XXPERMDI_2DF XXPERMDI_VD
+
+[VEC_XXSLDWI, vec_xxsldwi, __builtin_vsx_xxsldwi, __VSX__]
+ vsc __builtin_vsx_xxsldwi (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC2
+ vuc __builtin_vsx_xxsldwi (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC2
+ vss __builtin_vsx_xxsldwi (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS2
+ vus __builtin_vsx_xxsldwi (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS2
+ vsi __builtin_vsx_xxsldwi (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI2
+ vui __builtin_vsx_xxsldwi (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI2
+ vsll __builtin_vsx_xxsldwi (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL2
+ vull __builtin_vsx_xxsldwi (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL2
+ vf __builtin_vsx_xxsldwi (vf, vf, const int);
+ XXSLDWI_4SF XXSLDWI_VF2
+ vd __builtin_vsx_xxsldwi (vd, vd, const int);
+ XXSLDWI_2DF XXSLDWI_VD2
^ permalink raw reply [flat|nested] 4+ messages in thread
* [gcc(refs/users/wschmidt/heads/builtins4)] rs6000: Add remaining overloads
@ 2020-12-16 18:07 William Schmidt
0 siblings, 0 replies; 4+ messages in thread
From: William Schmidt @ 2020-12-16 18:07 UTC (permalink / raw)
To: gcc-cvs
https://gcc.gnu.org/g:45551b163cfd17bcf0878a2e531f43f3f521a098
commit 45551b163cfd17bcf0878a2e531f43f3f521a098
Author: Bill Schmidt <wschmidt@linux.ibm.com>
Date: Mon Nov 2 09:55:43 2020 -0500
rs6000: Add remaining overloads
2020-11-02 Bill Schmidt <wschmidt@linux.ibm.com>
* config/rs6000/rs6000-overload.def: Add remaining overloads.
Diff:
---
gcc/config/rs6000/rs6000-overload.def | 3628 +++++++++++++++++++++++++++++++++
1 file changed, 3628 insertions(+)
diff --git a/gcc/config/rs6000/rs6000-overload.def b/gcc/config/rs6000/rs6000-overload.def
index 7c28cdcb84c..82037644157 100644
--- a/gcc/config/rs6000/rs6000-overload.def
+++ b/gcc/config/rs6000/rs6000-overload.def
@@ -74,8 +74,3636 @@
; a semicolon are also treated as blank lines.
+[CRYPTO_PERMXOR, SKIP, __builtin_crypto_vpermxor]
+ vuc __builtin_crypto_vpermxor (vuc, vuc, vuc);
+ VPERMXOR_V16QI
+ vus __builtin_crypto_vpermxor (vus, vus, vus);
+ VPERMXOR_V8HI
+ vui __builtin_crypto_vpermxor (vui, vui, vui);
+ VPERMXOR_V4SI
+ vull __builtin_crypto_vpermxor (vull, vull, vull);
+ VPERMXOR_V2DI
+
+[CRYPTO_PMSUM, SKIP, __builtin_crypto_vpmsum]
+ vuc __builtin_crypto_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_C
+ vus __builtin_crypto_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_C
+ vui __builtin_crypto_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_C
+ vull __builtin_crypto_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_C
+
+[SCAL_CMPB, SKIP, __builtin_cmpb]
+ unsigned int __builtin_cmpb (unsigned int, unsigned int);
+ CMPB_32
+ unsigned long long __builtin_cmpb (unsigned long long, unsigned long long);
+ CMPB
+
[VEC_ABS, vec_abs, __builtin_vec_abs]
vsc __builtin_vec_abs (vsc);
ABS_V16QI
vss __builtin_vec_abs (vss);
ABS_V8HI
+ vsi __builtin_vec_abs (vsi);
+ ABS_V4SI
+ vsll __builtin_vec_abs (vsll);
+ ABS_V2DI
+ vf __builtin_vec_abs (vf);
+ ABS_V4SF
+ vd __builtin_vec_abs (vd);
+ XVABSDP
+
+[VEC_ABSD, vec_absd, __builtin_vec_vadu, _ARCH_PWR9]
+ vuc __builtin_vec_vadu (vuc, vuc);
+ VADUB
+ vus __builtin_vec_vadu (vus, vus);
+ VADUH
+ vui __builtin_vec_vadu (vui, vui);
+ VADUW
+
+[VEC_ABSS, vec_abss, __builtin_vec_abss]
+ vsc __builtin_vec_abss (vsc);
+ ABSS_V16QI
+ vss __builtin_vec_abss (vss);
+ ABSS_V8HI
+ vsi __builtin_vec_abss (vsi);
+ ABSS_V4SI
+
+[VEC_ADD, vec_add, __builtin_vec_add]
+ vsc __builtin_vec_add (vsc, vsc);
+ VADDUBM VADDUBM_VSC
+ vuc __builtin_vec_add (vuc, vuc);
+ VADDUBM VADDUBM_VUC
+ vss __builtin_vec_add (vss, vss);
+ VADDUHM VADDUHM_VSS
+ vus __builtin_vec_add (vus, vus);
+ VADDUHM VADDUHM_VUS
+ vsi __builtin_vec_add (vsi, vsi);
+ VADDUWM VADDUWM_VSI
+ vui __builtin_vec_add (vui, vui);
+ VADDUWM VADDUWM_VUI
+ vsll __builtin_vec_add (vsll, vsll);
+ VADDUDM VADDUDM_VSLL
+ vull __builtin_vec_add (vull, vull);
+ VADDUDM VADDUDM_VULL
+ vsq __builtin_vec_add (vsq, vsq);
+ VADDUQM VADDUQM_VSQ
+ vuq __builtin_vec_add (vuq, vuq);
+ VADDUQM VADDUQM_VUQ
+ vf __builtin_vec_add (vf, vf);
+ XVADDSP
+ vd __builtin_vec_add (vd, vd);
+ XVADDDP
+
+[VEC_ADDC, vec_addc, __builtin_vec_addc]
+ vsi __builtin_vec_addc (vsi, vsi);
+ VADDCUW VADDCUW_VSI
+ vui __builtin_vec_addc (vui, vui);
+ VADDCUW VADDCUW_VUI
+ vsq __builtin_vec_addc (vsq, vsq);
+ VADDCUQ VADDCUQ_VSQ
+ vuq __builtin_vec_addc (vuq, vuq);
+ VADDCUQ VADDCUQ_VUQ
+
+; TODO: Note that the entry for VEC_ADDEC currently gets ignored in
+; altivec_resolve_overloaded_builtin. There are also forms for
+; vsi and vui arguments, but rather than building a define_expand
+; for the instruction sequence generated for those, we do some RTL
+; hackery. Revisit whether we can remove that. For now, keep this
+; much of the entry here to generate the #define, at least.
+[VEC_ADDEC, vec_addec, __builtin_vec_addec]
+ vsq __builtin_vec_addec (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ
+ vuq __builtin_vec_addec (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ
+
+[VEC_ADDS, vec_adds, __builtin_vec_adds]
+ vuc __builtin_vec_adds (vuc, vuc);
+ VADDUBS
+ vsc __builtin_vec_adds (vsc, vsc);
+ VADDSBS
+ vus __builtin_vec_adds (vus, vus);
+ VADDUHS
+ vss __builtin_vec_adds (vss, vss);
+ VADDSHS
+ vui __builtin_vec_adds (vui, vui);
+ VADDUWS
+ vsi __builtin_vec_adds (vsi, vsi);
+ VADDSWS
+
+[VEC_ANDC, vec_andc, __builtin_vec_andc]
+ vbc __builtin_vec_andc (vbc, vbc);
+ VANDC_V16QI_UNS VANDC_VBC
+ vsc __builtin_vec_andc (vsc, vsc);
+ VANDC_V16QI
+ vuc __builtin_vec_andc (vuc, vuc);
+ VANDC_V16QI_UNS VANDC_VUC
+ vbs __builtin_vec_andc (vbs, vbs);
+ VANDC_V8HI_UNS VANDC_VBS
+ vss __builtin_vec_andc (vss, vss);
+ VANDC_V8HI
+ vus __builtin_vec_andc (vus, vus);
+ VANDC_V8HI_UNS VANDC_VUS
+ vbi __builtin_vec_andc (vbi, vbi);
+ VANDC_V4SI_UNS VANDC_VBI
+ vsi __builtin_vec_andc (vsi, vsi);
+ VANDC_V4SI
+ vui __builtin_vec_andc (vui, vui);
+ VANDC_V4SI_UNS VANDC_VUI
+ vbll __builtin_vec_andc (vbll, vbll);
+ VANDC_V4SI_UNS VANDC_VBLL
+ vsll __builtin_vec_andc (vsll, vsll);
+ VANDC_V2DI
+ vull __builtin_vec_andc (vull, vull);
+ VANDC_V2DI_UNS VANDC_VULL
+ vf __builtin_vec_andc (vf, vf);
+ VANDC_V4SF
+ vd __builtin_vec_andc (vd, vd);
+ VANDC_V2DF
+
+[VEC_AVG, vec_avg, __builtin_vec_avg]
+ vsc __builtin_vec_avg (vsc, vsc);
+ VAVGSB
+ vuc __builtin_vec_avg (vuc, vuc);
+ VAVGUB
+ vss __builtin_vec_avg (vss, vss);
+ VAVGSH
+ vus __builtin_vec_avg (vus, vus);
+ VAVGUH
+ vsi __builtin_vec_avg (vsi, vsi);
+ VAVGSW
+ vui __builtin_vec_avg (vui, vui);
+ VAVGUW
+
+[VEC_BLENDV, vec_blendv, __builtin_vec_xxblend, _ARCH_PWR10]
+ vsc __builtin_vec_xxblend (vsc, vsc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VSC
+ vuc __builtin_vec_xxblend (vuc, vuc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VUC
+ vss __builtin_vec_xxblend (vss, vss, vus);
+ VXXBLEND_V8HI VXXBLEND_VSS
+ vus __builtin_vec_xxblend (vus, vus, vus);
+ VXXBLEND_V8HI VXXBLEND_VUS
+ vsi __builtin_vec_xxblend (vsi, vsi, vui);
+ VXXBLEND_V4SI VXXBLEND_VSI
+ vui __builtin_vec_xxblend (vui, vui, vui);
+ VXXBLEND_V4SI VXXBLEND_VUI
+ vsll __builtin_vec_xxblend (vsll, vsll, vull);
+ VXXBLEND_V2DI VXXBLEND_VSLL
+ vull __builtin_vec_xxblend (vull, vull, vull);
+ VXXBLEND_V2DI VXXBLEND_VULL
+ vf __builtin_vec_xxblend (vf, vf, vui);
+ VXXBLEND_V4SF
+ vd __builtin_vec_xxblend (vd, vd, vull);
+ VXXBLEND_V2DF
+
+[VEC_BPERM, vec_bperm, __builtin_vec_vbperm_api, _ARCH_PWR8]
+ vull __builtin_vec_vbperm_api (vull, vuc);
+ VBPERMD VBPERMD_VULL
+ vull __builtin_vec_vbperm_api (vuq, vuc);
+ VBPERMD VBPERMD_VUQ
+ vuc __builtin_vec_vbperm_api (vuc, vuc);
+ VBPERMQ2
+
+[VEC_CEIL, vec_ceil, __builtin_vec_ceil]
+ vf __builtin_vec_ceil (vf);
+ XVRSPIP
+ vd __builtin_vec_ceil (vd);
+ XVRDPIP
+
+[VEC_CFUGE, vec_cfuge, __builtin_vec_cfuge, _ARCH_PWR10]
+ vull __builtin_vec_cfuge (vull, vull);
+ VCFUGED
+
+[VEC_CIPHER_BE, vec_cipher_be, __builtin_vec_vcipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipher_be (vuc, vuc);
+ VCIPHER_BE
+
+[VEC_CIPHERLAST_BE, vec_cipherlast_be, __builtin_vec_vcipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipherlast_be (vuc, vuc);
+ VCIPHERLAST_BE
+
+[VEC_CLRL, vec_clrl, __builtin_vec_clrl, _ARCH_PWR10]
+ vsc __builtin_vec_clrl (vsc, unsigned int);
+ VCLRLB VCLRLB_S
+ vuc __builtin_vec_clrl (vuc, unsigned int);
+ VCLRLB VCLRLB_U
+
+[VEC_CLRR, vec_clrr, __builtin_vec_clrr, _ARCH_PWR10]
+ vsc __builtin_vec_clrr (vsc, unsigned int);
+ VCLRRB VCLRRB_S
+ vuc __builtin_vec_clrr (vuc, unsigned int);
+ VCLRRB VCLRRB_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPAE_P, SKIP, __builtin_vec_vcmpae_p]
+ signed int __builtin_vec_vcmpae_p (vsc, vsc);
+ VCMPAEB_P VCMPAEB_VSC_P
+ signed int __builtin_vec_vcmpae_p (vuc, vuc);
+ VCMPAEB_P VCMPAEB_VUC_P
+ signed int __builtin_vec_vcmpae_p (vbc, vbc);
+ VCMPAEB_P VCMPAEB_VBC_P
+ signed int __builtin_vec_vcmpae_p (vss, vss);
+ VCMPAEH_P VCMPAEH_VSS_P
+ signed int __builtin_vec_vcmpae_p (vus, vus);
+ VCMPAEH_P VCMPAEH_VUS_P
+ signed int __builtin_vec_vcmpae_p (vbs, vbs);
+ VCMPAEH_P VCMPAEH_VBS_P
+ signed int __builtin_vec_vcmpae_p (vp, vp);
+ VCMPAEH_P VCMPAEH_VP_P
+ signed int __builtin_vec_vcmpae_p (vsi, vsi);
+ VCMPAEW_P VCMPAEW_VSI_P
+ signed int __builtin_vec_vcmpae_p (vui, vui);
+ VCMPAEW_P VCMPAEW_VUI_P
+ signed int __builtin_vec_vcmpae_p (vbi, vbi);
+ VCMPAEW_P VCMPAEW_VBI_P
+ signed int __builtin_vec_vcmpae_p (vsll, vsll);
+ VCMPAED_P VCMPAED_VSLL_P
+ signed int __builtin_vec_vcmpae_p (vull, vull);
+ VCMPAED_P VCMPAED_VULL_P
+ signed int __builtin_vec_vcmpae_p (vbll, vbll);
+ VCMPAED_P VCMPAED_VBLL_P
+ signed int __builtin_vec_vcmpae_p (vf, vf);
+ VCMPAEFP_P
+ signed int __builtin_vec_vcmpae_p (vd, vd);
+ VCMPAEDP_P
+
+[VEC_CMPB, vec_cmpb, __builtin_vec_cmpb]
+ vsi __builtin_vec_cmpb (vf, vf);
+ VCMPBFP
+
+[VEC_CMPEQ, vec_cmpeq, __builtin_vec_cmpeq]
+ vbc __builtin_vec_cmpeq (vsc, vsc);
+ VCMPEQUB VCMPEQUB_VSC
+ vbc __builtin_vec_cmpeq (vuc, vuc);
+ VCMPEQUB VCMPEQUB_VUC
+ vbc __builtin_vec_cmpeq (vbc, vbc);
+ VCMPEQUB VCMPEQUB_VBC
+ vbs __builtin_vec_cmpeq (vss, vss);
+ VCMPEQUH VCMPEQUH_VSS
+ vbs __builtin_vec_cmpeq (vus, vus);
+ VCMPEQUH VCMPEQUH_VUS
+ vbs __builtin_vec_cmpeq (vbs, vbs);
+ VCMPEQUH VCMPEQUH_VBS
+ vbi __builtin_vec_cmpeq (vsi, vsi);
+ VCMPEQUW VCMPEQUW_VSI
+ vbi __builtin_vec_cmpeq (vui, vui);
+ VCMPEQUW VCMPEQUW_VUI
+ vbi __builtin_vec_cmpeq (vbi, vbi);
+ VCMPEQUW VCMPEQUW_VBI
+ vbll __builtin_vec_cmpeq (vsll, vsll);
+ VCMPEQUD VCMPEQUD_VSLL
+ vbll __builtin_vec_cmpeq (vull, vull);
+ VCMPEQUD VCMPEQUD_VULL
+ vbll __builtin_vec_cmpeq (vbll, vbll);
+ VCMPEQUD VCMPEQUD_VBLL
+ vbi __builtin_vec_cmpeq (vf, vf);
+ XVCMPEQSP
+ vbll __builtin_vec_cmpeq (vd, vd);
+ XVCMPEQDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPEQ_P, SKIP, __builtin_vec_vcmpeq_p]
+ signed int __builtin_vec_vcmpeq_p (signed int, vuc, vuc);
+ VCMPEQUB_P VCMPEQUB_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsc, vsc);
+ VCMPEQUB_P VCMPEQUB_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbc, vbc);
+ VCMPEQUB_P VCMPEQUB_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vus, vus);
+ VCMPEQUH_P VCMPEQUH_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vss, vss);
+ VCMPEQUH_P VCMPEQUH_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbs, vbs);
+ VCMPEQUH_P VCMPEQUH_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vp, vp);
+ VCMPEQUH_P VCMPEQUH_PP
+ signed int __builtin_vec_vcmpeq_p (signed int, vui, vui);
+ VCMPEQUW_P VCMPEQUW_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsi, vsi);
+ VCMPEQUW_P VCMPEQUW_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbi, vbi);
+ VCMPEQUW_P VCMPEQUW_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vull, vull);
+ VCMPEQUD_P VCMPEQUD_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsll, vsll);
+ VCMPEQUD_P VCMPEQUD_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbll, vbll);
+ VCMPEQUD_P VCMPEQUD_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vf, vf);
+ XVCMPEQSP_P
+ signed int __builtin_vec_vcmpeq_p (signed int, vd, vd);
+ XVCMPEQDP_P
+
+[VEC_CMPEQB, SKIP, __builtin_byte_in_set]
+ signed int __builtin_byte_in_set (unsigned char, unsigned long long);
+ CMPEQB
+
+[VEC_CMPGE, vec_cmpge, __builtin_vec_cmpge]
+ vbc __builtin_vec_cmpge (vsc, vsc);
+ CMPGE_16QI CMPGE_16QI_VSC
+ vbc __builtin_vec_cmpge (vuc, vuc);
+ CMPGE_16QI CMPGE_16QI_VUC
+ vbs __builtin_vec_cmpge (vss, vss);
+ CMPGE_8HI CMPGE_8HI_VSS
+ vbs __builtin_vec_cmpge (vus, vus);
+ CMPGE_8HI CMPGE_8HI_VUS
+ vbi __builtin_vec_cmpge (vsi, vsi);
+ CMPGE_4SI CMPGE_4SI_VSI
+ vbi __builtin_vec_cmpge (vui, vui);
+ CMPGE_4SI CMPGE_4SI_VUI
+ vbll __builtin_vec_cmpge (vsll, vsll);
+ CMPGE_2DI CMPGE_2DI_VSLL
+ vbll __builtin_vec_cmpge (vull, vull);
+ CMPGE_2DI CMPGE_2DI_VULL
+ vbi __builtin_vec_cmpge (vf, vf);
+ XVCMPGESP
+ vbll __builtin_vec_cmpge (vd, vd);
+ XVCMPGEDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; Also, cmpge is the same as cmpgt for all cases except floating point.
+; There is further code to deal with this special case in
+; altivec_build_resolved_builtin. TODO: Make sure this is still true.
+[VEC_CMPGE_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P VCMPGTUB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P VCMPGTSB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P VCMPGTUH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P VCMPGTSH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P VCMPGTUW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P VCMPGTSW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P VCMPGTUD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P VCMPGTSD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGESP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGEDP_P
+
+[VEC_CMPGT, vec_cmpgt, __builtin_vec_cmpgt]
+ vbc __builtin_vec_cmpgt (vsc, vsc);
+ VCMPGTSB
+ vbc __builtin_vec_cmpgt (vuc, vuc);
+ VCMPGTUB
+ vbs __builtin_vec_cmpgt (vss, vss);
+ VCMPGTSH
+ vbs __builtin_vec_cmpgt (vus, vus);
+ VCMPGTUH
+ vbi __builtin_vec_cmpgt (vsi, vsi);
+ VCMPGTSW
+ vbi __builtin_vec_cmpgt (vui, vui);
+ VCMPGTUW
+ vbll __builtin_vec_cmpgt (vsll, vsll);
+ VCMPGTSD
+ vbll __builtin_vec_cmpgt (vull, vull);
+ VCMPGTUD
+ vbi __builtin_vec_cmpgt (vf, vf);
+ XVCMPGTSP
+ vbll __builtin_vec_cmpgt (vd, vd);
+ XVCMPGTDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPGT_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGTSP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGTDP_P
+
+; Note that there is no entry for VEC_CMPLE. VEC_CMPLE is implemented
+; using VEC_CMPGE with reversed arguments in altivec.h.
+
+; Note that there is no entry for VEC_CMPLT. VEC_CMPLT is implemented
+; using VEC_CMPGT with reversed arguments in altivec.h.
+
+[VEC_CMPNE, vec_cmpne, __builtin_vec_cmpne]
+ vbc __builtin_vec_cmpne (vbc, vbc);
+ VCMPNEB VCMPNEB_VBC
+ vbc __builtin_vec_cmpne (vsc, vsc);
+ VCMPNEB VCMPNEB_VSC
+ vbc __builtin_vec_cmpne (vuc, vuc);
+ VCMPNEB VCMPNEB_VUC
+ vbs __builtin_vec_cmpne (vbs, vbs);
+ VCMPNEH VCMPNEH_VBS
+ vbs __builtin_vec_cmpne (vss, vss);
+ VCMPNEH VCMPNEH_VSS
+ vbs __builtin_vec_cmpne (vus, vus);
+ VCMPNEH VCMPNEH_VUS
+ vbi __builtin_vec_cmpne (vbi, vbi);
+ VCMPNEW VCMPNEW_VBI
+ vbi __builtin_vec_cmpne (vsi, vsi);
+ VCMPNEW VCMPNEW_VSI
+ vbi __builtin_vec_cmpne (vui, vui);
+ VCMPNEW VCMPNEW_VUI
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNE_P, SKIP, __builtin_vec_vcmpne_p]
+ signed int __builtin_vec_vcmpne_p (vsc, vsc);
+ VCMPNEB_P VCMPNEB_VSC_P
+ signed int __builtin_vec_vcmpne_p (vuc, vuc);
+ VCMPNEB_P VCMPNEB_VUC_P
+ signed int __builtin_vec_vcmpne_p (vbc, vbc);
+ VCMPNEB_P VCMPNEB_VBC_P
+ signed int __builtin_vec_vcmpne_p (vss, vss);
+ VCMPNEH_P VCMPNEH_VSS_P
+ signed int __builtin_vec_vcmpne_p (vus, vus);
+ VCMPNEH_P VCMPNEH_VUS_P
+ signed int __builtin_vec_vcmpne_p (vbs, vbs);
+ VCMPNEH_P VCMPNEH_VBS_P
+ signed int __builtin_vec_vcmpne_p (vp, vp);
+ VCMPNEH_P VCMPNEH_VP_P
+ signed int __builtin_vec_vcmpne_p (vsi, vsi);
+ VCMPNEW_P VCMPNEW_VSI_P
+ signed int __builtin_vec_vcmpne_p (vui, vui);
+ VCMPNEW_P VCMPNEW_VUI_P
+ signed int __builtin_vec_vcmpne_p (vbi, vbi);
+ VCMPNEW_P VCMPNEW_VBI_P
+ signed int __builtin_vec_vcmpne_p (vsll, vsll);
+ VCMPNED_P VCMPNED_VSLL_P
+ signed int __builtin_vec_vcmpne_p (vull, vull);
+ VCMPNED_P VCMPNED_VULL_P
+ signed int __builtin_vec_vcmpne_p (vbll, vbll);
+ VCMPNED_P VCMPNED_VBLL_P
+ signed int __builtin_vec_vcmpne_p (vf, vf);
+ VCMPNEFP_P
+ signed int __builtin_vec_vcmpne_p (vd, vd);
+ VCMPNEDP_P
+
+[VEC_CMPNEZ, vec_cmpnez, __builtin_vec_cmpnez, _ARCH_PWR9]
+ vbc __builtin_vec_cmpnez (vsc, vsc);
+ CMPNEZB CMPNEZB_S
+ vbc __builtin_vec_cmpnez (vuc, vuc);
+ CMPNEZB CMPNEZB_U
+ vbs __builtin_vec_cmpnez (vss, vss);
+ CMPNEZH CMPNEZH_S
+ vbs __builtin_vec_cmpnez (vus, vus);
+ CMPNEZH CMPNEZH_U
+ vbi __builtin_vec_cmpnez (vsi, vsi);
+ CMPNEZW CMPNEZW_S
+ vbi __builtin_vec_cmpnez (vui, vui);
+ CMPNEZW CMPNEZW_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNEZ_P, SKIP, __builtin_vec_vcmpnez_p]
+ signed int __builtin_vec_vcmpnez_p (signed int, vsc, vsc);
+ VCMPNEZB_P VCMPNEZB_VSC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vuc, vuc);
+ VCMPNEZB_P VCMPNEZB_VUC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vss, vss);
+ VCMPNEZH_P VCMPNEZH_VSS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vus, vus);
+ VCMPNEZH_P VCMPNEZH_VUS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vsi, vsi);
+ VCMPNEZW_P VCMPNEZW_VSI_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vui, vui);
+ VCMPNEZW_P VCMPNEZW_VUI_P
+
+[VEC_CMPRB, SKIP, __builtin_byte_in_range]
+ signed int __builtin_byte_in_range (unsigned char, unsigned int);
+ CMPRB
+
+[VEC_CMPRB2, SKIP, __builtin_byte_in_either_range]
+ signed int __builtin_byte_in_either_range (unsigned char, unsigned int);
+ CMPRB2
+
+[VEC_CNTLZ, vec_cntlz, __builtin_vec_vclz, _ARCH_PWR8]
+ vsc __builtin_vec_vclz (vsc);
+ VCLZB VCLZB_S
+ vuc __builtin_vec_vclz (vuc);
+ VCLZB VCLZB_U
+ vss __builtin_vec_vclz (vss);
+ VCLZH VCLZH_S
+ vus __builtin_vec_vclz (vus);
+ VCLZH VCLZH_U
+ vsi __builtin_vec_vclz (vsi);
+ VCLZW VCLZW_S
+ vui __builtin_vec_vclz (vui);
+ VCLZW VCLZW_U
+ vsll __builtin_vec_vclz (vsll);
+ VCLZD VCLZD_S
+ vull __builtin_vec_vclz (vull);
+ VCLZD VCLZD_U
+
+[VEC_CNTLZM, vec_cntlzm, __builtin_vec_vclzdm, _ARCH_PWR10]
+ vull __builtin_vec_vclzdm (vull, vull);
+ VCLZDM
+
+[VEC_CNTTZM, vec_cnttzm, __builtin_vec_vctzdm, _ARCH_PWR10]
+ vull __builtin_vec_vctzdm (vull, vull);
+ CNTTZDM
+
+[VEC_CNTLZ_LSBB, vec_cntlz_lsbb, __builtin_vec_vclzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vclzlsbb (vsc);
+ VCLZLSBB_V16QI VCLZLSBB_VSC
+ signed int __builtin_vec_vclzlsbb (vuc);
+ VCLZLSBB_V16QI VCLZLSBB_VUC
+ signed int __builtin_vec_vclzlsbb (vss);
+ VCLZLSBB_V8HI VCLZLSBB_VSS
+ signed int __builtin_vec_vclzlsbb (vus);
+ VCLZLSBB_V8HI VCLZLSBB_VUS
+ signed int __builtin_vec_vclzlsbb (vsi);
+ VCLZLSBB_V4SI VCLZLSBB_VSI
+ signed int __builtin_vec_vclzlsbb (vui);
+ VCLZLSBB_V4SI VCLZLSBB_VUI
+
+[VEC_CNTM, vec_cntm, __builtin_vec_cntm, _ARCH_PWR10]
+ unsigned long long __builtin_vec_cntm (vuc, const int);
+ VCNTMBB
+ unsigned long long __builtin_vec_cntm (vus, const int);
+ VCNTMBH
+ unsigned long long __builtin_vec_cntm (vui, const int);
+ VCNTMBW
+ unsigned long long __builtin_vec_cntm (vull, const int);
+ VCNTMBD
+
+[VEC_CNTTZ, vec_cnttz, __builtin_vec_vctz, _ARCH_PWR9]
+ vsc __builtin_vec_vctz (vsc);
+ VCTZB VCTZB_S
+ vuc __builtin_vec_vctz (vuc);
+ VCTZB VCTZB_U
+ vss __builtin_vec_vctz (vss);
+ VCTZH VCTZH_S
+ vus __builtin_vec_vctz (vus);
+ VCTZH VCTZH_U
+ vsi __builtin_vec_vctz (vsi);
+ VCTZW VCTZW_S
+ vui __builtin_vec_vctz (vui);
+ VCTZW VCTZW_U
+ vsll __builtin_vec_vctz (vsll);
+ VCTZD VCTZD_S
+ vull __builtin_vec_vctz (vull);
+ VCTZD VCTZD_U
+
+[VEC_CNTTZ_LSBB, vec_cnttz_lsbb, __builtin_vec_vctzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vctzlsbb (vsc);
+ VCTZLSBB_V16QI VCTZLSBB_VSC
+ signed int __builtin_vec_vctzlsbb (vuc);
+ VCTZLSBB_V16QI VCTZLSBB_VUC
+ signed int __builtin_vec_vctzlsbb (vss);
+ VCTZLSBB_V8HI VCTZLSBB_VSS
+ signed int __builtin_vec_vctzlsbb (vus);
+ VCTZLSBB_V8HI VCTZLSBB_VUS
+ signed int __builtin_vec_vctzlsbb (vsi);
+ VCTZLSBB_V4SI VCTZLSBB_VSI
+ signed int __builtin_vec_vctzlsbb (vui);
+ VCTZLSBB_V4SI VCTZLSBB_VUI
+
+[VEC_CONVERT_4F32_8I16, SKIP, __builtin_vec_convert_4f32_8i16]
+ vus __builtin_vec_convert_4f32_8i16 (vf, vf);
+ CONVERT_4F32_8I16
+
+[VEC_CONVERT_4F32_8F16, vec_pack_to_short_fp32, __builtin_vec_convert_4f32_8f16, _ARCH_PWR9]
+ vus __builtin_vec_convert_4f32_8f16 (vf, vf);
+ CONVERT_4F32_8F16
+
+[VEC_COPYSIGN, vec_cpsgn, __builtin_vec_copysign]
+ vf __builtin_vec_copysign (vf, vf);
+ CPSGNSP
+ vd __builtin_vec_copysign (vd, vd);
+ CPSGNDP
+
+[VEC_CTF, vec_ctf, __builtin_vec_ctf]
+ vf __builtin_vec_ctf (vsi, const int);
+ VCFSX
+ vf __builtin_vec_ctf (vui, const int);
+ VCFUX
+ vd __builtin_vec_ctf (vsll, const int);
+ XVCVSXDDP_SCALE
+ vd __builtin_vec_ctf (vull, const int);
+ XVCVUXDDP_SCALE
+
+[VEC_CTS, vec_cts, __builtin_vec_cts]
+ vsi __builtin_vec_cts (vf, const int);
+ VCTSXS
+ vsll __builtin_vec_cts (vd, const int);
+ XVCVDPSXDS_SCALE
+
+[VEC_CTU, vec_ctu, __builtin_vec_ctu]
+ vui __builtin_vec_ctu (vf, const int);
+ VCTUXS
+ vull __builtin_vec_ctu (vd, const int);
+ XVCVDPUXDS_SCALE
+
+[VEC_DIV, vec_div, __builtin_vec_div, __VSX__]
+ vsll __builtin_vec_div (vsll, vsll);
+ DIV_V2DI
+ vull __builtin_vec_div (vull, vull);
+ UDIV_V2DI
+ vf __builtin_vec_div (vf, vf);
+ XVDIVSP
+ vd __builtin_vec_div (vd, vd);
+ XVDIVDP
+
+[VEC_DOUBLE, vec_double, __builtin_vec_double]
+ vd __builtin_vec_double (vsll);
+ XVCVSXDDP
+ vd __builtin_vec_double (vull);
+ XVCVUXDDP
+
+[VEC_DOUBLEE, vec_doublee, __builtin_vec_doublee]
+ vd __builtin_vec_doublee (vsi);
+ DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vui);
+ UNS_DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vf);
+ DOUBLEE_V4SF
+
+[VEC_DOUBLEH, vec_doubleh, __builtin_vec_doubleh]
+ vd __builtin_vec_doubleh (vsi);
+ DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vui);
+ UNS_DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vf);
+ DOUBLEH_V4SF
+
+[VEC_DOUBLEL, vec_doublel, __builtin_vec_doublel]
+ vd __builtin_vec_doublel (vsi);
+ DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vui);
+ UNS_DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vf);
+ DOUBLEL_V4SF
+
+[VEC_DOUBLEO, vec_doubleo, __builtin_vec_doubleo]
+ vd __builtin_vec_doubleo (vsi);
+ DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vui);
+ UNS_DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vf);
+ DOUBLEO_V4SF
+
+[VEC_DST, vec_dst, __builtin_vec_dst]
+ void __builtin_vec_dst (unsigned char *, const int, const int);
+ DST DST_UC
+ void __builtin_vec_dst (signed char *, const int, const int);
+ DST DST_SC
+ void __builtin_vec_dst (unsigned short *, const int, const int);
+ DST DST_US
+ void __builtin_vec_dst (signed short *, const int, const int);
+ DST DST_SS
+ void __builtin_vec_dst (unsigned int *, const int, const int);
+ DST DST_UI
+ void __builtin_vec_dst (signed int *, const int, const int);
+ DST DST_SI
+ void __builtin_vec_dst (unsigned long long *, const int, const int);
+ DST DST_ULL
+ void __builtin_vec_dst (signed long long *, const int, const int);
+ DST DST_SLL
+ void __builtin_vec_dst (float *, const int, const int);
+ DST DST_F
+ void __builtin_vec_dst (vuc *, const int, const int);
+ DST DST_VUC
+ void __builtin_vec_dst (vsc *, const int, const int);
+ DST DST_VSC
+ void __builtin_vec_dst (vbc *, const int, const int);
+ DST DST_VBC
+ void __builtin_vec_dst (vus *, const int, const int);
+ DST DST_VUS
+ void __builtin_vec_dst (vss *, const int, const int);
+ DST DST_VSS
+ void __builtin_vec_dst (vbs *, const int, const int);
+ DST DST_VBS
+ void __builtin_vec_dst (vp *, const int, const int);
+ DST DST_VP
+ void __builtin_vec_dst (vui *, const int, const int);
+ DST DST_VUI
+ void __builtin_vec_dst (vsi *, const int, const int);
+ DST DST_VSI
+ void __builtin_vec_dst (vbi *, const int, const int);
+ DST DST_VBI
+ void __builtin_vec_dst (vf *, const int, const int);
+ DST DST_VF
+
+[VEC_DSTST, vec_dstst, __builtin_vec_dstst]
+ void __builtin_vec_dstst (unsigned char *, const int, const int);
+ DSTST DSTST_UC
+ void __builtin_vec_dstst (signed char *, const int, const int);
+ DSTST DSTST_SC
+ void __builtin_vec_dstst (unsigned short *, const int, const int);
+ DSTST DSTST_US
+ void __builtin_vec_dstst (signed short *, const int, const int);
+ DSTST DSTST_SS
+ void __builtin_vec_dstst (unsigned int *, const int, const int);
+ DSTST DSTST_UI
+ void __builtin_vec_dstst (signed int *, const int, const int);
+ DSTST DSTST_SI
+ void __builtin_vec_dstst (unsigned long long *, const int, const int);
+ DSTST DSTST_ULL
+ void __builtin_vec_dstst (signed long long *, const int, const int);
+ DSTST DSTST_SLL
+ void __builtin_vec_dstst (float *, const int, const int);
+ DSTST DSTST_F
+ void __builtin_vec_dstst (vuc *, const int, const int);
+ DSTST DSTST_VUC
+ void __builtin_vec_dstst (vsc *, const int, const int);
+ DSTST DSTST_VSC
+ void __builtin_vec_dstst (vbc *, const int, const int);
+ DSTST DSTST_VBC
+ void __builtin_vec_dstst (vus *, const int, const int);
+ DSTST DSTST_VUS
+ void __builtin_vec_dstst (vss *, const int, const int);
+ DSTST DSTST_VSS
+ void __builtin_vec_dstst (vbs *, const int, const int);
+ DSTST DSTST_VBS
+ void __builtin_vec_dstst (vp *, const int, const int);
+ DSTST DSTST_VP
+ void __builtin_vec_dstst (vui *, const int, const int);
+ DSTST DSTST_VUI
+ void __builtin_vec_dstst (vsi *, const int, const int);
+ DSTST DSTST_VSI
+ void __builtin_vec_dstst (vbi *, const int, const int);
+ DSTST DSTST_VBI
+ void __builtin_vec_dstst (vf *, const int, const int);
+ DSTST DSTST_VF
+
+[VEC_DSTSTT, vec_dststt, __builtin_vec_dststt]
+ void __builtin_vec_dststt (unsigned char *, const int, const int);
+ DSTSTT DSTSTT_UC
+ void __builtin_vec_dststt (signed char *, const int, const int);
+ DSTSTT DSTSTT_SC
+ void __builtin_vec_dststt (unsigned short *, const int, const int);
+ DSTSTT DSTSTT_US
+ void __builtin_vec_dststt (signed short *, const int, const int);
+ DSTSTT DSTSTT_SS
+ void __builtin_vec_dststt (unsigned int *, const int, const int);
+ DSTSTT DSTSTT_UI
+ void __builtin_vec_dststt (signed int *, const int, const int);
+ DSTSTT DSTSTT_SI
+ void __builtin_vec_dststt (unsigned long long *, const int, const int);
+ DSTSTT DSTSTT_ULL
+ void __builtin_vec_dststt (signed long long *, const int, const int);
+ DSTSTT DSTSTT_SLL
+ void __builtin_vec_dststt (float *, const int, const int);
+ DSTSTT DSTSTT_F
+ void __builtin_vec_dststt (vuc *, const int, const int);
+ DSTSTT DSTSTT_VUC
+ void __builtin_vec_dststt (vsc *, const int, const int);
+ DSTSTT DSTSTT_VSC
+ void __builtin_vec_dststt (vbc *, const int, const int);
+ DSTSTT DSTSTT_VBC
+ void __builtin_vec_dststt (vus *, const int, const int);
+ DSTSTT DSTSTT_VUS
+ void __builtin_vec_dststt (vss *, const int, const int);
+ DSTSTT DSTSTT_VSS
+ void __builtin_vec_dststt (vbs *, const int, const int);
+ DSTSTT DSTSTT_VBS
+ void __builtin_vec_dststt (vp *, const int, const int);
+ DSTSTT DSTSTT_VP
+ void __builtin_vec_dststt (vui *, const int, const int);
+ DSTSTT DSTSTT_VUI
+ void __builtin_vec_dststt (vsi *, const int, const int);
+ DSTSTT DSTSTT_VSI
+ void __builtin_vec_dststt (vbi *, const int, const int);
+ DSTSTT DSTSTT_VBI
+ void __builtin_vec_dststt (vf *, const int, const int);
+ DSTSTT DSTSTT_VF
+
+[VEC_DSTT, vec_dstt, __builtin_vec_dstt]
+ void __builtin_vec_dstt (unsigned char *, const int, const int);
+ DSTT DSTT_UC
+ void __builtin_vec_dstt (signed char *, const int, const int);
+ DSTT DSTT_SC
+ void __builtin_vec_dstt (unsigned short *, const int, const int);
+ DSTT DSTT_US
+ void __builtin_vec_dstt (signed short *, const int, const int);
+ DSTT DSTT_SS
+ void __builtin_vec_dstt (unsigned int *, const int, const int);
+ DSTT DSTT_UI
+ void __builtin_vec_dstt (signed int *, const int, const int);
+ DSTT DSTT_SI
+ void __builtin_vec_dstt (unsigned long long *, const int, const int);
+ DSTT DSTT_ULL
+ void __builtin_vec_dstt (signed long long *, const int, const int);
+ DSTT DSTT_SLL
+ void __builtin_vec_dstt (float *, const int, const int);
+ DSTT DSTT_F
+ void __builtin_vec_dstt (vuc *, const int, const int);
+ DSTT DSTT_VUC
+ void __builtin_vec_dstt (vsc *, const int, const int);
+ DSTT DSTT_VSC
+ void __builtin_vec_dstt (vbc *, const int, const int);
+ DSTT DSTT_VBC
+ void __builtin_vec_dstt (vus *, const int, const int);
+ DSTT DSTT_VUS
+ void __builtin_vec_dstt (vss *, const int, const int);
+ DSTT DSTT_VSS
+ void __builtin_vec_dstt (vbs *, const int, const int);
+ DSTT DSTT_VBS
+ void __builtin_vec_dstt (vp *, const int, const int);
+ DSTT DSTT_VP
+ void __builtin_vec_dstt (vui *, const int, const int);
+ DSTT DSTT_VUI
+ void __builtin_vec_dstt (vsi *, const int, const int);
+ DSTT DSTT_VSI
+ void __builtin_vec_dstt (vbi *, const int, const int);
+ DSTT DSTT_VBI
+ void __builtin_vec_dstt (vf *, const int, const int);
+ DSTT DSTT_VF
+
+[VEC_EQV, vec_eqv, __builtin_vec_eqv, _ARCH_PWR8]
+ vsc __builtin_vec_eqv (vsc, vsc);
+ EQV_V16QI
+ vuc __builtin_vec_eqv (vuc, vuc);
+ EQV_V16QI_UNS EQV_V16QI_VUC
+ vbc __builtin_vec_eqv (vbc, vbc);
+ EQV_V16QI_UNS EQV_V16QI_VBC
+ vss __builtin_vec_eqv (vss, vss);
+ EQV_V8HI
+ vus __builtin_vec_eqv (vus, vus);
+ EQV_V8HI_UNS EQV_V8HI_VUS
+ vbs __builtin_vec_eqv (vbs, vbs);
+ EQV_V8HI_UNS EQV_V8HI_VBS
+ vsi __builtin_vec_eqv (vsi, vsi);
+ EQV_V4SI
+ vui __builtin_vec_eqv (vui, vui);
+ EQV_V4SI_UNS EQV_V4SI_VUI
+ vbi __builtin_vec_eqv (vbi, vbi);
+ EQV_V4SI_UNS EQV_V4SI_VBI
+ vsll __builtin_vec_eqv (vsll, vsll);
+ EQV_V2DI
+ vull __builtin_vec_eqv (vull, vull);
+ EQV_V2DI_UNS EQV_V2DI_VULL
+ vbll __builtin_vec_eqv (vbll, vbll);
+ EQV_V2DI_UNS EQV_V2DI_VBLL
+ vf __builtin_vec_eqv (vf, vf);
+ EQV_V4SF
+ vd __builtin_vec_eqv (vd, vd);
+ EQV_V2DF
+
+[VEC_EXPANDM, vec_expandm, __builtin_vec_vexpandm, _ARCH_PWR10]
+ vuc __builtin_vec_vexpandm (vuc);
+ VEXPANDMB
+ vus __builtin_vec_vexpandm (vus);
+ VEXPANDMH
+ vui __builtin_vec_vexpandm (vui);
+ VEXPANDMW
+ vull __builtin_vec_vexpandm (vull);
+ VEXPANDMD
+ vuq __builtin_vec_vexpandm (vuq);
+ VEXPANDMQ
+
+[VEC_EXPTE, vec_expte, __builtin_vec_expte]
+ vf __builtin_vec_expte (vf);
+ VEXPTEFP
+
+[VEC_EXTRACTM, vec_extractm, __builtin_vec_vextractm, _ARCH_PWR10]
+ signed int __builtin_vec_vextractm (vuc);
+ VEXTRACTMB
+ signed int __builtin_vec_vextractm (vus);
+ VEXTRACTMH
+ signed int __builtin_vec_vextractm (vui);
+ VEXTRACTMW
+ signed int __builtin_vec_vextractm (vull);
+ VEXTRACTMD
+ signed int __builtin_vec_vextractm (vuq);
+ VEXTRACTMQ
+
+[VEC_EXTRACT_FP_FROM_SHORTH, vec_extract_fp32_from_shorth, __builtin_vec_vextract_fp_from_shorth, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shorth (vus);
+ VEXTRACT_FP_FROM_SHORTH
+
+[VEC_EXTRACT_FP_FROM_SHORTL, vec_extract_fp32_from_shortl, __builtin_vec_vextract_fp_from_shortl, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shortl (vus);
+ VEXTRACT_FP_FROM_SHORTL
+
+[VEC_EXTRACTH, vec_extracth, __builtin_vec_extracth, _ARCH_PWR10]
+ vull __builtin_vec_extracth (vuc, vuc, unsigned char);
+ VEXTRACTBR
+ vull __builtin_vec_extracth (vus, vus, unsigned char);
+ VEXTRACTHR
+ vull __builtin_vec_extracth (vui, vui, unsigned char);
+ VEXTRACTWR
+ vull __builtin_vec_extracth (vull, vull, unsigned char);
+ VEXTRACTDR
+
+[VEC_EXTRACTL, vec_extractl, __builtin_vec_extractl, _ARCH_PWR10]
+ vull __builtin_vec_extractl (vuc, vuc, unsigned char);
+ VEXTRACTBL
+ vull __builtin_vec_extractl (vus, vus, unsigned char);
+ VEXTRACTHL
+ vull __builtin_vec_extractl (vui, vui, unsigned char);
+ VEXTRACTWL
+ vull __builtin_vec_extractl (vull, vull, unsigned char);
+ VEXTRACTDL
+
+[VEC_EXTRACT4B, vec_extract4b, __builtin_vec_extract4b, _ARCH_PWR9]
+ vull __builtin_vec_extract4b (vuc, const int);
+ EXTRACT4B
+
+; TODO: Looks like there are invalid return type conversions here (present
+; before also).
+; Need versions of __builtin_altivec_vextu*[lr]x that return signed values
+; and floats.
+[VEC_EXTULX, vec_xlx, __builtin_vec_vextulx, _ARCH_PWR9]
+ signed char __builtin_vec_vextulx (unsigned int, vsc);
+ VEXTUBLX VEXTUBLX_S
+ unsigned char __builtin_vec_vextulx (unsigned int, vuc);
+ VEXTUBLX VEXTUBLX_U
+ signed short __builtin_vec_vextulx (unsigned int, vss);
+ VEXTUHLX VEXTUHLX_S
+ unsigned short __builtin_vec_vextulx (unsigned int, vus);
+ VEXTUHLX VEXTUHLX_U
+ signed int __builtin_vec_vextulx (unsigned int, vsi);
+ VEXTUWLX VEXTUWLX_S
+ unsigned int __builtin_vec_vextulx (unsigned int, vui);
+ VEXTUWLX VEXTUWLX_U
+ float __builtin_vec_vextulx (unsigned int, vf);
+ VEXTUWLX VEXTUWLX_F
+
+; TODO: Looks like there are invalid return type conversions here (present
+; before also).
+; Need versions of __builtin_altivec_vextu*[lr]x that return signed values
+; and floats.
+[VEC_EXTURX, vec_xrx, __builtin_vec_vexturx, _ARCH_PWR9]
+ signed char __builtin_vec_vexturx (unsigned int, vsc);
+ VEXTUBRX VEXTUBRX_S
+ unsigned char __builtin_vec_vexturx (unsigned int, vuc);
+ VEXTUBRX VEXTUBRX_U
+ signed short __builtin_vec_vexturx (unsigned int, vss);
+ VEXTUHRX VEXTUHRX_S
+ unsigned short __builtin_vec_vexturx (unsigned int, vus);
+ VEXTUHRX VEXTUHRX_U
+ signed int __builtin_vec_vexturx (unsigned int, vsi);
+ VEXTUWRX VEXTUWRX_S
+ unsigned int __builtin_vec_vexturx (unsigned int, vui);
+ VEXTUWRX VEXTUWRX_U
+ float __builtin_vec_vexturx (unsigned int, vf);
+ VEXTUWRX VEXTUWRX_F
+
+[VEC_FIRSTMATCHINDEX, vec_first_match_index, __builtin_vec_first_match_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_index (vsc, vsc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_match_index (vuc, vuc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_match_index (vss, vss);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_match_index (vus, vus);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_match_index (vsi, vsi);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_match_index (vui, vui);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VUI
+
+[VEC_FIRSTMATCHOREOSINDEX, vec_first_match_or_eos_index, __builtin_vec_first_match_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_or_eos_index (vsc, vsc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_match_or_eos_index (vuc, vuc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_match_or_eos_index (vss, vss);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_match_or_eos_index (vus, vus);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_match_or_eos_index (vsi, vsi);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_match_or_eos_index (vui, vui);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VUI
+
+[VEC_FIRSTMISMATCHINDEX, vec_first_mismatch_index, __builtin_vec_first_mismatch_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_index (vsc, vsc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_index (vuc, vuc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_index (vss, vss);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_index (vus, vus);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_index (vsi, vsi);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_index (vui, vui);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VUI
+
+[VEC_FIRSTMISMATCHOREOSINDEX, vec_first_mismatch_or_eos_index, __builtin_vec_first_mismatch_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsc, vsc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vuc, vuc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vss, vss);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vus, vus);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsi, vsi);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vui, vui);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VUI
+
+[VEC_FLOAT, vec_float, __builtin_vec_float]
+ vf __builtin_vec_float (vsi);
+ XVCVSXWSP_V4SF
+ vf __builtin_vec_float (vui);
+ XVCVUXWSP_V4SF
+
+[VEC_FLOAT2, vec_float2, __builtin_vec_float2]
+ vf __builtin_vec_float2 (vsll, vsll);
+ FLOAT2_V2DI
+ vf __builtin_vec_float2 (vull, vull);
+ UNS_FLOAT2_V2DI
+ vf __builtin_vec_float2 (vd, vd);
+ FLOAT2_V2DF
+
+[VEC_FLOATE, vec_floate, __builtin_vec_floate]
+ vf __builtin_vec_floate (vsll);
+ FLOATE_V2DI
+ vf __builtin_vec_floate (vull);
+ UNS_FLOATE_V2DI
+ vf __builtin_vec_floate (vd);
+ FLOATE_V2DF
+
+[VEC_FLOATO, vec_floato, __builtin_vec_floato]
+ vf __builtin_vec_floato (vsll);
+ FLOATO_V2DI
+ vf __builtin_vec_floato (vull);
+ UNS_FLOATO_V2DI
+ vf __builtin_vec_floato (vd);
+ FLOATO_V2DF
+
+[VEC_FLOOR, vec_floor, __builtin_vec_floor]
+ vf __builtin_vec_floor (vf);
+ XVRSPIM
+ vd __builtin_vec_floor (vd);
+ XVRDPIM
+
+[VEC_GB, vec_gb, __builtin_vec_vgbbd, _ARCH_PWR8]
+ vsc __builtin_vec_vgbbd (vsc);
+ VGBBD VGBBD_S
+ vuc __builtin_vec_vgbbd (vuc);
+ VGBBD VGBBD_U
+
+[VEC_GENBM, vec_genbm, __builtin_vec_mtvsrbm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrbm (unsigned long long);
+ MTVSRBM
+
+[VEC_GENHM, vec_genhm, __builtin_vec_mtvsrhm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrhm (unsigned long long);
+ MTVSRHM
+
+[VEC_GENWM, vec_genwm, __builtin_vec_mtvsrwm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrwm (unsigned long long);
+ MTVSRWM
+
+[VEC_GENDM, vec_gendm, __builtin_vec_mtvsrdm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrdm (unsigned long long);
+ MTVSRDM
+
+[VEC_GENQM, vec_genqm, __builtin_vec_mtvsrqm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrqm (unsigned long long);
+ MTVSRQM
+
+[VEC_GENPCVM, vec_genpcvm, __builtin_vec_xxgenpcvm, _ARCH_PWR10]
+ vuc __builtin_vec_xxgenpcvm (vuc, const int);
+ XXGENPCVM_V16QI
+ vus __builtin_vec_xxgenpcvm (vus, const int);
+ XXGENPCVM_V8HI
+ vui __builtin_vec_xxgenpcvm (vui, const int);
+ XXGENPCVM_V4SI
+ vull __builtin_vec_xxgenpcvm (vull, const int);
+ XXGENPCVM_V2DI
+
+[VEC_GNB, vec_gnb, __builtin_vec_gnb, _ARCH_PWR10]
+ vull __builtin_vec_gnb (vuq, unsigned char);
+ VGNB
+
+[VEC_INSERTH, vec_inserth, __builtin_vec_inserth, _ARCH_PWR10]
+ vuc __builtin_vec_inserth (unsigned char, vuc, unsigned int);
+ VINSERTGPRBR
+ vuc __builtin_vec_inserth (vuc, vuc, unsigned int);
+ VINSERTVPRBR
+ vus __builtin_vec_inserth (unsigned short, vus, unsigned int);
+ VINSERTGPRHR
+ vus __builtin_vec_inserth (vus, vus, unsigned int);
+ VINSERTVPRHR
+ vui __builtin_vec_inserth (unsigned int, vui, unsigned int);
+ VINSERTGPRWR
+ vui __builtin_vec_inserth (vui, vui, unsigned int);
+ VINSERTVPRWR
+ vull __builtin_vec_inserth (unsigned long long, vull, unsigned int);
+ VINSERTGPRDR
+
+[VEC_INSERTL, vec_insertl, __builtin_vec_insertl, _ARCH_PWR10]
+ vuc __builtin_vec_insertl (unsigned char, vuc, unsigned int);
+ VINSERTGPRBL
+ vuc __builtin_vec_insertl (vuc, vuc, unsigned int);
+ VINSERTVPRBL
+ vus __builtin_vec_insertl (unsigned short, vus, unsigned int);
+ VINSERTGPRHL
+ vus __builtin_vec_insertl (vus, vus, unsigned int);
+ VINSERTVPRHL
+ vui __builtin_vec_insertl (unsigned int, vui, unsigned int);
+ VINSERTGPRWL
+ vui __builtin_vec_insertl (vui, vui, unsigned int);
+ VINSERTVPRWL
+ vull __builtin_vec_insertl (unsigned long long, vull, unsigned int);
+ VINSERTGPRDL
+
+[VEC_INSERT4B, vec_insert4b, __builtin_vec_insert4b, _ARCH_PWR9]
+ vuc __builtin_vec_insert4b (vsi, vuc, const int);
+ INSERT4B INSERT4B_S
+ vuc __builtin_vec_insert4b (vui, vuc, const int);
+ INSERT4B INSERT4B_U
+
+[VEC_LD, vec_ld, __builtin_vec_ld]
+ vsc __builtin_vec_ld (signed long long, vsc *);
+ LVX_V16QI LVX_V16QI_VSC
+ vsc __builtin_vec_ld (signed long long, signed char *);
+ LVX_V16QI LVX_V16QI_SC
+ vuc __builtin_vec_ld (signed long long, vuc *);
+ LVX_V16QI LVX_V16QI_VUC
+ vuc __builtin_vec_ld (signed long long, unsigned char *);
+ LVX_V16QI LVX_V16QI_UC
+ vbc __builtin_vec_ld (signed long long, vbc *);
+ LVX_V16QI LVX_V16QI_VBC
+ vss __builtin_vec_ld (signed long long, vss *);
+ LVX_V8HI LVX_V8HI_VSS
+ vss __builtin_vec_ld (signed long long, signed short *);
+ LVX_V8HI LVX_V8HI_SS
+ vus __builtin_vec_ld (signed long long, vus *);
+ LVX_V8HI LVX_V8HI_VUS
+ vus __builtin_vec_ld (signed long long, unsigned short *);
+ LVX_V8HI LVX_V8HI_US
+ vbs __builtin_vec_ld (signed long long, vbs *);
+ LVX_V8HI LVX_V8HI_VBS
+ vp __builtin_vec_ld (signed long long, vp *);
+ LVX_V8HI LVX_V8HI_VP
+ vsi __builtin_vec_ld (signed long long, vsi *);
+ LVX_V4SI LVX_V4SI_VSI
+ vsi __builtin_vec_ld (signed long long, signed int *);
+ LVX_V4SI LVX_V4SI_SI
+ vui __builtin_vec_ld (signed long long, vui *);
+ LVX_V4SI LVX_V4SI_VUI
+ vui __builtin_vec_ld (signed long long, unsigned int *);
+ LVX_V4SI LVX_V4SI_UI
+ vbi __builtin_vec_ld (signed long long, vbi *);
+ LVX_V4SI LVX_V4SI_VBI
+ vsll __builtin_vec_ld (signed long long, vsll *);
+ LVX_V2DI LVX_V2DI_VSLL
+ vsll __builtin_vec_ld (signed long long, signed long long *);
+ LVX_V2DI LVX_V2DI_SLL
+ vull __builtin_vec_ld (signed long long, vull *);
+ LVX_V2DI LVX_V2DI_VULL
+ vull __builtin_vec_ld (signed long long, unsigned long long *);
+ LVX_V2DI LVX_V2DI_ULL
+ vbll __builtin_vec_ld (signed long long, vbll *);
+ LVX_V2DI LVX_V2DI_VBLL
+ vsq __builtin_vec_ld (signed long long, const vsq *);
+ LVX_V1TI LVX_V1TI_VSQ
+ vuq __builtin_vec_ld (signed long long, const vuq *);
+ LVX_V1TI LVX_V1TI_VUQ
+ vsq __builtin_vec_ld (signed long long, __int128 *);
+ LVX_V1TI LVX_V1TI_TI
+ vuq __builtin_vec_ld (signed long long, unsigned __int128 *);
+ LVX_V1TI LVX_V1TI_UTI
+ vf __builtin_vec_ld (signed long long, vf *);
+ LVX_V4SF LVX_V4SF_VF
+ vf __builtin_vec_ld (signed long long, float *);
+ LVX_V4SF LVX_V4SF_F
+ vd __builtin_vec_ld (signed long long, vd *);
+ LVX_V2DF LVX_V2DF_VD
+ vd __builtin_vec_ld (signed long long, double *);
+ LVX_V2DF LVX_V2DF_D
+
+[VEC_LDE, vec_lde, __builtin_vec_lde]
+ vsc __builtin_vec_lde (signed long long, signed char *);
+ LVEBX LVEBX_SC
+ vuc __builtin_vec_lde (signed long long, unsigned char *);
+ LVEBX LVEBX_UC
+ vss __builtin_vec_lde (signed long long, signed short *);
+ LVEHX LVEHX_SS
+ vus __builtin_vec_lde (signed long long, unsigned short *);
+ LVEHX LVEHX_US
+ vsi __builtin_vec_lde (signed long long, signed int *);
+ LVEWX LVEWX_SI
+ vui __builtin_vec_lde (signed long long, unsigned int *);
+ LVEWX LVEWX_UI
+ vf __builtin_vec_lde (signed long long, float *);
+ LVEWX LVEWX_F
+
+[VEC_LDL, vec_ldl, __builtin_vec_ldl]
+ vsc __builtin_vec_ldl (signed long long, vsc *);
+ LVXL_V16QI LVXL_V16QI_VSC
+ vsc __builtin_vec_ldl (signed long long, signed char *);
+ LVXL_V16QI LVXL_V16QI_SC
+ vuc __builtin_vec_ldl (signed long long, vuc *);
+ LVXL_V16QI LVXL_V16QI_VUC
+ vuc __builtin_vec_ldl (signed long long, unsigned char *);
+ LVXL_V16QI LVXL_V16QI_UC
+ vbc __builtin_vec_ldl (signed long long, vbc *);
+ LVXL_V16QI LVXL_V16QI_VBC
+ vss __builtin_vec_ldl (signed long long, vss *);
+ LVXL_V8HI LVXL_V8HI_VSS
+ vss __builtin_vec_ldl (signed long long, signed short *);
+ LVXL_V8HI LVXL_V8HI_SS
+ vus __builtin_vec_ldl (signed long long, vus *);
+ LVXL_V8HI LVXL_V8HI_VUS
+ vus __builtin_vec_ldl (signed long long, unsigned short *);
+ LVXL_V8HI LVXL_V8HI_US
+ vbs __builtin_vec_ldl (signed long long, vbs *);
+ LVXL_V8HI LVXL_V8HI_VBS
+ vp __builtin_vec_ldl (signed long long, vp *);
+ LVXL_V8HI LVXL_V8HI_VP
+ vsi __builtin_vec_ldl (signed long long, vsi *);
+ LVXL_V4SI LVXL_V4SI_VSI
+ vsi __builtin_vec_ldl (signed long long, signed int *);
+ LVXL_V4SI LVXL_V4SI_SI
+ vui __builtin_vec_ldl (signed long long, vui *);
+ LVXL_V4SI LVXL_V4SI_VUI
+ vui __builtin_vec_ldl (signed long long, unsigned int *);
+ LVXL_V4SI LVXL_V4SI_UI
+ vbi __builtin_vec_ldl (signed long long, vbi *);
+ LVXL_V4SI LVXL_V4SI_VBI
+ vsll __builtin_vec_ldl (signed long long, vsll *);
+ LVXL_V2DI LVXL_V2DI_VSLL
+ vsll __builtin_vec_ldl (signed long long, signed long long *);
+ LVXL_V2DI LVXL_V2DI_SLL
+ vull __builtin_vec_ldl (signed long long, vull *);
+ LVXL_V2DI LVXL_V2DI_VULL
+ vull __builtin_vec_ldl (signed long long, unsigned long long *);
+ LVXL_V2DI LVXL_V2DI_ULL
+ vbll __builtin_vec_ldl (signed long long, vbll *);
+ LVXL_V2DI LVXL_V2DI_VBLL
+ vsq __builtin_vec_ldl (signed long long, const vsq *);
+ LVXL_V1TI LVXL_V1TI_VSQ
+ vuq __builtin_vec_ldl (signed long long, const vuq *);
+ LVXL_V1TI LVXL_V1TI_VUQ
+ vsq __builtin_vec_ldl (signed long long, __int128 *);
+ LVXL_V1TI LVXL_V1TI_TI
+ vuq __builtin_vec_ldl (signed long long, unsigned __int128 *);
+ LVXL_V1TI LVXL_V1TI_UTI
+ vf __builtin_vec_ldl (signed long long, vf *);
+ LVXL_V4SF LVXL_V4SF_VF
+ vf __builtin_vec_ldl (signed long long, float *);
+ LVXL_V4SF LVXL_V4SF_F
+ vd __builtin_vec_ldl (signed long long, vd *);
+ LVXL_V2DF LVXL_V2DF_VD
+ vd __builtin_vec_ldl (signed long long, double *);
+ LVXL_V2DF LVXL_V2DF_D
+
+[VEC_LOGE, vec_loge, __builtin_vec_loge]
+ vf __builtin_vec_loge (vf);
+ VLOGEFP
+
+[VEC_LVLX, vec_lvlx, __builtin_vec_lvlx, __PPU__]
+ vbc __builtin_vec_lvlx (signed long long, vbc *);
+ LVLX LVLX_VBC
+ vsc __builtin_vec_lvlx (signed long long, vsc *);
+ LVLX LVLX_VSC
+ vsc __builtin_vec_lvlx (signed long long, signed char *);
+ LVLX LVLX_SC
+ vuc __builtin_vec_lvlx (signed long long, vuc *);
+ LVLX LVLX_VUC
+ vuc __builtin_vec_lvlx (signed long long, unsigned char *);
+ LVLX LVLX_UC
+ vbs __builtin_vec_lvlx (signed long long, vbs *);
+ LVLX LVLX_VBS
+ vss __builtin_vec_lvlx (signed long long, vss *);
+ LVLX LVLX_VSS
+ vss __builtin_vec_lvlx (signed long long, signed short *);
+ LVLX LVLX_SS
+ vus __builtin_vec_lvlx (signed long long, vus *);
+ LVLX LVLX_VUS
+ vus __builtin_vec_lvlx (signed long long, unsigned short *);
+ LVLX LVLX_US
+ vp __builtin_vec_lvlx (signed long long, vp *);
+ LVLX LVLX_VP
+ vbi __builtin_vec_lvlx (signed long long, vbi *);
+ LVLX LVLX_VBI
+ vsi __builtin_vec_lvlx (signed long long, vsi *);
+ LVLX LVLX_VSI
+ vsi __builtin_vec_lvlx (signed long long, signed int *);
+ LVLX LVLX_SI
+ vui __builtin_vec_lvlx (signed long long, vui *);
+ LVLX LVLX_VUI
+ vui __builtin_vec_lvlx (signed long long, unsigned int *);
+ LVLX LVLX_UI
+ vf __builtin_vec_lvlx (signed long long, vf *);
+ LVLX LVLX_VF
+ vf __builtin_vec_lvlx (signed long long, float *);
+ LVLX LVLX_F
+
+[VEC_LVLXL, vec_lvlxl, __builtin_vec_lvlxl, __PPU__]
+ vbc __builtin_vec_lvlxl (signed long long, vbc *);
+ LVLXL LVLXL_VBC
+ vsc __builtin_vec_lvlxl (signed long long, vsc *);
+ LVLXL LVLXL_VSC
+ vsc __builtin_vec_lvlxl (signed long long, signed char *);
+ LVLXL LVLXL_SC
+ vuc __builtin_vec_lvlxl (signed long long, vuc *);
+ LVLXL LVLXL_VUC
+ vuc __builtin_vec_lvlxl (signed long long, unsigned char *);
+ LVLXL LVLXL_UC
+ vbs __builtin_vec_lvlxl (signed long long, vbs *);
+ LVLXL LVLXL_VBS
+ vss __builtin_vec_lvlxl (signed long long, vss *);
+ LVLXL LVLXL_VSS
+ vss __builtin_vec_lvlxl (signed long long, signed short *);
+ LVLXL LVLXL_SS
+ vus __builtin_vec_lvlxl (signed long long, vus *);
+ LVLXL LVLXL_VUS
+ vus __builtin_vec_lvlxl (signed long long, unsigned short *);
+ LVLXL LVLXL_US
+ vp __builtin_vec_lvlxl (signed long long, vp *);
+ LVLXL LVLXL_VP
+ vbi __builtin_vec_lvlxl (signed long long, vbi *);
+ LVLXL LVLXL_VBI
+ vsi __builtin_vec_lvlxl (signed long long, vsi *);
+ LVLXL LVLXL_VSI
+ vsi __builtin_vec_lvlxl (signed long long, signed int *);
+ LVLXL LVLXL_SI
+ vui __builtin_vec_lvlxl (signed long long, vui *);
+ LVLXL LVLXL_VUI
+ vui __builtin_vec_lvlxl (signed long long, unsigned int *);
+ LVLXL LVLXL_UI
+ vf __builtin_vec_lvlxl (signed long long, vf *);
+ LVLXL LVLXL_VF
+ vf __builtin_vec_lvlxl (signed long long, float *);
+ LVLXL LVLXL_F
+
+[VEC_LVRX, vec_lvrx, __builtin_vec_lvrx, __PPU__]
+ vbc __builtin_vec_lvrx (signed long long, vbc *);
+ LVRX LVRX_VBC
+ vsc __builtin_vec_lvrx (signed long long, vsc *);
+ LVRX LVRX_VSC
+ vsc __builtin_vec_lvrx (signed long long, signed char *);
+ LVRX LVRX_SC
+ vuc __builtin_vec_lvrx (signed long long, vuc *);
+ LVRX LVRX_VUC
+ vuc __builtin_vec_lvrx (signed long long, unsigned char *);
+ LVRX LVRX_UC
+ vbs __builtin_vec_lvrx (signed long long, vbs *);
+ LVRX LVRX_VBS
+ vss __builtin_vec_lvrx (signed long long, vss *);
+ LVRX LVRX_VSS
+ vss __builtin_vec_lvrx (signed long long, signed short *);
+ LVRX LVRX_SS
+ vus __builtin_vec_lvrx (signed long long, vus *);
+ LVRX LVRX_VUS
+ vus __builtin_vec_lvrx (signed long long, unsigned short *);
+ LVRX LVRX_US
+ vp __builtin_vec_lvrx (signed long long, vp *);
+ LVRX LVRX_VP
+ vbi __builtin_vec_lvrx (signed long long, vbi *);
+ LVRX LVRX_VBI
+ vsi __builtin_vec_lvrx (signed long long, vsi *);
+ LVRX LVRX_VSI
+ vsi __builtin_vec_lvrx (signed long long, signed int *);
+ LVRX LVRX_SI
+ vui __builtin_vec_lvrx (signed long long, vui *);
+ LVRX LVRX_VUI
+ vui __builtin_vec_lvrx (signed long long, unsigned int *);
+ LVRX LVRX_UI
+ vf __builtin_vec_lvrx (signed long long, vf *);
+ LVRX LVRX_VF
+ vf __builtin_vec_lvrx (signed long long, float *);
+ LVRX LVRX_F
+
+[VEC_LVRXL, vec_lvrxl, __builtin_vec_lvrxl, __PPU__]
+ vbc __builtin_vec_lvrxl (signed long long, vbc *);
+ LVRXL LVRXL_VBC
+ vsc __builtin_vec_lvrxl (signed long long, vsc *);
+ LVRXL LVRXL_VSC
+ vsc __builtin_vec_lvrxl (signed long long, signed char *);
+ LVRXL LVRXL_SC
+ vuc __builtin_vec_lvrxl (signed long long, vuc *);
+ LVRXL LVRXL_VUC
+ vuc __builtin_vec_lvrxl (signed long long, unsigned char *);
+ LVRXL LVRXL_UC
+ vbs __builtin_vec_lvrxl (signed long long, vbs *);
+ LVRXL LVRXL_VBS
+ vss __builtin_vec_lvrxl (signed long long, vss *);
+ LVRXL LVRXL_VSS
+ vss __builtin_vec_lvrxl (signed long long, signed short *);
+ LVRXL LVRXL_SS
+ vus __builtin_vec_lvrxl (signed long long, vus *);
+ LVRXL LVRXL_VUS
+ vus __builtin_vec_lvrxl (signed long long, unsigned short *);
+ LVRXL LVRXL_US
+ vp __builtin_vec_lvrxl (signed long long, vp *);
+ LVRXL LVRXL_VP
+ vbi __builtin_vec_lvrxl (signed long long, vbi *);
+ LVRXL LVRXL_VBI
+ vsi __builtin_vec_lvrxl (signed long long, vsi *);
+ LVRXL LVRXL_VSI
+ vsi __builtin_vec_lvrxl (signed long long, signed int *);
+ LVRXL LVRXL_SI
+ vui __builtin_vec_lvrxl (signed long long, vui *);
+ LVRXL LVRXL_VUI
+ vui __builtin_vec_lvrxl (signed long long, unsigned int *);
+ LVRXL LVRXL_UI
+ vf __builtin_vec_lvrxl (signed long long, vf *);
+ LVRXL LVRXL_VF
+ vf __builtin_vec_lvrxl (signed long long, float *);
+ LVRXL LVRXL_F
+
+[VEC_LVSL, vec_lvsl, __builtin_vec_lvsl]
+ vuc __builtin_vec_lvsl (signed long long, unsigned char *);
+ LVSL LVSL_UC
+ vuc __builtin_vec_lvsl (signed long long, signed char *);
+ LVSL LVSL_SC
+
+[VEC_LVSR, vec_lvsr, __builtin_vec_lvsr]
+ vuc __builtin_vec_lvsr (signed long long, unsigned char *);
+ LVSR LVSR_UC
+ vuc __builtin_vec_lvsr (signed long long, signed char *);
+ LVSR LVSR_SC
+
+[VEC_LXVL, vec_xl_len, __builtin_vec_lxvl, _ARCH_PPC64_PWR9]
+ vsc __builtin_vec_lxvl (signed char *, unsigned long long);
+ LXVL LXVL_VSC
+ vuc __builtin_vec_lxvl (unsigned char *, unsigned long long);
+ LXVL LXVL_VUC
+ vss __builtin_vec_lxvl (signed short *, unsigned long long);
+ LXVL LXVL_VSS
+ vus __builtin_vec_lxvl (unsigned short *, unsigned long long);
+ LXVL LXVL_VUS
+ vsi __builtin_vec_lxvl (signed int *, unsigned long long);
+ LXVL LXVL_VSI
+ vui __builtin_vec_lxvl (unsigned int *, unsigned long long);
+ LXVL LXVL_VUI
+ vsll __builtin_vec_lxvl (signed long long *, unsigned long long);
+ LXVL LXVL_VSLL
+ vull __builtin_vec_lxvl (unsigned long long *, unsigned long long);
+ LXVL LXVL_VULL
+ vsq __builtin_vec_lxvl (signed __int128 *, unsigned long long);
+ LXVL LXVL_VSQ
+ vuq __builtin_vec_lxvl (unsigned __int128 *, unsigned long long);
+ LXVL LXVL_VUQ
+ vf __builtin_vec_lxvl (float *, unsigned long long);
+ LXVL LXVL_VF
+ vd __builtin_vec_lxvl (double *, unsigned long long);
+ LXVL LXVL_VD
+
+[VEC_MADD, vec_madd, __builtin_vec_madd]
+ vss __builtin_vec_madd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS
+ vss __builtin_vec_madd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS
+ vss __builtin_vec_madd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS
+  vus __builtin_vec_madd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS
+ vf __builtin_vec_madd (vf, vf, vf);
+ XVMADDSP
+ vd __builtin_vec_madd (vd, vd, vd);
+ XVMADDDP
+
+[VEC_MADDS, vec_madds, __builtin_vec_madds]
+ vss __builtin_vec_madds (vss, vss, vss);
+ VMHADDSHS
+
+[VEC_MAX, vec_max, __builtin_vec_max]
+ vsc __builtin_vec_max (vsc, vsc);
+ VMAXSB
+ vuc __builtin_vec_max (vuc, vuc);
+ VMAXUB
+ vss __builtin_vec_max (vss, vss);
+ VMAXSH
+ vus __builtin_vec_max (vus, vus);
+ VMAXUH
+ vsi __builtin_vec_max (vsi, vsi);
+ VMAXSW
+ vui __builtin_vec_max (vui, vui);
+ VMAXUW
+ vsll __builtin_vec_max (vsll, vsll);
+ VMAXSD
+ vull __builtin_vec_max (vull, vull);
+ VMAXUD
+ vf __builtin_vec_max (vf, vf);
+ XVMAXSP
+ vd __builtin_vec_max (vd, vd);
+ XVMAXDP
+
+[VEC_MERGEE, vec_mergee, __builtin_vec_vmrgew]
+ vsi __builtin_vec_vmrgew (vsi, vsi);
+ VMRGEW_V4SI VMRGEW_VSI
+ vui __builtin_vec_vmrgew (vui, vui);
+ VMRGEW_V4SI VMRGEW_VUI
+ vbi __builtin_vec_vmrgew (vbi, vbi);
+ VMRGEW_V4SI VMRGEW_VBI
+ vsll __builtin_vec_vmrgew (vsll, vsll);
+ VMRGEW_V2DI VMRGEW_VSLL
+ vull __builtin_vec_vmrgew (vull, vull);
+ VMRGEW_V2DI VMRGEW_VULL
+ vbll __builtin_vec_vmrgew (vbll, vbll);
+ VMRGEW_V2DI VMRGEW_VBLL
+ vf __builtin_vec_vmrgew (vf, vf);
+ VMRGEW_V4SF
+ vd __builtin_vec_vmrgew (vd, vd);
+ VMRGEW_V2DF
+
+[VEC_MERGEH, vec_mergeh, __builtin_vec_mergeh]
+ vbc __builtin_vec_mergeh (vbc, vbc);
+ VMRGHB VMRGHB_VBC
+ vsc __builtin_vec_mergeh (vsc, vsc);
+ VMRGHB VMRGHB_VSC
+ vuc __builtin_vec_mergeh (vuc, vuc);
+ VMRGHB VMRGHB_VUC
+ vbs __builtin_vec_mergeh (vbs, vbs);
+ VMRGHH VMRGHH_VBS
+ vss __builtin_vec_mergeh (vss, vss);
+ VMRGHH VMRGHH_VSS
+ vus __builtin_vec_mergeh (vus, vus);
+ VMRGHH VMRGHH_VUS
+ vp __builtin_vec_mergeh (vp, vp);
+ VMRGHH VMRGHH_VP
+ vbi __builtin_vec_mergeh (vbi, vbi);
+ VMRGHW VMRGHW_VBI
+ vsi __builtin_vec_mergeh (vsi, vsi);
+ VMRGHW VMRGHW_VSI
+ vui __builtin_vec_mergeh (vui, vui);
+ VMRGHW VMRGHW_VUI
+ vbll __builtin_vec_mergeh (vbll, vbll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VBLL
+ vsll __builtin_vec_mergeh (vsll, vsll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VSLL
+ vull __builtin_vec_mergeh (vull, vull);
+ VEC_MERGEH_V2DI VEC_MERGEH_VULL
+ vf __builtin_vec_mergeh (vf, vf);
+ VMRGHW VMRGHW_VF
+ vd __builtin_vec_mergeh (vd, vd);
+ VEC_MERGEH_V2DF
+
+[VEC_MERGEL, vec_mergel, __builtin_vec_mergel]
+ vbc __builtin_vec_mergel (vbc, vbc);
+ VMRGLB VMRGLB_VBC
+ vsc __builtin_vec_mergel (vsc, vsc);
+ VMRGLB VMRGLB_VSC
+ vuc __builtin_vec_mergel (vuc, vuc);
+ VMRGLB VMRGLB_VUC
+ vbs __builtin_vec_mergel (vbs, vbs);
+ VMRGLH VMRGLH_VBS
+ vss __builtin_vec_mergel (vss, vss);
+ VMRGLH VMRGLH_VSS
+ vus __builtin_vec_mergel (vus, vus);
+ VMRGLH VMRGLH_VUS
+ vp __builtin_vec_mergel (vp, vp);
+ VMRGLH VMRGLH_VP
+ vbi __builtin_vec_mergel (vbi, vbi);
+ VMRGLW VMRGLW_VBI
+ vsi __builtin_vec_mergel (vsi, vsi);
+ VMRGLW VMRGLW_VSI
+ vui __builtin_vec_mergel (vui, vui);
+ VMRGLW VMRGLW_VUI
+ vbll __builtin_vec_mergel (vbll, vbll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VBLL
+ vsll __builtin_vec_mergel (vsll, vsll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VSLL
+ vull __builtin_vec_mergel (vull, vull);
+ VEC_MERGEL_V2DI VEC_MERGEL_VULL
+ vf __builtin_vec_mergel (vf, vf);
+ VMRGLW VMRGLW_VF
+ vd __builtin_vec_mergel (vd, vd);
+ VEC_MERGEL_V2DF
+
+[VEC_MERGEO, vec_mergeo, __builtin_vec_vmrgow]
+ vsi __builtin_vec_vmrgow (vsi, vsi);
+ VMRGOW_V4SI VMRGOW_VSI
+ vui __builtin_vec_vmrgow (vui, vui);
+ VMRGOW_V4SI VMRGOW_VUI
+ vbi __builtin_vec_vmrgow (vbi, vbi);
+ VMRGOW_V4SI VMRGOW_VBI
+ vsll __builtin_vec_vmrgow (vsll, vsll);
+ VMRGOW_V2DI VMRGOW_VSLL
+ vull __builtin_vec_vmrgow (vull, vull);
+ VMRGOW_V2DI VMRGOW_VULL
+ vbll __builtin_vec_vmrgow (vbll, vbll);
+ VMRGOW_V2DI VMRGOW_VBLL
+ vf __builtin_vec_vmrgow (vf, vf);
+ VMRGOW_V4SF
+ vd __builtin_vec_vmrgow (vd, vd);
+ VMRGOW_V2DF
+
+[VEC_MIN, vec_min, __builtin_vec_min]
+ vsc __builtin_vec_min (vsc, vsc);
+ VMINSB
+ vuc __builtin_vec_min (vuc, vuc);
+ VMINUB
+ vss __builtin_vec_min (vss, vss);
+ VMINSH
+ vus __builtin_vec_min (vus, vus);
+ VMINUH
+ vsi __builtin_vec_min (vsi, vsi);
+ VMINSW
+ vui __builtin_vec_min (vui, vui);
+ VMINUW
+ vsll __builtin_vec_min (vsll, vsll);
+ VMINSD
+ vull __builtin_vec_min (vull, vull);
+ VMINUD
+ vf __builtin_vec_min (vf, vf);
+ XVMINSP
+ vd __builtin_vec_min (vd, vd);
+ XVMINDP
+
+[VEC_MLADD, vec_mladd, __builtin_vec_mladd]
+ vss __builtin_vec_mladd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS2
+ vss __builtin_vec_mladd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS2
+ vss __builtin_vec_mladd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS2
+ vus __builtin_vec_mladd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS2
+
+[VEC_MRADDS, vec_mradds, __builtin_vec_mradds]
+ vss __builtin_vec_mradds (vss, vss, vss);
+ VMHRADDSHS
+
+[VEC_MSUB, vec_msub, __builtin_vec_msub, __VSX__]
+ vf __builtin_vec_msub (vf, vf, vf);
+ XVMSUBSP
+ vd __builtin_vec_msub (vd, vd, vd);
+ XVMSUBDP
+
+[VEC_MSUM, vec_msum, __builtin_vec_msum]
+ vui __builtin_vec_msum (vuc, vuc, vui);
+ VMSUMUBM
+ vsi __builtin_vec_msum (vsc, vuc, vsi);
+ VMSUMMBM
+ vui __builtin_vec_msum (vus, vus, vui);
+ VMSUMUHM
+ vsi __builtin_vec_msum (vss, vss, vsi);
+ VMSUMSHM
+ vsq __builtin_vec_msum (vsll, vsll, vsq);
+ VMSUMUDM VMSUMUDM_S
+ vuq __builtin_vec_msum (vull, vull, vuq);
+ VMSUMUDM VMSUMUDM_U
+
+[VEC_MSUMS, vec_msums, __builtin_vec_msums]
+ vui __builtin_vec_msums (vus, vus, vui);
+ VMSUMUHS
+ vsi __builtin_vec_msums (vss, vss, vsi);
+ VMSUMSHS
+
+[VEC_MTVSCR, vec_mtvscr, __builtin_vec_mtvscr]
+ void __builtin_vec_mtvscr (vbc);
+ MTVSCR MTVSCR_VBC
+ void __builtin_vec_mtvscr (vsc);
+ MTVSCR MTVSCR_VSC
+ void __builtin_vec_mtvscr (vuc);
+ MTVSCR MTVSCR_VUC
+ void __builtin_vec_mtvscr (vbs);
+ MTVSCR MTVSCR_VBS
+ void __builtin_vec_mtvscr (vss);
+ MTVSCR MTVSCR_VSS
+ void __builtin_vec_mtvscr (vus);
+ MTVSCR MTVSCR_VUS
+ void __builtin_vec_mtvscr (vp);
+ MTVSCR MTVSCR_VP
+ void __builtin_vec_mtvscr (vbi);
+ MTVSCR MTVSCR_VBI
+ void __builtin_vec_mtvscr (vsi);
+ MTVSCR MTVSCR_VSI
+ void __builtin_vec_mtvscr (vui);
+ MTVSCR MTVSCR_VUI
+
+; Note that there is no entry for VEC_MUL. See rs6000-c.c:
+; altivec_resolve_overloaded_builtin, where there is special-case
+; code for VEC_MUL. TODO: Is this really necessary? Investigate.
+
+[VEC_MULE, vec_mule, __builtin_vec_mule]
+ vss __builtin_vec_mule (vsc, vsc);
+ VMULESB
+ vus __builtin_vec_mule (vuc, vuc);
+ VMULEUB
+ vsi __builtin_vec_mule (vss, vss);
+ VMULESH
+ vui __builtin_vec_mule (vus, vus);
+ VMULEUH
+ vsll __builtin_vec_mule (vsi, vsi);
+ VMULESW
+ vull __builtin_vec_mule (vui, vui);
+ VMULEUW
+
+[VEC_MULO, vec_mulo, __builtin_vec_mulo]
+ vss __builtin_vec_mulo (vsc, vsc);
+ VMULOSB
+ vus __builtin_vec_mulo (vuc, vuc);
+ VMULOUB
+ vsi __builtin_vec_mulo (vss, vss);
+ VMULOSH
+ vui __builtin_vec_mulo (vus, vus);
+ VMULOUH
+ vsll __builtin_vec_mulo (vsi, vsi);
+ VMULOSW
+ vull __builtin_vec_mulo (vui, vui);
+ VMULOUW
+
+[VEC_NABS, vec_nabs, __builtin_vec_nabs]
+ vsc __builtin_vec_nabs (vsc);
+ NABS_V16QI
+ vss __builtin_vec_nabs (vss);
+ NABS_V8HI
+ vsi __builtin_vec_nabs (vsi);
+ NABS_V4SI
+ vsll __builtin_vec_nabs (vsll);
+ NABS_V2DI
+ vf __builtin_vec_nabs (vf);
+ NABS_V4SF
+ vd __builtin_vec_nabs (vd);
+ NABS_V2DF
+
+[VEC_NAND, vec_nand, __builtin_vec_nand, _ARCH_PWR8]
+ vsc __builtin_vec_nand (vsc, vsc);
+ NAND_V16QI
+ vuc __builtin_vec_nand (vuc, vuc);
+ NAND_V16QI_UNS NAND_VUC
+ vbc __builtin_vec_nand (vbc, vbc);
+ NAND_V16QI_UNS NAND_VBC
+ vss __builtin_vec_nand (vss, vss);
+ NAND_V8HI
+ vus __builtin_vec_nand (vus, vus);
+ NAND_V8HI_UNS NAND_VUS
+ vbs __builtin_vec_nand (vbs, vbs);
+ NAND_V8HI_UNS NAND_VBS
+ vsi __builtin_vec_nand (vsi, vsi);
+ NAND_V4SI
+ vui __builtin_vec_nand (vui, vui);
+ NAND_V4SI_UNS NAND_VUI
+ vbi __builtin_vec_nand (vbi, vbi);
+ NAND_V4SI_UNS NAND_VBI
+ vsll __builtin_vec_nand (vsll, vsll);
+ NAND_V2DI
+ vull __builtin_vec_nand (vull, vull);
+ NAND_V2DI_UNS NAND_VULL
+ vbll __builtin_vec_nand (vbll, vbll);
+ NAND_V2DI_UNS NAND_VBLL
+ vf __builtin_vec_nand (vf, vf);
+ NAND_V4SF
+ vd __builtin_vec_nand (vd, vd);
+ NAND_V2DF
+
+[VEC_NCIPHER_BE, vec_ncipher_be, __builtin_vec_vncipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipher_be (vuc, vuc);
+ VNCIPHER_BE
+
+[VEC_NCIPHERLAST_BE, vec_ncipherlast_be, __builtin_vec_vncipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipherlast_be (vuc, vuc);
+ VNCIPHERLAST_BE
+
+[VEC_NEARBYINT, vec_nearbyint, __builtin_vec_nearbyint, __VSX__]
+ vf __builtin_vec_nearbyint (vf);
+ XVRSPI XVRSPI_NBI
+ vd __builtin_vec_nearbyint (vd);
+ XVRDPI XVRDPI_NBI
+
+[VEC_NEG, vec_neg, __builtin_vec_neg]
+ vsc __builtin_vec_neg (vsc);
+ NEG_V16QI
+ vss __builtin_vec_neg (vss);
+ NEG_V8HI
+ vsi __builtin_vec_neg (vsi);
+ NEG_V4SI
+ vsll __builtin_vec_neg (vsll);
+ NEG_V2DI
+ vf __builtin_vec_neg (vf);
+ NEG_V4SF
+ vd __builtin_vec_neg (vd);
+ NEG_V2DF
+
+[VEC_NMADD, vec_nmadd, __builtin_vec_nmadd, __VSX__]
+ vf __builtin_vec_nmadd (vf, vf, vf);
+ XVNMADDSP
+ vd __builtin_vec_nmadd (vd, vd, vd);
+ XVNMADDDP
+
+[VEC_NMSUB, vec_nmsub, __builtin_vec_nmsub]
+ vf __builtin_vec_nmsub (vf, vf, vf);
+ XVNMSUBSP
+ vd __builtin_vec_nmsub (vd, vd, vd);
+ XVNMSUBDP
+
+[VEC_NOR, vec_nor, __builtin_vec_nor]
+ vsc __builtin_vec_nor (vsc, vsc);
+ VNOR_V16QI
+ vuc __builtin_vec_nor (vuc, vuc);
+ VNOR_V16QI_UNS VNOR_V16QI_U
+ vbc __builtin_vec_nor (vbc, vbc);
+ VNOR_V16QI_UNS VNOR_V16QI_B
+ vss __builtin_vec_nor (vss, vss);
+ VNOR_V8HI
+ vus __builtin_vec_nor (vus, vus);
+ VNOR_V8HI_UNS VNOR_V8HI_U
+ vbs __builtin_vec_nor (vbs, vbs);
+ VNOR_V8HI_UNS VNOR_V8HI_B
+ vsi __builtin_vec_nor (vsi, vsi);
+ VNOR_V4SI
+ vui __builtin_vec_nor (vui, vui);
+ VNOR_V4SI_UNS VNOR_V4SI_U
+ vbi __builtin_vec_nor (vbi, vbi);
+ VNOR_V4SI_UNS VNOR_V4SI_B
+ vsll __builtin_vec_nor (vsll, vsll);
+ VNOR_V2DI
+ vull __builtin_vec_nor (vull, vull);
+ VNOR_V2DI_UNS VNOR_V2DI_U
+ vbll __builtin_vec_nor (vbll, vbll);
+ VNOR_V2DI_UNS VNOR_V2DI_B
+ vf __builtin_vec_nor (vf, vf);
+ VNOR_V4SF
+ vd __builtin_vec_nor (vd, vd);
+ VNOR_V2DF
+
+[VEC_OR, vec_or, __builtin_vec_or]
+ vsc __builtin_vec_or (vsc, vsc);
+ VOR_V16QI
+ vuc __builtin_vec_or (vuc, vuc);
+ VOR_V16QI_UNS VOR_V16QI_U
+ vbc __builtin_vec_or (vbc, vbc);
+ VOR_V16QI_UNS VOR_V16QI_B
+ vss __builtin_vec_or (vss, vss);
+ VOR_V8HI
+ vus __builtin_vec_or (vus, vus);
+ VOR_V8HI_UNS VOR_V8HI_U
+ vbs __builtin_vec_or (vbs, vbs);
+ VOR_V8HI_UNS VOR_V8HI_B
+ vsi __builtin_vec_or (vsi, vsi);
+ VOR_V4SI
+ vui __builtin_vec_or (vui, vui);
+ VOR_V4SI_UNS VOR_V4SI_U
+ vbi __builtin_vec_or (vbi, vbi);
+ VOR_V4SI_UNS VOR_V4SI_B
+ vsll __builtin_vec_or (vsll, vsll);
+ VOR_V2DI
+ vull __builtin_vec_or (vull, vull);
+ VOR_V2DI_UNS VOR_V2DI_U
+ vbll __builtin_vec_or (vbll, vbll);
+ VOR_V2DI_UNS VOR_V2DI_B
+ vf __builtin_vec_or (vf, vf);
+ VOR_V4SF
+ vd __builtin_vec_or (vd, vd);
+ VOR_V2DF
+
+[VEC_ORC, vec_orc, __builtin_vec_orc, _ARCH_PWR8]
+ vsc __builtin_vec_orc (vsc, vsc);
+ ORC_V16QI
+ vuc __builtin_vec_orc (vuc, vuc);
+ ORC_V16QI_UNS ORC_VUC
+ vbc __builtin_vec_orc (vbc, vbc);
+ ORC_V16QI_UNS ORC_VBC
+ vss __builtin_vec_orc (vss, vss);
+ ORC_V8HI
+ vus __builtin_vec_orc (vus, vus);
+ ORC_V8HI_UNS ORC_VUS
+ vbs __builtin_vec_orc (vbs, vbs);
+ ORC_V8HI_UNS ORC_VBS
+ vsi __builtin_vec_orc (vsi, vsi);
+ ORC_V4SI
+ vui __builtin_vec_orc (vui, vui);
+ ORC_V4SI_UNS ORC_VUI
+ vbi __builtin_vec_orc (vbi, vbi);
+ ORC_V4SI_UNS ORC_VBI
+ vsll __builtin_vec_orc (vsll, vsll);
+ ORC_V2DI
+ vull __builtin_vec_orc (vull, vull);
+ ORC_V2DI_UNS ORC_VULL
+ vbll __builtin_vec_orc (vbll, vbll);
+ ORC_V2DI_UNS ORC_VBLL
+ vf __builtin_vec_orc (vf, vf);
+ ORC_V4SF
+ vd __builtin_vec_orc (vd, vd);
+ ORC_V2DF
+
+[VEC_PACK, vec_pack, __builtin_vec_pack]
+ vsc __builtin_vec_pack (vss, vss);
+ VPKUHUM VPKUHUM_VSS
+ vuc __builtin_vec_pack (vus, vus);
+ VPKUHUM VPKUHUM_VUS
+ vbc __builtin_vec_pack (vbs, vbs);
+ VPKUHUM VPKUHUM_VBS
+ vss __builtin_vec_pack (vsi, vsi);
+ VPKUWUM VPKUWUM_VSI
+ vus __builtin_vec_pack (vui, vui);
+ VPKUWUM VPKUWUM_VUI
+ vbs __builtin_vec_pack (vbi, vbi);
+ VPKUWUM VPKUWUM_VBI
+ vsi __builtin_vec_pack (vsll, vsll);
+ VPKUDUM VPKUDUM_VSLL
+ vui __builtin_vec_pack (vull, vull);
+ VPKUDUM VPKUDUM_VULL
+ vbi __builtin_vec_pack (vbll, vbll);
+ VPKUDUM VPKUDUM_VBLL
+ vf __builtin_vec_pack (vd, vd);
+ FLOAT2_V2DF FLOAT2_V2DF_PACK
+
+[VEC_PACKPX, vec_packpx, __builtin_vec_packpx]
+ vp __builtin_vec_packpx (vui, vui);
+ VPKPX
+
+[VEC_PACKS, vec_packs, __builtin_vec_packs]
+ vuc __builtin_vec_packs (vus, vus);
+ VPKUHUS
+ vsc __builtin_vec_packs (vss, vss);
+ VPKSHSS
+ vus __builtin_vec_packs (vui, vui);
+ VPKUWUS
+ vss __builtin_vec_packs (vsi, vsi);
+ VPKSWSS
+ vui __builtin_vec_packs (vull, vull);
+ VPKUDUS
+ vsi __builtin_vec_packs (vsll, vsll);
+ VPKSDSS
+
+[VEC_PDEP, vec_pdep, __builtin_vec_vpdepd, _ARCH_PWR10]
+ vull __builtin_vec_vpdepd (vull, vull);
+ VPDEPD
+
+[VEC_PERM, vec_perm, __builtin_vec_perm]
+ vsc __builtin_vec_perm (vsc, vsc, vuc);
+ VPERM_16QI
+ vuc __builtin_vec_perm (vuc, vuc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VUC
+ vbc __builtin_vec_perm (vbc, vbc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VBC
+ vss __builtin_vec_perm (vss, vss, vuc);
+ VPERM_8HI
+ vus __builtin_vec_perm (vus, vus, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VUS
+ vbs __builtin_vec_perm (vbs, vbs, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VBS
+ vp __builtin_vec_perm (vp, vp, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VP
+ vsi __builtin_vec_perm (vsi, vsi, vuc);
+ VPERM_4SI
+ vui __builtin_vec_perm (vui, vui, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VUI
+ vbi __builtin_vec_perm (vbi, vbi, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VBI
+ vsll __builtin_vec_perm (vsll, vsll, vuc);
+ VPERM_2DI
+ vull __builtin_vec_perm (vull, vull, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VULL
+ vbll __builtin_vec_perm (vbll, vbll, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VBLL
+ vf __builtin_vec_perm (vf, vf, vuc);
+ VPERM_4SF
+ vd __builtin_vec_perm (vd, vd, vuc);
+ VPERM_2DF
+
+[VEC_PERMX, vec_permx, __builtin_vec_xxpermx, _ARCH_PWR10]
+ vsc __builtin_vec_xxpermx (vsc, vsc, vuc, const int);
+ XXPERMX_V16QI
+ vuc __builtin_vec_xxpermx (vuc, vuc, vuc, const int);
+ XXPERMX_UV16QI
+ vss __builtin_vec_xxpermx (vss, vss, vuc, const int);
+ XXPERMX_V8HI
+ vus __builtin_vec_xxpermx (vus, vus, vuc, const int);
+ XXPERMX_UV8HI
+ vsi __builtin_vec_xxpermx (vsi, vsi, vuc, const int);
+ XXPERMX_V4SI
+ vui __builtin_vec_xxpermx (vui, vui, vuc, const int);
+ XXPERMX_UV4SI
+ vsll __builtin_vec_xxpermx (vsll, vsll, vuc, const int);
+ XXPERMX_V2DI
+ vull __builtin_vec_xxpermx (vull, vull, vuc, const int);
+ XXPERMX_UV2DI
+ vf __builtin_vec_xxpermx (vf, vf, vuc, const int);
+ XXPERMX_V4SF
+ vd __builtin_vec_xxpermx (vd, vd, vuc, const int);
+ XXPERMX_V2DF
+
+[VEC_PERMXOR, vec_permxor, __builtin_vec_vpermxor]
+ vsc __builtin_vec_vpermxor (vsc, vsc, vsc);
+ VPERMXOR VPERMXOR_VSC
+ vuc __builtin_vec_vpermxor (vuc, vuc, vuc);
+ VPERMXOR VPERMXOR_VUC
+ vbc __builtin_vec_vpermxor (vbc, vbc, vbc);
+ VPERMXOR VPERMXOR_VBC
+
+[VEC_PEXT, vec_pext, __builtin_vec_vpextd, _ARCH_PWR10]
+ vull __builtin_vec_vpextd (vull, vull);
+ VPEXTD
+
+[VEC_PMSUM, vec_pmsum_be, __builtin_vec_vpmsum]
+ vus __builtin_vec_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_V
+ vui __builtin_vec_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_V
+ vull __builtin_vec_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_V
+ vuq __builtin_vec_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_V
+
+[VEC_POPCNT, vec_popcnt, __builtin_vec_vpopcnt, _ARCH_PWR8]
+ vsc __builtin_vec_vpopcnt (vsc);
+ VPOPCNTB
+ vuc __builtin_vec_vpopcnt (vuc);
+ VPOPCNTUB
+ vss __builtin_vec_vpopcnt (vss);
+ VPOPCNTH
+ vus __builtin_vec_vpopcnt (vus);
+ VPOPCNTUH
+ vsi __builtin_vec_vpopcnt (vsi);
+ VPOPCNTW
+ vui __builtin_vec_vpopcnt (vui);
+ VPOPCNTUW
+ vsll __builtin_vec_vpopcnt (vsll);
+ VPOPCNTD
+ vull __builtin_vec_vpopcnt (vull);
+ VPOPCNTUD
+
+[VEC_PARITY_LSBB, vec_parity_lsbb, __builtin_vec_vparity_lsbb, _ARCH_PWR9]
+ vui __builtin_vec_vparity_lsbb (vsi);
+ VPRTYBW VPRTYBW_S
+ vui __builtin_vec_vparity_lsbb (vui);
+ VPRTYBW VPRTYBW_U
+ vull __builtin_vec_vparity_lsbb (vsll);
+ VPRTYBD VPRTYBD_S
+ vull __builtin_vec_vparity_lsbb (vull);
+ VPRTYBD VPRTYBD_U
+ vuq __builtin_vec_vparity_lsbb (vsq);
+ VPRTYBQ VPRTYBQ_S
+ vuq __builtin_vec_vparity_lsbb (vuq);
+ VPRTYBQ VPRTYBQ_U
+
+[VEC_RE, vec_re, __builtin_vec_re]
+ vf __builtin_vec_re (vf);
+ XVRESP
+ vd __builtin_vec_re (vd);
+ XVREDP
+
+[VEC_RECIP, vec_recipdiv, __builtin_vec_recipdiv]
+ vf __builtin_vec_recipdiv (vf, vf);
+ RECIP_V4SF
+ vd __builtin_vec_recipdiv (vd, vd);
+ RECIP_V2DF
+
+[VEC_REPLACE_ELT, vec_replace_elt, __builtin_vec_replace_elt, _ARCH_PWR10]
+ vui __builtin_vec_replace_elt (vui, unsigned int, const int);
+ VREPLACE_ELT_UV4SI
+ vsi __builtin_vec_replace_elt (vsi, signed int, const int);
+ VREPLACE_ELT_V4SI
+ vull __builtin_vec_replace_elt (vull, unsigned long long, const int);
+ VREPLACE_ELT_UV2DI
+ vsll __builtin_vec_replace_elt (vsll, signed long long, const int);
+ VREPLACE_ELT_V2DI
+ vf __builtin_vec_replace_elt (vf, float, const int);
+ VREPLACE_ELT_V4SF
+ vd __builtin_vec_replace_elt (vd, double, const int);
+ VREPLACE_ELT_V2DF
+
+[VEC_REPLACE_UN, vec_replace_unaligned, __builtin_vec_replace_un, _ARCH_PWR10]
+ vui __builtin_vec_replace_un (vui, unsigned int, const int);
+ VREPLACE_UN_UV4SI
+ vsi __builtin_vec_replace_un (vsi, signed int, const int);
+ VREPLACE_UN_V4SI
+ vull __builtin_vec_replace_un (vull, unsigned long long, const int);
+ VREPLACE_UN_UV2DI
+ vsll __builtin_vec_replace_un (vsll, signed long long, const int);
+ VREPLACE_UN_V2DI
+ vf __builtin_vec_replace_un (vf, float, const int);
+ VREPLACE_UN_V4SF
+ vd __builtin_vec_replace_un (vd, double, const int);
+ VREPLACE_UN_V2DF
+
+[VEC_REVB, vec_revb, __builtin_vec_revb, _ARCH_PWR8]
+ vss __builtin_vec_revb (vss);
+ REVB_V8HI REVB_VSS
+ vus __builtin_vec_revb (vus);
+ REVB_V8HI REVB_VUS
+ vsi __builtin_vec_revb (vsi);
+ REVB_V4SI REVB_VSI
+ vui __builtin_vec_revb (vui);
+ REVB_V4SI REVB_VUI
+ vsll __builtin_vec_revb (vsll);
+ REVB_V2DI REVB_VSLL
+ vull __builtin_vec_revb (vull);
+ REVB_V2DI REVB_VULL
+ vsq __builtin_vec_revb (vsq);
+ REVB_V1TI REVB_VSQ
+ vuq __builtin_vec_revb (vuq);
+ REVB_V1TI REVB_VUQ
+ vf __builtin_vec_revb (vf);
+ REVB_V4SF
+ vd __builtin_vec_revb (vd);
+ REVB_V2DF
+
+[VEC_REVE, vec_reve, __builtin_vec_vreve]
+ vsc __builtin_vec_vreve (vsc);
+ VREVE_V16QI VREVE_VSC
+ vuc __builtin_vec_vreve (vuc);
+ VREVE_V16QI VREVE_VUC
+ vbc __builtin_vec_vreve (vbc);
+ VREVE_V16QI VREVE_VBC
+ vss __builtin_vec_vreve (vss);
+ VREVE_V8HI VREVE_VSS
+ vus __builtin_vec_vreve (vus);
+ VREVE_V8HI VREVE_VUS
+ vbs __builtin_vec_vreve (vbs);
+ VREVE_V8HI VREVE_VBS
+ vsi __builtin_vec_vreve (vsi);
+ VREVE_V4SI VREVE_VSI
+ vui __builtin_vec_vreve (vui);
+ VREVE_V4SI VREVE_VUI
+ vbi __builtin_vec_vreve (vbi);
+ VREVE_V4SI VREVE_VBI
+ vsll __builtin_vec_vreve (vsll);
+ VREVE_V2DI VREVE_VSLL
+ vull __builtin_vec_vreve (vull);
+ VREVE_V2DI VREVE_VULL
+ vbll __builtin_vec_vreve (vbll);
+ VREVE_V2DI VREVE_VBLL
+ vf __builtin_vec_vreve (vf);
+ VREVE_V4SF
+ vd __builtin_vec_vreve (vd);
+ VREVE_V2DF
+
+[VEC_RINT, vec_rint, __builtin_vec_rint, __VSX__]
+ vf __builtin_vec_rint (vf);
+ XVRSPIC
+ vd __builtin_vec_rint (vd);
+ XVRDPIC
+
+[VEC_RL, vec_rl, __builtin_vec_rl]
+ vsc __builtin_vec_rl (vsc, vuc);
+ VRLB VRLB_VSC
+ vuc __builtin_vec_rl (vuc, vuc);
+ VRLB VRLB_VUC
+ vss __builtin_vec_rl (vss, vus);
+ VRLH VRLH_VSS
+ vus __builtin_vec_rl (vus, vus);
+ VRLH VRLH_VUS
+ vsi __builtin_vec_rl (vsi, vui);
+ VRLW VRLW_VSI
+ vui __builtin_vec_rl (vui, vui);
+ VRLW VRLW_VUI
+ vsll __builtin_vec_rl (vsll, vull);
+ VRLD VRLD_VSLL
+ vull __builtin_vec_rl (vull, vull);
+ VRLD VRLD_VULL
+
+[VEC_RLMI, vec_rlmi, __builtin_vec_rlmi]
+ vui __builtin_vec_rlmi (vui, vui, vui);
+ VRLWMI
+ vull __builtin_vec_rlmi (vull, vull, vull);
+ VRLDMI
+
+[VEC_RLNM, vec_rlnm, __builtin_vec_rlnm]
+ vui __builtin_vec_rlnm (vui, vui);
+ VRLWNM
+ vull __builtin_vec_rlnm (vull, vull);
+ VRLDNM
+
+[VEC_ROUND, vec_round, __builtin_vec_round]
+ vf __builtin_vec_round (vf);
+ XVRSPI
+ vd __builtin_vec_round (vd);
+ XVRDPI
+
+[VEC_RSQRT, vec_rsqrt, __builtin_vec_rsqrt]
+ vf __builtin_vec_rsqrt (vf);
+ RSQRT_4SF
+ vd __builtin_vec_rsqrt (vd);
+ RSQRT_2DF
+
+[VEC_RSQRTE, vec_rsqrte, __builtin_vec_rsqrte]
+ vf __builtin_vec_rsqrte (vf);
+ XVRSQRTESP
+ vd __builtin_vec_rsqrte (vd);
+ XVRSQRTEDP
+
+[VEC_SBOX_BE, vec_sbox_be, __builtin_vec_sbox_be, _ARCH_PWR8]
+ vuc __builtin_vec_sbox_be (vuc);
+ VSBOX_BE
+
+[VEC_SEL, vec_sel, __builtin_vec_sel]
+ vsc __builtin_vec_sel (vsc, vsc, vbc);
+ VSEL_16QI VSEL_16QI_B
+ vsc __builtin_vec_sel (vsc, vsc, vuc);
+ VSEL_16QI VSEL_16QI_U
+ vuc __builtin_vec_sel (vuc, vuc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_UB
+ vuc __builtin_vec_sel (vuc, vuc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_UU
+ vbc __builtin_vec_sel (vbc, vbc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_BB
+ vbc __builtin_vec_sel (vbc, vbc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_BU
+ vss __builtin_vec_sel (vss, vss, vbs);
+ VSEL_8HI VSEL_8HI_B
+ vss __builtin_vec_sel (vss, vss, vus);
+ VSEL_8HI VSEL_8HI_U
+ vus __builtin_vec_sel (vus, vus, vbs);
+ VSEL_8HI_UNS VSEL_8HI_UB
+ vus __builtin_vec_sel (vus, vus, vus);
+ VSEL_8HI_UNS VSEL_8HI_UU
+ vbs __builtin_vec_sel (vbs, vbs, vbs);
+ VSEL_8HI_UNS VSEL_8HI_BB
+ vbs __builtin_vec_sel (vbs, vbs, vus);
+ VSEL_8HI_UNS VSEL_8HI_BU
+ vsi __builtin_vec_sel (vsi, vsi, vbi);
+ VSEL_4SI VSEL_4SI_B
+ vsi __builtin_vec_sel (vsi, vsi, vui);
+ VSEL_4SI VSEL_4SI_U
+ vui __builtin_vec_sel (vui, vui, vbi);
+ VSEL_4SI_UNS VSEL_4SI_UB
+ vui __builtin_vec_sel (vui, vui, vui);
+ VSEL_4SI_UNS VSEL_4SI_UU
+ vbi __builtin_vec_sel (vbi, vbi, vbi);
+ VSEL_4SI_UNS VSEL_4SI_BB
+ vbi __builtin_vec_sel (vbi, vbi, vui);
+ VSEL_4SI_UNS VSEL_4SI_BU
+ vsll __builtin_vec_sel (vsll, vsll, vbll);
+ VSEL_2DI_B VSEL_2DI_B
+ vsll __builtin_vec_sel (vsll, vsll, vull);
+ VSEL_2DI_B VSEL_2DI_U
+ vull __builtin_vec_sel (vull, vull, vbll);
+ VSEL_2DI_UNS VSEL_2DI_UB
+ vull __builtin_vec_sel (vull, vull, vull);
+ VSEL_2DI_UNS VSEL_2DI_UU
+ vbll __builtin_vec_sel (vbll, vbll, vbll);
+ VSEL_2DI_UNS VSEL_2DI_BB
+ vbll __builtin_vec_sel (vbll, vbll, vull);
+ VSEL_2DI_UNS VSEL_2DI_BU
+ vf __builtin_vec_sel (vf, vf, vbll);
+ VSEL_4SF VSEL_4SF_B
+ vf __builtin_vec_sel (vf, vf, vull);
+ VSEL_4SF VSEL_4SF_U
+ vd __builtin_vec_sel (vd, vd, vbll);
+ VSEL_2DF VSEL_2DF_B
+ vd __builtin_vec_sel (vd, vd, vull);
+ VSEL_2DF VSEL_2DF_U
+
+[VEC_SHASIGMA_BE, vec_shasigma_be, __builtin_crypto_vshasigma]
+ vui __builtin_crypto_vshasigma (vui, const int, const int);
+ VSHASIGMAW
+ vull __builtin_crypto_vshasigma (vull, const int, const int);
+ VSHASIGMAD
+
+[VEC_SIGNED, vec_signed, __builtin_vec_vsigned]
+ vsi __builtin_vec_vsigned (vf);
+ VEC_VSIGNED_V4SF
+ vsll __builtin_vec_vsigned (vd);
+ VEC_VSIGNED_V2DF
+
+[VEC_SIGNED2, vec_signed2, __builtin_vec_vsigned2]
+ vsi __builtin_vec_vsigned2 (vd, vd);
+ VEC_VSIGNED2_V2DF
+
+[VEC_SIGNEDO, vec_signedo, __builtin_vec_vsignedo]
+ vui __builtin_vec_vsignedo (vd);
+ VEC_VSIGNEDO_V2DF
+
+[VEC_SL, vec_sl, __builtin_vec_sl]
+ vsc __builtin_vec_sl (vsc, vuc);
+ VSLB VSLB_VSC
+ vuc __builtin_vec_sl (vuc, vuc);
+ VSLB VSLB_VUC
+ vss __builtin_vec_sl (vss, vus);
+ VSLH VSLH_VSS
+ vus __builtin_vec_sl (vus, vus);
+ VSLH VSLH_VUS
+ vsi __builtin_vec_sl (vsi, vui);
+ VSLW VSLW_VSI
+ vui __builtin_vec_sl (vui, vui);
+ VSLW VSLW_VUI
+ vsll __builtin_vec_sl (vsll, vull);
+ VSLD VSLD_VSLL
+ vull __builtin_vec_sl (vull, vull);
+ VSLD VSLD_VULL
+
+[VEC_SLD, vec_sld, __builtin_vec_sld]
+ vsc __builtin_vec_sld (vsc, vsc, const int);
+ VSLDOI_16QI VSLDOI_VSC
+ vbc __builtin_vec_sld (vbc, vbc, const int);
+ VSLDOI_16QI VSLDOI_VBC
+ vuc __builtin_vec_sld (vuc, vuc, const int);
+ VSLDOI_16QI VSLDOI_VUC
+ vss __builtin_vec_sld (vss, vss, const int);
+ VSLDOI_8HI VSLDOI_VSS
+ vbs __builtin_vec_sld (vbs, vbs, const int);
+ VSLDOI_8HI VSLDOI_VBS
+ vus __builtin_vec_sld (vus, vus, const int);
+ VSLDOI_8HI VSLDOI_VUS
+ vp __builtin_vec_sld (vp, vp, const int);
+ VSLDOI_8HI VSLDOI_VP
+ vsi __builtin_vec_sld (vsi, vsi, const int);
+ VSLDOI_4SI VSLDOI_VSI
+ vbi __builtin_vec_sld (vbi, vbi, const int);
+ VSLDOI_4SI VSLDOI_VBI
+ vui __builtin_vec_sld (vui, vui, const int);
+ VSLDOI_4SI VSLDOI_VUI
+ vsll __builtin_vec_sld (vsll, vsll, const int);
+ VSLDOI_2DI VSLDOI_VSLL
+ vbll __builtin_vec_sld (vbll, vbll, const int);
+ VSLDOI_2DI VSLDOI_VBLL
+ vull __builtin_vec_sld (vull, vull, const int);
+ VSLDOI_2DI VSLDOI_VULL
+ vf __builtin_vec_sld (vf, vf, const int);
+ VSLDOI_4SF
+ vd __builtin_vec_sld (vd, vd, const int);
+ VSLDOI_2DF
+
+[VEC_SLDB, vec_sldb, __builtin_vec_sldb, _ARCH_PWR10]
+ vsc __builtin_vec_sldb (vsc, vsc, const int);
+ VSLDB_V16QI VSLDB_VSC
+ vuc __builtin_vec_sldb (vuc, vuc, const int);
+ VSLDB_V16QI VSLDB_VUC
+ vss __builtin_vec_sldb (vss, vss, const int);
+ VSLDB_V8HI VSLDB_VSS
+ vus __builtin_vec_sldb (vus, vus, const int);
+ VSLDB_V8HI VSLDB_VUS
+ vsi __builtin_vec_sldb (vsi, vsi, const int);
+ VSLDB_V4SI VSLDB_VSI
+ vui __builtin_vec_sldb (vui, vui, const int);
+ VSLDB_V4SI VSLDB_VUI
+ vsll __builtin_vec_sldb (vsll, vsll, const int);
+ VSLDB_V2DI VSLDB_VSLL
+ vull __builtin_vec_sldb (vull, vull, const int);
+ VSLDB_V2DI VSLDB_VULL
+
+[VEC_SLDW, vec_sldw, __builtin_vec_sldw]
+ vsc __builtin_vec_sldw (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC
+ vuc __builtin_vec_sldw (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC
+ vss __builtin_vec_sldw (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS
+ vus __builtin_vec_sldw (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS
+ vsi __builtin_vec_sldw (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI
+ vui __builtin_vec_sldw (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI
+ vsll __builtin_vec_sldw (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL
+ vull __builtin_vec_sldw (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL
+
+[VEC_SLL, vec_sll, __builtin_vec_sll]
+ vsc __builtin_vec_sll (vsc, vuc);
+ VSL VSL_VSC
+ vuc __builtin_vec_sll (vuc, vuc);
+ VSL VSL_VUC
+ vss __builtin_vec_sll (vss, vuc);
+ VSL VSL_VSS
+ vus __builtin_vec_sll (vus, vuc);
+ VSL VSL_VUS
+ vp __builtin_vec_sll (vp, vuc);
+ VSL VSL_VP
+ vsi __builtin_vec_sll (vsi, vuc);
+ VSL VSL_VSI
+ vui __builtin_vec_sll (vui, vuc);
+ VSL VSL_VUI
+ vsll __builtin_vec_sll (vsll, vuc);
+ VSL VSL_VSLL
+ vull __builtin_vec_sll (vull, vuc);
+ VSL VSL_VULL
+
+[VEC_SLO, vec_slo, __builtin_vec_slo]
+ vsc __builtin_vec_slo (vsc, vsc);
+ VSLO VSLO_VSCS
+ vsc __builtin_vec_slo (vsc, vuc);
+ VSLO VSLO_VSCU
+ vuc __builtin_vec_slo (vuc, vsc);
+ VSLO VSLO_VUCS
+ vuc __builtin_vec_slo (vuc, vuc);
+ VSLO VSLO_VUCU
+ vss __builtin_vec_slo (vss, vsc);
+ VSLO VSLO_VSSS
+ vss __builtin_vec_slo (vss, vuc);
+ VSLO VSLO_VSSU
+ vus __builtin_vec_slo (vus, vsc);
+ VSLO VSLO_VUSS
+ vus __builtin_vec_slo (vus, vuc);
+ VSLO VSLO_VUSU
+ vp __builtin_vec_slo (vp, vsc);
+ VSLO VSLO_VPS
+ vp __builtin_vec_slo (vp, vuc);
+ VSLO VSLO_VPU
+ vsi __builtin_vec_slo (vsi, vsc);
+ VSLO VSLO_VSIS
+ vsi __builtin_vec_slo (vsi, vuc);
+ VSLO VSLO_VSIU
+ vui __builtin_vec_slo (vui, vsc);
+ VSLO VSLO_VUIS
+ vui __builtin_vec_slo (vui, vuc);
+ VSLO VSLO_VUIU
+ vsll __builtin_vec_slo (vsll, vsc);
+ VSLO VSLO_VSLLS
+ vsll __builtin_vec_slo (vsll, vuc);
+ VSLO VSLO_VSLLU
+ vull __builtin_vec_slo (vull, vsc);
+ VSLO VSLO_VULLS
+ vull __builtin_vec_slo (vull, vuc);
+ VSLO VSLO_VULLU
+ vf __builtin_vec_slo (vf, vsc);
+ VSLO VSLO_VFS
+ vf __builtin_vec_slo (vf, vuc);
+ VSLO VSLO_VFU
+
+[VEC_SLV, vec_slv, __builtin_vec_vslv, _ARCH_PWR9]
+ vuc __builtin_vec_vslv (vuc, vuc);
+ VSLV
+
+[VEC_SPLAT, vec_splat, __builtin_vec_splat]
+ vsc __builtin_vec_splat (vsc, signed int);
+ VSPLTB VSPLTB_VSC
+ vuc __builtin_vec_splat (vuc, signed int);
+ VSPLTB VSPLTB_VUC
+ vbc __builtin_vec_splat (vbc, signed int);
+ VSPLTB VSPLTB_VBC
+ vss __builtin_vec_splat (vss, signed int);
+ VSPLTH VSPLTH_VSS
+ vus __builtin_vec_splat (vus, signed int);
+ VSPLTH VSPLTH_VUS
+ vbs __builtin_vec_splat (vbs, signed int);
+ VSPLTH VSPLTH_VBS
+ vp __builtin_vec_splat (vp, signed int);
+ VSPLTH VSPLTH_VP
+ vf __builtin_vec_splat (vf, signed int);
+ VSPLTW VSPLTW_VF
+ vsi __builtin_vec_splat (vsi, signed int);
+ VSPLTW VSPLTW_VSI
+ vui __builtin_vec_splat (vui, signed int);
+ VSPLTW VSPLTW_VUI
+ vbi __builtin_vec_splat (vbi, signed int);
+ VSPLTW VSPLTW_VBI
+ vd __builtin_vec_splat (vd, signed int);
+ XXSPLTD_V2DF
+ vsll __builtin_vec_splat (vsll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VSLL
+ vull __builtin_vec_splat (vull, signed int);
+ XXSPLTD_V2DI XXSPLTD_VULL
+ vbll __builtin_vec_splat (vbll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VBLL
+
+[VEC_SPLATI, vec_splati, __builtin_vec_xxspltiw, _ARCH_PWR10]
+ vsi __builtin_vec_xxspltiw (signed int);
+ VXXSPLTIW_V4SI
+ vf __builtin_vec_xxspltiw (float);
+ VXXSPLTIW_V4SF
+
+[VEC_SPLATID, vec_splatid, __builtin_vec_xxspltid, _ARCH_PWR10]
+ vd __builtin_vec_xxspltid (float);
+ VXXSPLTIDP
+
+[VEC_SPLATI_INS, vec_splati_ins, __builtin_vec_xxsplti32dx, _ARCH_PWR10]
+ vsi __builtin_vec_xxsplti32dx (vsi, const int, signed int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VSI
+ vui __builtin_vec_xxsplti32dx (vui, const int, unsigned int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VUI
+ vf __builtin_vec_xxsplti32dx (vf, const int, float);
+ VXXSPLTI32DX_V4SF
+
+[VEC_SQRT, vec_sqrt, __builtin_vec_sqrt, __VSX__]
+ vf __builtin_vec_sqrt (vf);
+ XVSQRTSP
+ vd __builtin_vec_sqrt (vd);
+ XVSQRTDP
+
+[VEC_SR, vec_sr, __builtin_vec_sr]
+ vsc __builtin_vec_sr (vsc, vuc);
+ VSRB VSRB_VSC
+ vuc __builtin_vec_sr (vuc, vuc);
+ VSRB VSRB_VUC
+ vss __builtin_vec_sr (vss, vus);
+ VSRH VSRH_VSS
+ vus __builtin_vec_sr (vus, vus);
+ VSRH VSRH_VUS
+ vsi __builtin_vec_sr (vsi, vui);
+ VSRW VSRW_VSI
+ vui __builtin_vec_sr (vui, vui);
+ VSRW VSRW_VUI
+ vsll __builtin_vec_sr (vsll, vull);
+ VSRD VSRD_VSLL
+ vull __builtin_vec_sr (vull, vull);
+ VSRD VSRD_VULL
+
+[VEC_SRA, vec_sra, __builtin_vec_sra]
+ vsc __builtin_vec_sra (vsc, vuc);
+ VSRAB VSRAB_VSC
+ vuc __builtin_vec_sra (vuc, vuc);
+ VSRAB VSRAB_VUC
+ vss __builtin_vec_sra (vss, vus);
+ VSRAH VSRAH_VSS
+ vus __builtin_vec_sra (vus, vus);
+ VSRAH VSRAH_VUS
+ vsi __builtin_vec_sra (vsi, vui);
+ VSRAW VSRAW_VSI
+ vui __builtin_vec_sra (vui, vui);
+ VSRAW VSRAW_VUI
+ vsll __builtin_vec_sra (vsll, vull);
+ VSRAD VSRAD_VSLL
+ vull __builtin_vec_sra (vull, vull);
+ VSRAD VSRAD_VULL
+
+[VEC_SRDB, vec_srdb, __builtin_vec_srdb, _ARCH_PWR10]
+ vsc __builtin_vec_srdb (vsc, vsc, const int);
+ VSRDB_V16QI VSRDB_VSC
+ vuc __builtin_vec_srdb (vuc, vuc, const int);
+ VSRDB_V16QI VSRDB_VUC
+ vss __builtin_vec_srdb (vss, vss, const int);
+ VSRDB_V8HI VSRDB_VSS
+ vus __builtin_vec_srdb (vus, vus, const int);
+ VSRDB_V8HI VSRDB_VUS
+ vsi __builtin_vec_srdb (vsi, vsi, const int);
+ VSRDB_V4SI VSRDB_VSI
+ vui __builtin_vec_srdb (vui, vui, const int);
+ VSRDB_V4SI VSRDB_VUI
+ vsll __builtin_vec_srdb (vsll, vsll, const int);
+ VSRDB_V2DI VSRDB_VSLL
+ vull __builtin_vec_srdb (vull, vull, const int);
+ VSRDB_V2DI VSRDB_VULL
+
+[VEC_SRL, vec_srl, __builtin_vec_srl]
+ vsc __builtin_vec_srl (vsc, vuc);
+ VSR VSR_VSC
+ vuc __builtin_vec_srl (vuc, vuc);
+ VSR VSR_VUC
+ vss __builtin_vec_srl (vss, vuc);
+ VSR VSR_VSS
+ vus __builtin_vec_srl (vus, vuc);
+ VSR VSR_VUS
+ vp __builtin_vec_srl (vp, vuc);
+ VSR VSR_VP
+ vsi __builtin_vec_srl (vsi, vuc);
+ VSR VSR_VSI
+ vui __builtin_vec_srl (vui, vuc);
+ VSR VSR_VUI
+ vsll __builtin_vec_srl (vsll, vuc);
+ VSR VSR_VSLL
+ vull __builtin_vec_srl (vull, vuc);
+ VSR VSR_VULL
+
+[VEC_SRO, vec_sro, __builtin_vec_sro]
+ vsc __builtin_vec_sro (vsc, vsc);
+ VSRO VSRO_VSCS
+ vsc __builtin_vec_sro (vsc, vuc);
+ VSRO VSRO_VSCU
+ vuc __builtin_vec_sro (vuc, vsc);
+ VSRO VSRO_VUCS
+ vuc __builtin_vec_sro (vuc, vuc);
+ VSRO VSRO_VUCU
+ vss __builtin_vec_sro (vss, vsc);
+ VSRO VSRO_VSSS
+ vss __builtin_vec_sro (vss, vuc);
+ VSRO VSRO_VSSU
+ vus __builtin_vec_sro (vus, vsc);
+ VSRO VSRO_VUSS
+ vus __builtin_vec_sro (vus, vuc);
+ VSRO VSRO_VUSU
+ vp __builtin_vec_sro (vp, vsc);
+ VSRO VSRO_VPS
+ vp __builtin_vec_sro (vp, vuc);
+ VSRO VSRO_VPU
+ vsi __builtin_vec_sro (vsi, vsc);
+ VSRO VSRO_VSIS
+ vsi __builtin_vec_sro (vsi, vuc);
+ VSRO VSRO_VSIU
+ vui __builtin_vec_sro (vui, vsc);
+ VSRO VSRO_VUIS
+ vui __builtin_vec_sro (vui, vuc);
+ VSRO VSRO_VUIU
+ vsll __builtin_vec_sro (vsll, vsc);
+ VSRO VSRO_VSLLS
+ vsll __builtin_vec_sro (vsll, vuc);
+ VSRO VSRO_VSLLU
+ vull __builtin_vec_sro (vull, vsc);
+ VSRO VSRO_VULLS
+ vull __builtin_vec_sro (vull, vuc);
+ VSRO VSRO_VULLU
+ vf __builtin_vec_sro (vf, vsc);
+ VSRO VSRO_VFS
+ vf __builtin_vec_sro (vf, vuc);
+ VSRO VSRO_VFU
+
+[VEC_SRV, vec_srv, __builtin_vec_vsrv, _ARCH_PWR9]
+ vuc __builtin_vec_vsrv (vuc, vuc);
+ VSRV
+
+[VEC_ST, vec_st, __builtin_vec_st]
+ void __builtin_vec_st (vsc, signed long long, vsc *);
+ STVX_V16QI STVX_VSC
+ void __builtin_vec_st (vsc, signed long long, signed char *);
+ STVX_V16QI STVX_SC
+ void __builtin_vec_st (vuc, signed long long, vuc *);
+ STVX_V16QI STVX_VUC
+ void __builtin_vec_st (vuc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC
+ void __builtin_vec_st (vbc, signed long long, vbc *);
+ STVX_V16QI STVX_VBC
+ void __builtin_vec_st (vbc, signed long long, signed char *);
+ STVX_V16QI STVX_SC_B
+ void __builtin_vec_st (vbc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC_B
+ void __builtin_vec_st (vss, signed long long, vss *);
+ STVX_V8HI STVX_VSS
+ void __builtin_vec_st (vss, signed long long, signed short *);
+ STVX_V8HI STVX_SS
+ void __builtin_vec_st (vus, signed long long, vus *);
+ STVX_V8HI STVX_VUS
+ void __builtin_vec_st (vus, signed long long, unsigned short *);
+ STVX_V8HI STVX_US
+ void __builtin_vec_st (vbs, signed long long, vbs *);
+ STVX_V8HI STVX_VBS
+ void __builtin_vec_st (vbs, signed long long, signed short *);
+ STVX_V8HI STVX_SS_B
+ void __builtin_vec_st (vbs, signed long long, unsigned short *);
+ STVX_V8HI STVX_US_B
+ void __builtin_vec_st (vp, signed long long, vp *);
+ STVX_V8HI STVX_P
+ void __builtin_vec_st (vsi, signed long long, vsi *);
+ STVX_V4SI STVX_VSI
+ void __builtin_vec_st (vsi, signed long long, signed int *);
+ STVX_V4SI STVX_SI
+ void __builtin_vec_st (vui, signed long long, vui *);
+ STVX_V4SI STVX_VUI
+ void __builtin_vec_st (vui, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI
+ void __builtin_vec_st (vbi, signed long long, vbi *);
+ STVX_V4SI STVX_VBI
+ void __builtin_vec_st (vbi, signed long long, signed int *);
+ STVX_V4SI STVX_SI_B
+ void __builtin_vec_st (vbi, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI_B
+ void __builtin_vec_st (vsll, signed long long, vsll *);
+ STVX_V2DI STVX_VSLL
+ void __builtin_vec_st (vsll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL
+ void __builtin_vec_st (vull, signed long long, vull *);
+ STVX_V2DI STVX_VULL
+ void __builtin_vec_st (vull, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL
+ void __builtin_vec_st (vbll, signed long long, vbll *);
+ STVX_V2DI STVX_VBLL
+ void __builtin_vec_st (vbll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL_B
+ void __builtin_vec_st (vbll, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL_B
+ void __builtin_vec_st (vf, signed long long, vf *);
+ STVX_V4SF STVX_VF
+ void __builtin_vec_st (vf, signed long long, float *);
+ STVX_V4SF STVX_F
+ void __builtin_vec_st (vd, signed long long, vd *);
+ STVX_V2DF STVX_VD
+ void __builtin_vec_st (vd, signed long long, double *);
+ STVX_V2DF STVX_D
+
+[VEC_STE, vec_ste, __builtin_vec_ste]
+ void __builtin_vec_ste (vsc, signed long long, signed char *);
+ STVEBX STVEBX_S
+ void __builtin_vec_ste (vuc, signed long long, unsigned char *);
+ STVEBX STVEBX_U
+ void __builtin_vec_ste (vbc, signed long long, signed char *);
+ STVEBX STVEBX_BS
+ void __builtin_vec_ste (vbc, signed long long, unsigned char *);
+ STVEBX STVEBX_BU
+ void __builtin_vec_ste (vss, signed long long, signed short *);
+ STVEHX STVEHX_S
+ void __builtin_vec_ste (vus, signed long long, unsigned short *);
+ STVEHX STVEHX_U
+ void __builtin_vec_ste (vbs, signed long long, signed short *);
+ STVEHX STVEHX_BS
+ void __builtin_vec_ste (vbs, signed long long, unsigned short *);
+ STVEHX STVEHX_BU
+ void __builtin_vec_ste (vp, signed long long, signed short *);
+ STVEHX STVEHX_PS
+ void __builtin_vec_ste (vp, signed long long, unsigned short *);
+ STVEHX STVEHX_PU
+ void __builtin_vec_ste (vsi, signed long long, signed int *);
+ STVEWX STVEWX_S
+ void __builtin_vec_ste (vui, signed long long, unsigned int *);
+ STVEWX STVEWX_U
+ void __builtin_vec_ste (vbi, signed long long, signed int *);
+ STVEWX STVEWX_BS
+ void __builtin_vec_ste (vbi, signed long long, unsigned int *);
+ STVEWX STVEWX_BU
+ void __builtin_vec_ste (vf, signed long long, float *);
+ STVEWX STVEWX_F
+
+[VEC_STL, vec_stl, __builtin_vec_stl]
+ void __builtin_vec_stl (vsc, signed long long, vsc *);
+ STVXL_V16QI STVXL_VSC
+ void __builtin_vec_stl (vsc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC
+ void __builtin_vec_stl (vuc, signed long long, vuc *);
+ STVXL_V16QI STVXL_VUC
+ void __builtin_vec_stl (vuc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC
+ void __builtin_vec_stl (vbc, signed long long, vbc *);
+ STVXL_V16QI STVXL_VBC
+ void __builtin_vec_stl (vbc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC_B
+ void __builtin_vec_stl (vbc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC_B
+ void __builtin_vec_stl (vss, signed long long, vss *);
+ STVXL_V8HI STVXL_VSS
+ void __builtin_vec_stl (vss, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS
+ void __builtin_vec_stl (vus, signed long long, vus *);
+ STVXL_V8HI STVXL_VUS
+ void __builtin_vec_stl (vus, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US
+ void __builtin_vec_stl (vbs, signed long long, vbs *);
+ STVXL_V8HI STVXL_VBS
+ void __builtin_vec_stl (vbs, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS_B
+ void __builtin_vec_stl (vbs, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US_B
+ void __builtin_vec_stl (vp, signed long long, vp *);
+ STVXL_V8HI STVXL_P
+ void __builtin_vec_stl (vsi, signed long long, vsi *);
+ STVXL_V4SI STVXL_VSI
+ void __builtin_vec_stl (vsi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI
+ void __builtin_vec_stl (vui, signed long long, vui *);
+ STVXL_V4SI STVXL_VUI
+ void __builtin_vec_stl (vui, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI
+ void __builtin_vec_stl (vbi, signed long long, vbi *);
+ STVXL_V4SI STVXL_VBI
+ void __builtin_vec_stl (vbi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI_B
+ void __builtin_vec_stl (vbi, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI_B
+ void __builtin_vec_stl (vsll, signed long long, vsll *);
+ STVXL_V2DI STVXL_VSLL
+ void __builtin_vec_stl (vsll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL
+ void __builtin_vec_stl (vull, signed long long, vull *);
+ STVXL_V2DI STVXL_VULL
+ void __builtin_vec_stl (vull, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL
+ void __builtin_vec_stl (vbll, signed long long, vbll *);
+ STVXL_V2DI STVXL_VBLL
+ void __builtin_vec_stl (vbll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL_B
+ void __builtin_vec_stl (vbll, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL_B
+ void __builtin_vec_stl (vf, signed long long, vf *);
+ STVXL_V4SF STVXL_VF
+ void __builtin_vec_stl (vf, signed long long, float *);
+ STVXL_V4SF STVXL_F
+ void __builtin_vec_stl (vd, signed long long, vd *);
+ STVXL_V2DF STVXL_VD
+ void __builtin_vec_stl (vd, signed long long, double *);
+ STVXL_V2DF STVXL_D
+
+[VEC_STRIL, vec_stril, __builtin_vec_stril, _ARCH_PWR10]
+ vuc __builtin_vec_stril (vuc);
+ VSTRIBL VSTRIBL_U
+ vsc __builtin_vec_stril (vsc);
+ VSTRIBL VSTRIBL_S
+ vus __builtin_vec_stril (vus);
+ VSTRIHL VSTRIHL_U
+ vss __builtin_vec_stril (vss);
+ VSTRIHL VSTRIHL_S
+
+[VEC_STRIL_P, vec_stril_p, __builtin_vec_stril_p, _ARCH_PWR10]
+ signed int __builtin_vec_stril_p (vuc);
+ VSTRIBL_P VSTRIBL_PU
+ signed int __builtin_vec_stril_p (vsc);
+ VSTRIBL_P VSTRIBL_PS
+ signed int __builtin_vec_stril_p (vus);
+ VSTRIHL_P VSTRIHL_PU
+ signed int __builtin_vec_stril_p (vss);
+ VSTRIHL_P VSTRIHL_PS
+
+[VEC_STRIR, vec_strir, __builtin_vec_strir, _ARCH_PWR10]
+ vuc __builtin_vec_strir (vuc);
+ VSTRIBR VSTRIBR_U
+ vsc __builtin_vec_strir (vsc);
+ VSTRIBR VSTRIBR_S
+ vus __builtin_vec_strir (vus);
+ VSTRIHR VSTRIHR_U
+ vss __builtin_vec_strir (vss);
+ VSTRIHR VSTRIHR_S
+
+[VEC_STRIR_P, vec_strir_p, __builtin_vec_strir_p, _ARCH_PWR10]
+ signed int __builtin_vec_strir_p (vuc);
+ VSTRIBR_P VSTRIBR_PU
+ signed int __builtin_vec_strir_p (vsc);
+ VSTRIBR_P VSTRIBR_PS
+ signed int __builtin_vec_strir_p (vus);
+ VSTRIHR_P VSTRIHR_PU
+ signed int __builtin_vec_strir_p (vss);
+ VSTRIHR_P VSTRIHR_PS
+
+[VEC_STVLX, vec_stvlx, __builtin_vec_stvlx, __PPU__]
+ void __builtin_vec_stvlx (vbc, signed long long, vbc *);
+ STVLX STVLX_VBC
+ void __builtin_vec_stvlx (vsc, signed long long, vsc *);
+ STVLX STVLX_VSC
+ void __builtin_vec_stvlx (vsc, signed long long, signed char *);
+ STVLX STVLX_SC
+ void __builtin_vec_stvlx (vuc, signed long long, vuc *);
+ STVLX STVLX_VUC
+ void __builtin_vec_stvlx (vuc, signed long long, unsigned char *);
+ STVLX STVLX_UC
+ void __builtin_vec_stvlx (vbs, signed long long, vbs *);
+ STVLX STVLX_VBS
+ void __builtin_vec_stvlx (vss, signed long long, vss *);
+ STVLX STVLX_VSS
+ void __builtin_vec_stvlx (vss, signed long long, signed short *);
+ STVLX STVLX_SS
+ void __builtin_vec_stvlx (vus, signed long long, vus *);
+ STVLX STVLX_VUS
+ void __builtin_vec_stvlx (vus, signed long long, unsigned short *);
+ STVLX STVLX_US
+ void __builtin_vec_stvlx (vp, signed long long, vp *);
+ STVLX STVLX_VP
+ void __builtin_vec_stvlx (vbi, signed long long, vbi *);
+ STVLX STVLX_VBI
+ void __builtin_vec_stvlx (vsi, signed long long, vsi *);
+ STVLX STVLX_VSI
+ void __builtin_vec_stvlx (vsi, signed long long, signed int *);
+ STVLX STVLX_SI
+ void __builtin_vec_stvlx (vui, signed long long, vui *);
+ STVLX STVLX_VUI
+ void __builtin_vec_stvlx (vui, signed long long, unsigned int *);
+ STVLX STVLX_UI
+ void __builtin_vec_stvlx (vf, signed long long, vf *);
+ STVLX STVLX_VF
+ void __builtin_vec_stvlx (vf, signed long long, float *);
+ STVLX STVLX_F
+
+[VEC_STVLXL, vec_stvlxl, __builtin_vec_stvlxl, __PPU__]
+ void __builtin_vec_stvlxl (vbc, signed long long, vbc *);
+ STVLXL STVLXL_VBC
+ void __builtin_vec_stvlxl (vsc, signed long long, vsc *);
+ STVLXL STVLXL_VSC
+ void __builtin_vec_stvlxl (vsc, signed long long, signed char *);
+ STVLXL STVLXL_SC
+ void __builtin_vec_stvlxl (vuc, signed long long, vuc *);
+ STVLXL STVLXL_VUC
+ void __builtin_vec_stvlxl (vuc, signed long long, unsigned char *);
+ STVLXL STVLXL_UC
+ void __builtin_vec_stvlxl (vbs, signed long long, vbs *);
+ STVLXL STVLXL_VBS
+ void __builtin_vec_stvlxl (vss, signed long long, vss *);
+ STVLXL STVLXL_VSS
+ void __builtin_vec_stvlxl (vss, signed long long, signed short *);
+ STVLXL STVLXL_SS
+ void __builtin_vec_stvlxl (vus, signed long long, vus *);
+ STVLXL STVLXL_VUS
+ void __builtin_vec_stvlxl (vus, signed long long, unsigned short *);
+ STVLXL STVLXL_US
+ void __builtin_vec_stvlxl (vp, signed long long, vp *);
+ STVLXL STVLXL_VP
+ void __builtin_vec_stvlxl (vbi, signed long long, vbi *);
+ STVLXL STVLXL_VBI
+ void __builtin_vec_stvlxl (vsi, signed long long, vsi *);
+ STVLXL STVLXL_VSI
+ void __builtin_vec_stvlxl (vsi, signed long long, signed int *);
+ STVLXL STVLXL_SI
+ void __builtin_vec_stvlxl (vui, signed long long, vui *);
+ STVLXL STVLXL_VUI
+ void __builtin_vec_stvlxl (vui, signed long long, unsigned int *);
+ STVLXL STVLXL_UI
+ void __builtin_vec_stvlxl (vf, signed long long, vf *);
+ STVLXL STVLXL_VF
+ void __builtin_vec_stvlxl (vf, signed long long, float *);
+ STVLXL STVLXL_F
+
+[VEC_STVRX, vec_stvrx, __builtin_vec_stvrx, __PPU__]
+ void __builtin_vec_stvrx (vbc, signed long long, vbc *);
+ STVRX STVRX_VBC
+ void __builtin_vec_stvrx (vsc, signed long long, vsc *);
+ STVRX STVRX_VSC
+ void __builtin_vec_stvrx (vsc, signed long long, signed char *);
+ STVRX STVRX_SC
+ void __builtin_vec_stvrx (vuc, signed long long, vuc *);
+ STVRX STVRX_VUC
+ void __builtin_vec_stvrx (vuc, signed long long, unsigned char *);
+ STVRX STVRX_UC
+ void __builtin_vec_stvrx (vbs, signed long long, vbs *);
+ STVRX STVRX_VBS
+ void __builtin_vec_stvrx (vss, signed long long, vss *);
+ STVRX STVRX_VSS
+ void __builtin_vec_stvrx (vss, signed long long, signed short *);
+ STVRX STVRX_SS
+ void __builtin_vec_stvrx (vus, signed long long, vus *);
+ STVRX STVRX_VUS
+ void __builtin_vec_stvrx (vus, signed long long, unsigned short *);
+ STVRX STVRX_US
+ void __builtin_vec_stvrx (vp, signed long long, vp *);
+ STVRX STVRX_VP
+ void __builtin_vec_stvrx (vbi, signed long long, vbi *);
+ STVRX STVRX_VBI
+ void __builtin_vec_stvrx (vsi, signed long long, vsi *);
+ STVRX STVRX_VSI
+ void __builtin_vec_stvrx (vsi, signed long long, signed int *);
+ STVRX STVRX_SI
+ void __builtin_vec_stvrx (vui, signed long long, vui *);
+ STVRX STVRX_VUI
+ void __builtin_vec_stvrx (vui, signed long long, unsigned int *);
+ STVRX STVRX_UI
+ void __builtin_vec_stvrx (vf, signed long long, vf *);
+ STVRX STVRX_VF
+ void __builtin_vec_stvrx (vf, signed long long, float *);
+ STVRX STVRX_F
+
+[VEC_STVRXL, vec_stvrxl, __builtin_vec_stvrxl, __PPU__]
+ void __builtin_vec_stvrxl (vbc, signed long long, vbc *);
+ STVRXL STVRXL_VBC
+ void __builtin_vec_stvrxl (vsc, signed long long, vsc *);
+ STVRXL STVRXL_VSC
+ void __builtin_vec_stvrxl (vsc, signed long long, signed char *);
+ STVRXL STVRXL_SC
+ void __builtin_vec_stvrxl (vuc, signed long long, vuc *);
+ STVRXL STVRXL_VUC
+ void __builtin_vec_stvrxl (vuc, signed long long, unsigned char *);
+ STVRXL STVRXL_UC
+ void __builtin_vec_stvrxl (vbs, signed long long, vbs *);
+ STVRXL STVRXL_VBS
+ void __builtin_vec_stvrxl (vss, signed long long, vss *);
+ STVRXL STVRXL_VSS
+ void __builtin_vec_stvrxl (vss, signed long long, signed short *);
+ STVRXL STVRXL_SS
+ void __builtin_vec_stvrxl (vus, signed long long, vus *);
+ STVRXL STVRXL_VUS
+ void __builtin_vec_stvrxl (vus, signed long long, unsigned short *);
+ STVRXL STVRXL_US
+ void __builtin_vec_stvrxl (vp, signed long long, vp *);
+ STVRXL STVRXL_VP
+ void __builtin_vec_stvrxl (vbi, signed long long, vbi *);
+ STVRXL STVRXL_VBI
+ void __builtin_vec_stvrxl (vsi, signed long long, vsi *);
+ STVRXL STVRXL_VSI
+ void __builtin_vec_stvrxl (vsi, signed long long, signed int *);
+ STVRXL STVRXL_SI
+ void __builtin_vec_stvrxl (vui, signed long long, vui *);
+ STVRXL STVRXL_VUI
+ void __builtin_vec_stvrxl (vui, signed long long, unsigned int *);
+ STVRXL STVRXL_UI
+ void __builtin_vec_stvrxl (vf, signed long long, vf *);
+ STVRXL STVRXL_VF
+ void __builtin_vec_stvrxl (vf, signed long long, float *);
+ STVRXL STVRXL_F
+
+[VEC_STXVL, vec_xst_len, __builtin_vec_stxvl, _ARCH_PPC64_PWR9]
+ void __builtin_vec_stxvl (vsc, signed char *, unsigned long long);
+ STXVL STXVL_VSC
+ void __builtin_vec_stxvl (vuc, unsigned char *, unsigned long long);
+ STXVL STXVL_VUC
+ void __builtin_vec_stxvl (vss, signed short *, unsigned long long);
+ STXVL STXVL_VSS
+ void __builtin_vec_stxvl (vus, unsigned short *, unsigned long long);
+ STXVL STXVL_VUS
+ void __builtin_vec_stxvl (vsi, signed int *, unsigned long long);
+ STXVL STXVL_VSI
+ void __builtin_vec_stxvl (vui, unsigned int *, unsigned long long);
+ STXVL STXVL_VUI
+ void __builtin_vec_stxvl (vsll, signed long long *, unsigned long long);
+ STXVL STXVL_VSLL
+ void __builtin_vec_stxvl (vull, unsigned long long *, unsigned long long);
+ STXVL STXVL_VULL
+ void __builtin_vec_stxvl (vsq, signed __int128 *, unsigned long long);
+ STXVL STXVL_VSQ
+ void __builtin_vec_stxvl (vuq, unsigned __int128 *, unsigned long long);
+ STXVL STXVL_VUQ
+ void __builtin_vec_stxvl (vf, float *, unsigned long long);
+ STXVL STXVL_VF
+ void __builtin_vec_stxvl (vd, double *, unsigned long long);
+ STXVL STXVL_VD
+
+[VEC_SUB, vec_sub, __builtin_vec_sub]
+ vsc __builtin_vec_sub (vsc, vsc);
+ VSUBUBM VSUBUBM_VSC
+ vuc __builtin_vec_sub (vuc, vuc);
+ VSUBUBM VSUBUBM_VUC
+ vss __builtin_vec_sub (vss, vss);
+ VSUBUHM VSUBUHM_VSS
+ vus __builtin_vec_sub (vus, vus);
+ VSUBUHM VSUBUHM_VUS
+ vsi __builtin_vec_sub (vsi, vsi);
+ VSUBUWM VSUBUWM_VSI
+ vui __builtin_vec_sub (vui, vui);
+ VSUBUWM VSUBUWM_VUI
+ vsll __builtin_vec_sub (vsll, vsll);
+ VSUBUDM VSUBUDM_VSLL
+ vull __builtin_vec_sub (vull, vull);
+ VSUBUDM VSUBUDM_VULL
+ vsq __builtin_vec_sub (vsq, vsq);
+ VSUBUQM VSUBUQM_VSQ
+ vuq __builtin_vec_sub (vuq, vuq);
+ VSUBUQM VSUBUQM_VUQ
+ vf __builtin_vec_sub (vf, vf);
+ XVSUBSP
+ vd __builtin_vec_sub (vd, vd);
+ XVSUBDP
+
+[VEC_SUBC, vec_subc, __builtin_vec_subc]
+ vsi __builtin_vec_subc (vsi, vsi);
+ VSUBCUW VSUBCUW_VSI
+ vui __builtin_vec_subc (vui, vui);
+ VSUBCUW VSUBCUW_VUI
+ vsq __builtin_vec_subc (vsq, vsq);
+ VSUBCUQ VSUBCUQ_VSQ
+ vuq __builtin_vec_subc (vuq, vuq);
+ VSUBCUQ VSUBCUQ_VUQ
+
+[VEC_SUBS, vec_subs, __builtin_vec_subs]
+ vuc __builtin_vec_subs (vuc, vuc);
+ VSUBUBS
+ vsc __builtin_vec_subs (vsc, vsc);
+ VSUBSBS
+ vus __builtin_vec_subs (vus, vus);
+ VSUBUHS
+ vss __builtin_vec_subs (vss, vss);
+ VSUBSHS
+ vui __builtin_vec_subs (vui, vui);
+ VSUBUWS
+ vsi __builtin_vec_subs (vsi, vsi);
+ VSUBSWS
+
+[VEC_SUM2S, vec_sum2s, __builtin_vec_sum2s]
+ vsi __builtin_vec_sum2s (vsi, vsi);
+ VSUM2SWS
+
+[VEC_SUM4S, vec_sum4s, __builtin_vec_sum4s]
+ vui __builtin_vec_sum4s (vuc, vui);
+ VSUM4UBS
+ vsi __builtin_vec_sum4s (vsc, vui);
+ VSUM4SBS
+ vsi __builtin_vec_sum4s (vss, vsi);
+ VSUM4SHS
+
+[VEC_SUMS, vec_sums, __builtin_vec_sums]
+ vsi __builtin_vec_sums (vsi, vsi);
+ VSUMSWS
+
+[VEC_TERNARYLOGIC, vec_ternarylogic, __builtin_vec_xxeval, _ARCH_PWR10]
+ vuc __builtin_vec_xxeval (vuc, vuc, vuc, const int);
+ XXEVAL XXEVAL_VUC
+ vus __builtin_vec_xxeval (vus, vus, vus, const int);
+ XXEVAL XXEVAL_VUS
+ vui __builtin_vec_xxeval (vui, vui, vui, const int);
+ XXEVAL XXEVAL_VUI
+ vull __builtin_vec_xxeval (vull, vull, vull, const int);
+ XXEVAL XXEVAL_VULL
+ vuq __builtin_vec_xxeval (vuq, vuq, vuq, const int);
+ XXEVAL XXEVAL_VUQ
+
+[VEC_TEST_LSBB_ALL_ONES, vec_test_lsbb_all_ones, __builtin_vec_xvtlsbb_all_ones, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_ones (vuc);
+ XVTLSBB_ONES
+
+[VEC_TEST_LSBB_ALL_ZEROS, vec_test_lsbb_all_zeros, __builtin_vec_xvtlsbb_all_zeros, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_zeros (vuc);
+ XVTLSBB_ZEROS
+
+[VEC_TRUNC, vec_trunc, __builtin_vec_trunc]
+ vf __builtin_vec_trunc (vf);
+ XVRSPIZ
+ vd __builtin_vec_trunc (vd);
+ XVRDPIZ
+
+[VEC_TSTSFI_GT, SKIP, __builtin_dfp_dtstsfi_gt]
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal64);
+ TSTSFI_GT_DD
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal128);
+ TSTSFI_GT_TD
+
+[VEC_TSTSFI_EQ, SKIP, __builtin_dfp_dtstsfi_eq]
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal64);
+ TSTSFI_EQ_DD
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal128);
+ TSTSFI_EQ_TD
+
+[VEC_TSTSFI_LT, SKIP, __builtin_dfp_dtstsfi_lt]
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal64);
+ TSTSFI_LT_DD
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal128);
+ TSTSFI_LT_TD
+
+[VEC_TSTSFI_OV, SKIP, __builtin_dfp_dtstsfi_ov]
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal64);
+ TSTSFI_OV_DD
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal128);
+ TSTSFI_OV_TD
+
+[VEC_UNPACKH, vec_unpackh, __builtin_vec_unpackh]
+ vss __builtin_vec_unpackh (vsc);
+ VUPKHSB VUPKHSB_VSC
+ vbs __builtin_vec_unpackh (vbc);
+ VUPKHSB VUPKHSB_VBC
+ vsi __builtin_vec_unpackh (vss);
+ VUPKHSH VUPKHSH_VSS
+ vbi __builtin_vec_unpackh (vbs);
+ VUPKHSH VUPKHSH_VBS
+ vui __builtin_vec_unpackh (vp);
+ VUPKHPX
+ vsll __builtin_vec_unpackh (vsi);
+ VUPKHSW VUPKHSW_VSI
+ vbll __builtin_vec_unpackh (vbi);
+ VUPKHSW VUPKHSW_VBI
+ vd __builtin_vec_unpackh (vf);
+ DOUBLEH_V4SF VUPKHF
+
+[VEC_UNPACKL, vec_unpackl, __builtin_vec_unpackl]
+ vss __builtin_vec_unpackl (vsc);
+ VUPKLSB VUPKLSB_VSC
+ vbs __builtin_vec_unpackl (vbc);
+ VUPKLSB VUPKLSB_VBC
+ vsi __builtin_vec_unpackl (vss);
+ VUPKLSH VUPKLSH_VSS
+ vbi __builtin_vec_unpackl (vbs);
+ VUPKLSH VUPKLSH_VBS
+ vui __builtin_vec_unpackl (vp);
+ VUPKLPX
+ vsll __builtin_vec_unpackl (vsi);
+ VUPKLSW VUPKLSW_VSI
+ vbll __builtin_vec_unpackl (vbi);
+ VUPKLSW VUPKLSW_VBI
+ vd __builtin_vec_unpackl (vf);
+ DOUBLEL_V4SF VUPKLF
+
+[VEC_UNSIGNED, vec_unsigned, __builtin_vec_vunsigned]
+ vsi __builtin_vec_vunsigned (vf);
+ VEC_VUNSIGNED_V4SF
+ vsll __builtin_vec_vunsigned (vd);
+ VEC_VUNSIGNED_V2DF
+
+[VEC_UNSIGNED2, vec_unsigned2, __builtin_vec_vunsigned2]
+ vsi __builtin_vec_vunsigned2 (vd, vd);
+ VEC_VUNSIGNED2_V2DF
+
+[VEC_UNSIGNEDO, vec_unsignedo, __builtin_vec_vunsignedo]
+ vui __builtin_vec_vunsignedo (vd);
+ VEC_VUNSIGNEDO_V2DF
+
+; Not sure this should exist, but it does. This group is redundant with
+; vec_addec, but the next three don't have an alias.
+[VEC_VADDECUQ, vec_vaddecuq, __builtin_vec_vaddecuq]
+ vsq __builtin_vec_vaddecuq (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ2
+ vuq __builtin_vec_vaddecuq (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ2
+
+; Not sure this should exist, but it does.
+[VEC_VADDEUQM, vec_vaddeuqm, __builtin_vec_vaddeuqm]
+ vsq __builtin_vec_vaddeuqm (vsq, vsq, vsq);
+ VADDEUQM VADDEUQM_VSQ
+ vuq __builtin_vec_vaddeuqm (vuq, vuq, vuq);
+ VADDEUQM VADDEUQM_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBECUQ, vec_vsubecuq, __builtin_vec_vsubecuq]
+ vsq __builtin_vec_vsubecuq (vsq, vsq, vsq);
+ VSUBECUQ VSUBECUQ_VSQ
+ vuq __builtin_vec_vsubecuq (vuq, vuq, vuq);
+ VSUBECUQ VSUBECUQ_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBEUQM, vec_vsubeuqm, __builtin_vec_vsubeuqm]
+ vsq __builtin_vec_vsubeuqm (vsq, vsq, vsq);
+ VSUBEUQM VSUBEUQM_VSQ
+ vuq __builtin_vec_vsubeuqm (vuq, vuq, vuq);
+ VSUBEUQM VSUBEUQM_VUQ
+
+[VEC_VEE, vec_extract_exp, __builtin_vec_extract_exp, _ARCH_PWR9]
+ vui __builtin_vec_extract_exp (vf);
+ VEESP
+ vull __builtin_vec_extract_exp (vd);
+ VEEDP
+
+[VEC_VES, vec_extract_sig, __builtin_vec_extract_sig, _ARCH_PWR9]
+ vui __builtin_vec_extract_sig (vf);
+ VESSP
+ vull __builtin_vec_extract_sig (vd);
+ VESDP
+
+[VEC_VIE, vec_insert_exp, __builtin_vec_insert_exp, _ARCH_PWR9]
+ vf __builtin_vec_insert_exp (vf, vui);
+ VIESP VIESP_VF
+ vf __builtin_vec_insert_exp (vui, vui);
+ VIESP VIESP_VUI
+ vd __builtin_vec_insert_exp (vd, vull);
+ VIEDP VIEDP_VD
+ vd __builtin_vec_insert_exp (vull, vull);
+ VIEDP VIEDP_VULL
+
+[VEC_VSCEEQ, scalar_cmp_exp_eq, __builtin_vec_scalar_cmp_exp_eq, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_eq (double, double);
+ VSCEDPEQ
+ signed int __builtin_vec_scalar_cmp_exp_eq (_Float128, _Float128);
+ VSCEQPEQ
+
+[VEC_VSCEGT, scalar_cmp_exp_gt, __builtin_vec_scalar_cmp_exp_gt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_gt (double, double);
+ VSCEDPGT
+ signed int __builtin_vec_scalar_cmp_exp_gt (_Float128, _Float128);
+ VSCEQPGT
+
+[VEC_VSCELT, scalar_cmp_exp_lt, __builtin_vec_scalar_cmp_exp_lt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_lt (double, double);
+ VSCEDPLT
+ signed int __builtin_vec_scalar_cmp_exp_lt (_Float128, _Float128);
+ VSCEQPLT
+
+[VEC_VSCEUO, scalar_cmp_exp_unordered, __builtin_vec_scalar_cmp_exp_unordered, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_unordered (double, double);
+ VSCEDPUO
+ signed int __builtin_vec_scalar_cmp_exp_unordered (_Float128, _Float128);
+ VSCEQPUO
+
+[VEC_VSEE, scalar_extract_exp, __builtin_vec_scalar_extract_exp, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_exp (double);
+ VSEEDP
+ unsigned int __builtin_vec_scalar_extract_exp (_Float128);
+ VSEEQP
+
+[VEC_VSES, scalar_extract_sig, __builtin_vec_scalar_extract_sig, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_sig (double);
+ VSESDP
+ unsigned int __builtin_vec_scalar_extract_sig (_Float128);
+ VSESQP
+
+[VEC_VSIE, scalar_insert_exp, __builtin_vec_scalar_insert_exp, _ARCH_PWR9]
+ double __builtin_vec_scalar_insert_exp (unsigned int, unsigned int);
+ VSIEDP VSIEDP_UI
+ double __builtin_vec_scalar_insert_exp (double, unsigned int);
+ VSIEDP VSIEDP_D
+ _Float128 __builtin_vec_scalar_insert_exp (unsigned long long, unsigned long long);
+ VSIEQP VSIEQP_ULL
+ _Float128 __builtin_vec_scalar_insert_exp (_Float128, unsigned long long);
+ VSIEQP VSIEQP_F128
+
+[VEC_VSTDC, scalar_test_data_class, __builtin_vec_scalar_test_data_class, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_data_class (float, signed int);
+ VSTDCSP
+ bool __builtin_vec_scalar_test_data_class (double, signed int);
+ VSTDCDP
+ bool __builtin_vec_scalar_test_data_class (_Float128, signed int);
+ VSTDCQP
+
+[VEC_VSTDCN, scalar_test_neg, __builtin_vec_scalar_test_neg, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_neg (float);
+ VSTDCNSP
+ bool __builtin_vec_scalar_test_neg (double);
+ VSTDCNDP
+ bool __builtin_vec_scalar_test_neg (_Float128);
+ VSTDCNQP
+
+[VEC_VTDC, vec_test_data_class, __builtin_vec_test_data_class, _ARCH_PWR9]
+ vbi __builtin_vec_test_data_class (vf, signed int);
+ VTDCSP
+ vbll __builtin_vec_test_data_class (vd, signed int);
+ VTDCDP
+
+[VEC_XL, vec_xl, __builtin_vec_vsx_ld, __VSX__]
+ vsc __builtin_vec_vsx_ld (signed long long, vsc *);
+ LXVW4X_V4SI LXVW4X_VSC
+ vsc __builtin_vec_vsx_ld (signed long long, signed char *);
+ LXVW4X_V4SI LXVW4X_SC
+ vuc __builtin_vec_vsx_ld (signed long long, vuc *);
+ LXVW4X_V4SI LXVW4X_VUC
+ vuc __builtin_vec_vsx_ld (signed long long, unsigned char *);
+ LXVW4X_V4SI LXVW4X_UC
+ vss __builtin_vec_vsx_ld (signed long long, vss *);
+ LXVW4X_V4SI LXVW4X_VSS
+ vss __builtin_vec_vsx_ld (signed long long, signed short *);
+ LXVW4X_V4SI LXVW4X_SS
+ vus __builtin_vec_vsx_ld (signed long long, vus *);
+ LXVW4X_V4SI LXVW4X_VUS
+ vus __builtin_vec_vsx_ld (signed long long, unsigned short *);
+ LXVW4X_V4SI LXVW4X_US
+ vsi __builtin_vec_vsx_ld (signed long long, vsi *);
+ LXVW4X_V4SI LXVW4X_VSI
+ vsi __builtin_vec_vsx_ld (signed long long, signed int *);
+ LXVW4X_V4SI LXVW4X_SI
+ vui __builtin_vec_vsx_ld (signed long long, vui *);
+ LXVW4X_V4SI LXVW4X_VUI
+ vui __builtin_vec_vsx_ld (signed long long, unsigned int *);
+ LXVW4X_V4SI LXVW4X_UI
+ vsll __builtin_vec_vsx_ld (signed long long, vsll *);
+ LXVD2X_V2DI LXVD2X_VSLL
+ vsll __builtin_vec_vsx_ld (signed long long, signed long long *);
+ LXVD2X_V2DI LXVD2X_SLL
+ vull __builtin_vec_vsx_ld (signed long long, vull *);
+ LXVD2X_V2DI LXVD2X_VULL
+ vull __builtin_vec_vsx_ld (signed long long, unsigned long long *);
+ LXVD2X_V2DI LXVD2X_ULL
+ vsq __builtin_vec_vsx_ld (signed long long, vsq *);
+ LXVD2X_V1TI LXVD2X_VSQ
+ vsq __builtin_vec_vsx_ld (signed long long, signed __int128 *);
+ LXVD2X_V1TI LXVD2X_SQ
+ vuq __builtin_vec_vsx_ld (signed long long, unsigned __int128 *);
+ LXVD2X_V1TI LXVD2X_UQ
+ vf __builtin_vec_vsx_ld (signed long long, vf *);
+ LXVW4X_V4SF LXVW4X_VF
+ vf __builtin_vec_vsx_ld (signed long long, float *);
+ LXVW4X_V4SF LXVW4X_F
+ vd __builtin_vec_vsx_ld (signed long long, vd *);
+ LXVD2X_V2DF LXVD2X_VD
+ vd __builtin_vec_vsx_ld (signed long long, double *);
+ LXVD2X_V2DF LXVD2X_D
+
+[VEC_XL_BE, vec_xl_be, __builtin_vec_xl_be, __VSX__]
+ vsc __builtin_vec_xl_be (signed long long, vsc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VSC
+ vsc __builtin_vec_xl_be (signed long long, signed char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_SC
+ vuc __builtin_vec_xl_be (signed long long, vuc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VUC
+ vuc __builtin_vec_xl_be (signed long long, unsigned char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_UC
+ vss __builtin_vec_xl_be (signed long long, vss *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VSS
+ vss __builtin_vec_xl_be (signed long long, signed short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_SS
+ vus __builtin_vec_xl_be (signed long long, vus *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VUS
+ vus __builtin_vec_xl_be (signed long long, unsigned short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_US
+ vsi __builtin_vec_xl_be (signed long long, vsi *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VSI
+ vsi __builtin_vec_xl_be (signed long long, signed int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_SI
+ vui __builtin_vec_xl_be (signed long long, vui *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VUI
+ vui __builtin_vec_xl_be (signed long long, unsigned int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_UI
+ vsll __builtin_vec_xl_be (signed long long, vsll *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VSLL
+ vsll __builtin_vec_xl_be (signed long long, signed long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_SLL
+ vull __builtin_vec_xl_be (signed long long, vull *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VULL
+ vull __builtin_vec_xl_be (signed long long, unsigned long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_ULL
+ vsq __builtin_vec_xl_be (signed long long, signed __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_SQ
+ vuq __builtin_vec_xl_be (signed long long, unsigned __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_UQ
+ vf __builtin_vec_xl_be (signed long long, vf *);
+ LD_ELEMREV_V4SF LD_ELEMREV_VF
+ vf __builtin_vec_xl_be (signed long long, float *);
+ LD_ELEMREV_V4SF LD_ELEMREV_F
+ vd __builtin_vec_xl_be (signed long long, vd *);
+ LD_ELEMREV_V2DF LD_ELEMREV_VD
+ vd __builtin_vec_xl_be (signed long long, double *);
+ LD_ELEMREV_V2DF LD_ELEMREV_DD
+
+[VEC_XL_LEN_R, vec_xl_len_r, __builtin_vec_xl_len_r, _ARCH_PPC64_PWR9]
+ vuc __builtin_vec_xl_len_r (unsigned char *, unsigned long long);
+ XL_LEN_R
+
+[VEC_XL_SEXT, vec_xl_sext, __builtin_vec_xl_sext, _ARCH_PWR10]
+ vsq __builtin_vec_xl_sext (signed long long, signed char *);
+ SE_LXVRBX
+ vsq __builtin_vec_xl_sext (signed long long, signed short *);
+ SE_LXVRHX
+ vsq __builtin_vec_xl_sext (signed long long, signed int *);
+ SE_LXVRWX
+ vsq __builtin_vec_xl_sext (signed long long, signed long long *);
+ SE_LXVRDX
+
+[VEC_XL_ZEXT, vec_xl_zext, __builtin_vec_xl_zext, _ARCH_PWR10]
+ vuq __builtin_vec_xl_zext (signed long long, unsigned char *);
+ ZE_LXVRBX
+ vuq __builtin_vec_xl_zext (signed long long, unsigned short *);
+ ZE_LXVRHX
+ vuq __builtin_vec_xl_zext (signed long long, unsigned int *);
+ ZE_LXVRWX
+ vuq __builtin_vec_xl_zext (signed long long, unsigned long long *);
+ ZE_LXVRDX
+
+[VEC_XOR, vec_xor, __builtin_vec_xor]
+ vsc __builtin_vec_xor (vsc, vsc);
+ VXOR_V16QI
+ vuc __builtin_vec_xor (vuc, vuc);
+ VXOR_V16QI_UNS VXOR_VUC
+ vbc __builtin_vec_xor (vbc, vbc);
+ VXOR_V16QI_UNS VXOR_VBC
+ vss __builtin_vec_xor (vss, vss);
+ VXOR_V8HI
+ vus __builtin_vec_xor (vus, vus);
+ VXOR_V8HI_UNS VXOR_VUS
+ vbs __builtin_vec_xor (vbs, vbs);
+ VXOR_V8HI_UNS VXOR_VBS
+ vsi __builtin_vec_xor (vsi, vsi);
+ VXOR_V4SI
+ vui __builtin_vec_xor (vui, vui);
+ VXOR_V4SI_UNS VXOR_VUI
+ vbi __builtin_vec_xor (vbi, vbi);
+ VXOR_V4SI_UNS VXOR_VBI
+ vsll __builtin_vec_xor (vsll, vsll);
+ VXOR_V2DI
+ vull __builtin_vec_xor (vull, vull);
+ VXOR_V2DI_UNS VXOR_VULL
+ vbll __builtin_vec_xor (vbll, vbll);
+ VXOR_V2DI_UNS VXOR_VBLL
+ vf __builtin_vec_xor (vf, vf);
+ VXOR_V4SF
+ vd __builtin_vec_xor (vd, vd);
+ VXOR_V2DF
+
+[VEC_XST, vec_xst, __builtin_vec_vsx_st, __VSX__]
+ void __builtin_vec_vsx_st (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC
+ void __builtin_vec_vsx_st (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC
+ void __builtin_vec_vsx_st (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC
+ void __builtin_vec_vsx_st (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC
+ void __builtin_vec_vsx_st (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC
+ void __builtin_vec_vsx_st (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S
+ void __builtin_vec_vsx_st (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U
+ void __builtin_vec_vsx_st (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS
+ void __builtin_vec_vsx_st (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS
+ void __builtin_vec_vsx_st (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS
+ void __builtin_vec_vsx_st (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US
+ void __builtin_vec_vsx_st (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS
+ void __builtin_vec_vsx_st (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S
+ void __builtin_vec_vsx_st (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U
+ void __builtin_vec_vsx_st (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP
+ void __builtin_vec_vsx_st (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI
+ void __builtin_vec_vsx_st (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI
+ void __builtin_vec_vsx_st (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI
+ void __builtin_vec_vsx_st (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI
+ void __builtin_vec_vsx_st (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI
+ void __builtin_vec_vsx_st (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S
+ void __builtin_vec_vsx_st (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U
+ void __builtin_vec_vsx_st (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL
+ void __builtin_vec_vsx_st (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL
+ void __builtin_vec_vsx_st (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL
+ void __builtin_vec_vsx_st (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL
+ void __builtin_vec_vsx_st (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL
+ void __builtin_vec_vsx_st (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF
+ void __builtin_vec_vsx_st (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F
+ void __builtin_vec_vsx_st (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD
+ void __builtin_vec_vsx_st (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D
+
+[VEC_XST_BE, vec_xst_be, __builtin_vec_xst_be, __VSX__]
+ void __builtin_vec_xst_be (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC_BE
+ void __builtin_vec_xst_be (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S_BE
+ void __builtin_vec_xst_be (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U_BE
+ void __builtin_vec_xst_be (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS_BE
+ void __builtin_vec_xst_be (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS_BE
+ void __builtin_vec_xst_be (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS_BE
+ void __builtin_vec_xst_be (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US_BE
+ void __builtin_vec_xst_be (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS_BE
+ void __builtin_vec_xst_be (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S_BE
+ void __builtin_vec_xst_be (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U_BE
+ void __builtin_vec_xst_be (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP_BE
+ void __builtin_vec_xst_be (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI_BE
+ void __builtin_vec_xst_be (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI_BE
+ void __builtin_vec_xst_be (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI_BE
+ void __builtin_vec_xst_be (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S_BE
+ void __builtin_vec_xst_be (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U_BE
+ void __builtin_vec_xst_be (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL_BE
+ void __builtin_vec_xst_be (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL_BE
+ void __builtin_vec_xst_be (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL_BE
+ void __builtin_vec_xst_be (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL_BE
+ void __builtin_vec_xst_be (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL_BE
+ void __builtin_vec_xst_be (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF_BE
+ void __builtin_vec_xst_be (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F_BE
+ void __builtin_vec_xst_be (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD_BE
+ void __builtin_vec_xst_be (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D_BE
+
+[VEC_XST_LEN_R, vec_xst_len_r, __builtin_vec_xst_len_r, _ARCH_PPC64_PWR9]
+ void __builtin_vec_xst_len_r (vuc, unsigned char *, unsigned long long);
+ XST_LEN_R
+
+[VEC_XST_TRUNC, vec_xst_trunc, __builtin_vec_xst_trunc, _ARCH_PWR10]
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed char *);
+ TR_STXVRBX TR_STXVRBX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned char *);
+ TR_STXVRBX TR_STXVRBX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed short *);
+ TR_STXVRHX TR_STXVRHX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned short *);
+ TR_STXVRHX TR_STXVRHX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed int *);
+ TR_STXVRWX TR_STXVRWX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned int *);
+ TR_STXVRWX TR_STXVRWX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed long long *);
+ TR_STXVRDX TR_STXVRDX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned long long *);
+ TR_STXVRDX TR_STXVRDX_U
+
+[VEC_XXPERMDI, vec_xxpermdi, __builtin_vsx_xxpermdi, __VSX__]
+ vsc __builtin_vsx_xxpermdi (vsc, vsc, const int);
+ XXPERMDI_16QI XXPERMDI_VSC
+ vuc __builtin_vsx_xxpermdi (vuc, vuc, const int);
+ XXPERMDI_16QI XXPERMDI_VUC
+ vss __builtin_vsx_xxpermdi (vss, vss, const int);
+ XXPERMDI_8HI XXPERMDI_VSS
+ vus __builtin_vsx_xxpermdi (vus, vus, const int);
+ XXPERMDI_8HI XXPERMDI_VUS
+ vsi __builtin_vsx_xxpermdi (vsi, vsi, const int);
+ XXPERMDI_4SI XXPERMDI_VSI
+ vui __builtin_vsx_xxpermdi (vui, vui, const int);
+ XXPERMDI_4SI XXPERMDI_VUI
+ vsll __builtin_vsx_xxpermdi (vsll, vsll, const int);
+ XXPERMDI_2DI XXPERMDI_VSLL
+ vull __builtin_vsx_xxpermdi (vull, vull, const int);
+ XXPERMDI_2DI XXPERMDI_VULL
+ vf __builtin_vsx_xxpermdi (vf, vf, const int);
+ XXPERMDI_4SF XXPERMDI_VF
+ vd __builtin_vsx_xxpermdi (vd, vd, const int);
+ XXPERMDI_2DF XXPERMDI_VD
+
+[VEC_XXSLDWI, vec_xxsldwi, __builtin_vsx_xxsldwi, __VSX__]
+ vsc __builtin_vsx_xxsldwi (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC2
+ vuc __builtin_vsx_xxsldwi (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC2
+ vss __builtin_vsx_xxsldwi (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS2
+ vus __builtin_vsx_xxsldwi (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS2
+ vsi __builtin_vsx_xxsldwi (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI2
+ vui __builtin_vsx_xxsldwi (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI2
+ vsll __builtin_vsx_xxsldwi (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL2
+ vull __builtin_vsx_xxsldwi (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL2
+ vf __builtin_vsx_xxsldwi (vf, vf, const int);
+ XXSLDWI_4SF XXSLDWI_VF2
+ vd __builtin_vsx_xxsldwi (vd, vd, const int);
+ XXSLDWI_2DF XXSLDWI_VD2
^ permalink raw reply [flat|nested] 4+ messages in thread
* [gcc(refs/users/wschmidt/heads/builtins4)] rs6000: Add remaining overloads
@ 2020-11-02 22:08 William Schmidt
0 siblings, 0 replies; 4+ messages in thread
From: William Schmidt @ 2020-11-02 22:08 UTC (permalink / raw)
To: gcc-cvs
https://gcc.gnu.org/g:022608cfafdafeacb536d1c21d5a1ae3b47e85f2
commit 022608cfafdafeacb536d1c21d5a1ae3b47e85f2
Author: Bill Schmidt <wschmidt@linux.ibm.com>
Date: Mon Nov 2 09:55:43 2020 -0500
rs6000: Add remaining overloads
2020-11-02 Bill Schmidt <wschmidt@linux.ibm.com>
* config/rs6000/rs6000-overload.def: Add remaining overloads.
Diff:
---
gcc/config/rs6000/rs6000-overload.def | 3628 +++++++++++++++++++++++++++++++++
1 file changed, 3628 insertions(+)
diff --git a/gcc/config/rs6000/rs6000-overload.def b/gcc/config/rs6000/rs6000-overload.def
index 7c28cdcb84c..82037644157 100644
--- a/gcc/config/rs6000/rs6000-overload.def
+++ b/gcc/config/rs6000/rs6000-overload.def
@@ -74,8 +74,3636 @@
; a semicolon are also treated as blank lines.
+[CRYPTO_PERMXOR, SKIP, __builtin_crypto_vpermxor]
+ vuc __builtin_crypto_vpermxor (vuc, vuc, vuc);
+ VPERMXOR_V16QI
+ vus __builtin_crypto_vpermxor (vus, vus, vus);
+ VPERMXOR_V8HI
+ vui __builtin_crypto_vpermxor (vui, vui, vui);
+ VPERMXOR_V4SI
+ vull __builtin_crypto_vpermxor (vull, vull, vull);
+ VPERMXOR_V2DI
+
+[CRYPTO_PMSUM, SKIP, __builtin_crypto_vpmsum]
+ vuc __builtin_crypto_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_C
+ vus __builtin_crypto_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_C
+ vui __builtin_crypto_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_C
+ vull __builtin_crypto_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_C
+
+[SCAL_CMPB, SKIP, __builtin_cmpb]
+ unsigned int __builtin_cmpb (unsigned int, unsigned int);
+ CMPB_32
+ unsigned long long __builtin_cmpb (unsigned long long, unsigned long long);
+ CMPB
+
[VEC_ABS, vec_abs, __builtin_vec_abs]
vsc __builtin_vec_abs (vsc);
ABS_V16QI
vss __builtin_vec_abs (vss);
ABS_V8HI
+ vsi __builtin_vec_abs (vsi);
+ ABS_V4SI
+ vsll __builtin_vec_abs (vsll);
+ ABS_V2DI
+ vf __builtin_vec_abs (vf);
+ ABS_V4SF
+ vd __builtin_vec_abs (vd);
+ XVABSDP
+
+[VEC_ABSD, vec_absd, __builtin_vec_vadu, _ARCH_PWR9]
+ vuc __builtin_vec_vadu (vuc, vuc);
+ VADUB
+ vus __builtin_vec_vadu (vus, vus);
+ VADUH
+ vui __builtin_vec_vadu (vui, vui);
+ VADUW
+
+[VEC_ABSS, vec_abss, __builtin_vec_abss]
+ vsc __builtin_vec_abss (vsc);
+ ABSS_V16QI
+ vss __builtin_vec_abss (vss);
+ ABSS_V8HI
+ vsi __builtin_vec_abss (vsi);
+ ABSS_V4SI
+
+[VEC_ADD, vec_add, __builtin_vec_add]
+ vsc __builtin_vec_add (vsc, vsc);
+ VADDUBM VADDUBM_VSC
+ vuc __builtin_vec_add (vuc, vuc);
+ VADDUBM VADDUBM_VUC
+ vss __builtin_vec_add (vss, vss);
+ VADDUHM VADDUHM_VSS
+ vus __builtin_vec_add (vus, vus);
+ VADDUHM VADDUHM_VUS
+ vsi __builtin_vec_add (vsi, vsi);
+ VADDUWM VADDUWM_VSI
+ vui __builtin_vec_add (vui, vui);
+ VADDUWM VADDUWM_VUI
+ vsll __builtin_vec_add (vsll, vsll);
+ VADDUDM VADDUDM_VSLL
+ vull __builtin_vec_add (vull, vull);
+ VADDUDM VADDUDM_VULL
+ vsq __builtin_vec_add (vsq, vsq);
+ VADDUQM VADDUQM_VSQ
+ vuq __builtin_vec_add (vuq, vuq);
+ VADDUQM VADDUQM_VUQ
+ vf __builtin_vec_add (vf, vf);
+ XVADDSP
+ vd __builtin_vec_add (vd, vd);
+ XVADDDP
+
+[VEC_ADDC, vec_addc, __builtin_vec_addc]
+ vsi __builtin_vec_addc (vsi, vsi);
+ VADDCUW VADDCUW_VSI
+ vui __builtin_vec_addc (vui, vui);
+ VADDCUW VADDCUW_VUI
+ vsq __builtin_vec_addc (vsq, vsq);
+ VADDCUQ VADDCUQ_VSQ
+ vuq __builtin_vec_addc (vuq, vuq);
+ VADDCUQ VADDCUQ_VUQ
+
+; TODO: Note that the entry for VEC_ADDEC currently gets ignored in
+; altivec_resolve_overloaded_builtin. There are also forms for
+; vsi and vui arguments, but rather than building a define_expand
+; for the instruction sequence generated for those, we do some RTL
+; hackery. Revisit whether we can remove that. For now, keep this
+; much of the entry here to generate the #define, at least.
+[VEC_ADDEC, vec_addec, __builtin_vec_addec]
+ vsq __builtin_vec_addec (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ
+ vuq __builtin_vec_addec (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ
+
+[VEC_ADDS, vec_adds, __builtin_vec_adds]
+ vuc __builtin_vec_adds (vuc, vuc);
+ VADDUBS
+ vsc __builtin_vec_adds (vsc, vsc);
+ VADDSBS
+ vus __builtin_vec_adds (vus, vus);
+ VADDUHS
+ vss __builtin_vec_adds (vss, vss);
+ VADDSHS
+ vui __builtin_vec_adds (vui, vui);
+ VADDUWS
+ vsi __builtin_vec_adds (vsi, vsi);
+ VADDSWS
+
+[VEC_ANDC, vec_andc, __builtin_vec_andc]
+ vbc __builtin_vec_andc (vbc, vbc);
+ VANDC_V16QI_UNS VANDC_VBC
+ vsc __builtin_vec_andc (vsc, vsc);
+ VANDC_V16QI
+ vuc __builtin_vec_andc (vuc, vuc);
+ VANDC_V16QI_UNS VANDC_VUC
+ vbs __builtin_vec_andc (vbs, vbs);
+ VANDC_V8HI_UNS VANDC_VBS
+ vss __builtin_vec_andc (vss, vss);
+ VANDC_V8HI
+ vus __builtin_vec_andc (vus, vus);
+ VANDC_V8HI_UNS VANDC_VUS
+ vbi __builtin_vec_andc (vbi, vbi);
+ VANDC_V4SI_UNS VANDC_VBI
+ vsi __builtin_vec_andc (vsi, vsi);
+ VANDC_V4SI
+ vui __builtin_vec_andc (vui, vui);
+ VANDC_V4SI_UNS VANDC_VUI
+ vbll __builtin_vec_andc (vbll, vbll);
+ VANDC_V4SI_UNS VANDC_VBLL
+ vsll __builtin_vec_andc (vsll, vsll);
+ VANDC_V2DI
+ vull __builtin_vec_andc (vull, vull);
+ VANDC_V2DI_UNS VANDC_VULL
+ vf __builtin_vec_andc (vf, vf);
+ VANDC_V4SF
+ vd __builtin_vec_andc (vd, vd);
+ VANDC_V2DF
+
+[VEC_AVG, vec_avg, __builtin_vec_avg]
+ vsc __builtin_vec_avg (vsc, vsc);
+ VAVGSB
+ vuc __builtin_vec_avg (vuc, vuc);
+ VAVGUB
+ vss __builtin_vec_avg (vss, vss);
+ VAVGSH
+ vus __builtin_vec_avg (vus, vus);
+ VAVGUH
+ vsi __builtin_vec_avg (vsi, vsi);
+ VAVGSW
+ vui __builtin_vec_avg (vui, vui);
+ VAVGUW
+
+[VEC_BLENDV, vec_blendv, __builtin_vec_xxblend, _ARCH_PWR10]
+ vsc __builtin_vec_xxblend (vsc, vsc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VSC
+ vuc __builtin_vec_xxblend (vuc, vuc, vuc);
+ VXXBLEND_V16QI VXXBLEND_VUC
+ vss __builtin_vec_xxblend (vss, vss, vus);
+ VXXBLEND_V8HI VXXBLEND_VSS
+ vus __builtin_vec_xxblend (vus, vus, vus);
+ VXXBLEND_V8HI VXXBLEND_VUS
+ vsi __builtin_vec_xxblend (vsi, vsi, vui);
+ VXXBLEND_V4SI VXXBLEND_VSI
+ vui __builtin_vec_xxblend (vui, vui, vui);
+ VXXBLEND_V4SI VXXBLEND_VUI
+ vsll __builtin_vec_xxblend (vsll, vsll, vull);
+ VXXBLEND_V2DI VXXBLEND_VSLL
+ vull __builtin_vec_xxblend (vull, vull, vull);
+ VXXBLEND_V2DI VXXBLEND_VULL
+ vf __builtin_vec_xxblend (vf, vf, vui);
+ VXXBLEND_V4SF
+ vd __builtin_vec_xxblend (vd, vd, vull);
+ VXXBLEND_V2DF
+
+[VEC_BPERM, vec_bperm, __builtin_vec_vbperm_api, _ARCH_PWR8]
+ vull __builtin_vec_vbperm_api (vull, vuc);
+ VBPERMD VBPERMD_VULL
+ vull __builtin_vec_vbperm_api (vuq, vuc);
+ VBPERMD VBPERMD_VUQ
+ vuc __builtin_vec_vbperm_api (vuc, vuc);
+ VBPERMQ2
+
+[VEC_CEIL, vec_ceil, __builtin_vec_ceil]
+ vf __builtin_vec_ceil (vf);
+ XVRSPIP
+ vd __builtin_vec_ceil (vd);
+ XVRDPIP
+
+[VEC_CFUGE, vec_cfuge, __builtin_vec_cfuge, _ARCH_PWR10]
+ vull __builtin_vec_cfuge (vull, vull);
+ VCFUGED
+
+[VEC_CIPHER_BE, vec_cipher_be, __builtin_vec_vcipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipher_be (vuc, vuc);
+ VCIPHER_BE
+
+[VEC_CIPHERLAST_BE, vec_cipherlast_be, __builtin_vec_vcipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vcipherlast_be (vuc, vuc);
+ VCIPHERLAST_BE
+
+[VEC_CLRL, vec_clrl, __builtin_vec_clrl, _ARCH_PWR10]
+ vsc __builtin_vec_clrl (vsc, unsigned int);
+ VCLRLB VCLRLB_S
+ vuc __builtin_vec_clrl (vuc, unsigned int);
+ VCLRLB VCLRLB_U
+
+[VEC_CLRR, vec_clrr, __builtin_vec_clrr, _ARCH_PWR10]
+ vsc __builtin_vec_clrr (vsc, unsigned int);
+ VCLRRB VCLRRB_S
+ vuc __builtin_vec_clrr (vuc, unsigned int);
+ VCLRRB VCLRRB_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPAE_P, SKIP, __builtin_vec_vcmpae_p]
+ signed int __builtin_vec_vcmpae_p (vsc, vsc);
+ VCMPAEB_P VCMPAEB_VSC_P
+ signed int __builtin_vec_vcmpae_p (vuc, vuc);
+ VCMPAEB_P VCMPAEB_VUC_P
+ signed int __builtin_vec_vcmpae_p (vbc, vbc);
+ VCMPAEB_P VCMPAEB_VBC_P
+ signed int __builtin_vec_vcmpae_p (vss, vss);
+ VCMPAEH_P VCMPAEH_VSS_P
+ signed int __builtin_vec_vcmpae_p (vus, vus);
+ VCMPAEH_P VCMPAEH_VUS_P
+ signed int __builtin_vec_vcmpae_p (vbs, vbs);
+ VCMPAEH_P VCMPAEH_VBS_P
+ signed int __builtin_vec_vcmpae_p (vp, vp);
+ VCMPAEH_P VCMPAEH_VP_P
+ signed int __builtin_vec_vcmpae_p (vsi, vsi);
+ VCMPAEW_P VCMPAEW_VSI_P
+ signed int __builtin_vec_vcmpae_p (vui, vui);
+ VCMPAEW_P VCMPAEW_VUI_P
+ signed int __builtin_vec_vcmpae_p (vbi, vbi);
+ VCMPAEW_P VCMPAEW_VBI_P
+ signed int __builtin_vec_vcmpae_p (vsll, vsll);
+ VCMPAED_P VCMPAED_VSLL_P
+ signed int __builtin_vec_vcmpae_p (vull, vull);
+ VCMPAED_P VCMPAED_VULL_P
+ signed int __builtin_vec_vcmpae_p (vbll, vbll);
+ VCMPAED_P VCMPAED_VBLL_P
+ signed int __builtin_vec_vcmpae_p (vf, vf);
+ VCMPAEFP_P
+ signed int __builtin_vec_vcmpae_p (vd, vd);
+ VCMPAEDP_P
+
+[VEC_CMPB, vec_cmpb, __builtin_vec_cmpb]
+ vsi __builtin_vec_cmpb (vf, vf);
+ VCMPBFP
+
+[VEC_CMPEQ, vec_cmpeq, __builtin_vec_cmpeq]
+ vbc __builtin_vec_cmpeq (vsc, vsc);
+ VCMPEQUB VCMPEQUB_VSC
+ vbc __builtin_vec_cmpeq (vuc, vuc);
+ VCMPEQUB VCMPEQUB_VUC
+ vbc __builtin_vec_cmpeq (vbc, vbc);
+ VCMPEQUB VCMPEQUB_VBC
+ vbs __builtin_vec_cmpeq (vss, vss);
+ VCMPEQUH VCMPEQUH_VSS
+ vbs __builtin_vec_cmpeq (vus, vus);
+ VCMPEQUH VCMPEQUH_VUS
+ vbs __builtin_vec_cmpeq (vbs, vbs);
+ VCMPEQUH VCMPEQUH_VBS
+ vbi __builtin_vec_cmpeq (vsi, vsi);
+ VCMPEQUW VCMPEQUW_VSI
+ vbi __builtin_vec_cmpeq (vui, vui);
+ VCMPEQUW VCMPEQUW_VUI
+ vbi __builtin_vec_cmpeq (vbi, vbi);
+ VCMPEQUW VCMPEQUW_VBI
+ vbll __builtin_vec_cmpeq (vsll, vsll);
+ VCMPEQUD VCMPEQUD_VSLL
+ vbll __builtin_vec_cmpeq (vull, vull);
+ VCMPEQUD VCMPEQUD_VULL
+ vbll __builtin_vec_cmpeq (vbll, vbll);
+ VCMPEQUD VCMPEQUD_VBLL
+ vbi __builtin_vec_cmpeq (vf, vf);
+ XVCMPEQSP
+ vbll __builtin_vec_cmpeq (vd, vd);
+ XVCMPEQDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPEQ_P, SKIP, __builtin_vec_vcmpeq_p]
+ signed int __builtin_vec_vcmpeq_p (signed int, vuc, vuc);
+ VCMPEQUB_P VCMPEQUB_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsc, vsc);
+ VCMPEQUB_P VCMPEQUB_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbc, vbc);
+ VCMPEQUB_P VCMPEQUB_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vus, vus);
+ VCMPEQUH_P VCMPEQUH_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vss, vss);
+ VCMPEQUH_P VCMPEQUH_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbs, vbs);
+ VCMPEQUH_P VCMPEQUH_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vp, vp);
+ VCMPEQUH_P VCMPEQUH_PP
+ signed int __builtin_vec_vcmpeq_p (signed int, vui, vui);
+ VCMPEQUW_P VCMPEQUW_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsi, vsi);
+ VCMPEQUW_P VCMPEQUW_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbi, vbi);
+ VCMPEQUW_P VCMPEQUW_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vull, vull);
+ VCMPEQUD_P VCMPEQUD_PU
+ signed int __builtin_vec_vcmpeq_p (signed int, vsll, vsll);
+ VCMPEQUD_P VCMPEQUD_PS
+ signed int __builtin_vec_vcmpeq_p (signed int, vbll, vbll);
+ VCMPEQUD_P VCMPEQUD_PB
+ signed int __builtin_vec_vcmpeq_p (signed int, vf, vf);
+ XVCMPEQSP_P
+ signed int __builtin_vec_vcmpeq_p (signed int, vd, vd);
+ XVCMPEQDP_P
+
+[VEC_CMPEQB, SKIP, __builtin_byte_in_set]
+ signed int __builtin_byte_in_set (unsigned char, unsigned long long);
+ CMPEQB
+
+[VEC_CMPGE, vec_cmpge, __builtin_vec_cmpge]
+ vbc __builtin_vec_cmpge (vsc, vsc);
+ CMPGE_16QI CMPGE_16QI_VSC
+ vbc __builtin_vec_cmpge (vuc, vuc);
+ CMPGE_16QI CMPGE_16QI_VUC
+ vbs __builtin_vec_cmpge (vss, vss);
+ CMPGE_8HI CMPGE_8HI_VSS
+ vbs __builtin_vec_cmpge (vus, vus);
+ CMPGE_8HI CMPGE_8HI_VUS
+ vbi __builtin_vec_cmpge (vsi, vsi);
+ CMPGE_4SI CMPGE_4SI_VSI
+ vbi __builtin_vec_cmpge (vui, vui);
+ CMPGE_4SI CMPGE_4SI_VUI
+ vbll __builtin_vec_cmpge (vsll, vsll);
+ CMPGE_2DI CMPGE_2DI_VSLL
+ vbll __builtin_vec_cmpge (vull, vull);
+ CMPGE_2DI CMPGE_2DI_VULL
+ vbi __builtin_vec_cmpge (vf, vf);
+ XVCMPGESP
+ vbll __builtin_vec_cmpge (vd, vd);
+ XVCMPGEDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; Also, cmpge is the same as cmpgt for all cases except floating point.
+; There is further code to deal with this special case in
+; altivec_build_resolved_builtin. TODO: Make sure this is still true.
+[VEC_CMPGE_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P VCMPGTUB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P VCMPGTSB_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P VCMPGTUH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P VCMPGTSH_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P VCMPGTUW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P VCMPGTSW_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P VCMPGTUD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P VCMPGTSD_PR
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGESP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGEDP_P
+
+[VEC_CMPGT, vec_cmpgt, __builtin_vec_cmpgt]
+ vbc __builtin_vec_cmpgt (vsc, vsc);
+ VCMPGTSB
+ vbc __builtin_vec_cmpgt (vuc, vuc);
+ VCMPGTUB
+ vbs __builtin_vec_cmpgt (vss, vss);
+ VCMPGTSH
+ vbs __builtin_vec_cmpgt (vus, vus);
+ VCMPGTUH
+ vbi __builtin_vec_cmpgt (vsi, vsi);
+ VCMPGTSW
+ vbi __builtin_vec_cmpgt (vui, vui);
+ VCMPGTUW
+ vbll __builtin_vec_cmpgt (vsll, vsll);
+ VCMPGTSD
+ vbll __builtin_vec_cmpgt (vull, vull);
+ VCMPGTUD
+ vbi __builtin_vec_cmpgt (vf, vf);
+ XVCMPGTSP
+ vbll __builtin_vec_cmpgt (vd, vd);
+ XVCMPGTDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPGT_P, SKIP, __builtin_vec_vcmpgt_p]
+ signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+ VCMPGTUB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+ VCMPGTSB_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+ VCMPGTUH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+ VCMPGTSH_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+ VCMPGTUW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+ VCMPGTSW_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+ VCMPGTUD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+ VCMPGTSD_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+ XVCMPGTSP_P
+ signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+ XVCMPGTDP_P
+
+; Note that there is no entry for VEC_CMPLE. VEC_CMPLE is implemented
+; using VEC_CMPGE with reversed arguments in altivec.h.
+
+; Note that there is no entry for VEC_CMPLT. VEC_CMPLT is implemented
+; using VEC_CMPGT with reversed arguments in altivec.h.
+
+[VEC_CMPNE, vec_cmpne, __builtin_vec_cmpne]
+ vbc __builtin_vec_cmpne (vbc, vbc);
+ VCMPNEB VCMPNEB_VBC
+ vbc __builtin_vec_cmpne (vsc, vsc);
+ VCMPNEB VCMPNEB_VSC
+ vbc __builtin_vec_cmpne (vuc, vuc);
+ VCMPNEB VCMPNEB_VUC
+ vbs __builtin_vec_cmpne (vbs, vbs);
+ VCMPNEH VCMPNEH_VBS
+ vbs __builtin_vec_cmpne (vss, vss);
+ VCMPNEH VCMPNEH_VSS
+ vbs __builtin_vec_cmpne (vus, vus);
+ VCMPNEH VCMPNEH_VUS
+ vbi __builtin_vec_cmpne (vbi, vbi);
+ VCMPNEW VCMPNEW_VBI
+ vbi __builtin_vec_cmpne (vsi, vsi);
+ VCMPNEW VCMPNEW_VSI
+ vbi __builtin_vec_cmpne (vui, vui);
+ VCMPNEW VCMPNEW_VUI
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNE_P, SKIP, __builtin_vec_vcmpne_p]
+ signed int __builtin_vec_vcmpne_p (vsc, vsc);
+ VCMPNEB_P VCMPNEB_VSC_P
+ signed int __builtin_vec_vcmpne_p (vuc, vuc);
+ VCMPNEB_P VCMPNEB_VUC_P
+ signed int __builtin_vec_vcmpne_p (vbc, vbc);
+ VCMPNEB_P VCMPNEB_VBC_P
+ signed int __builtin_vec_vcmpne_p (vss, vss);
+ VCMPNEH_P VCMPNEH_VSS_P
+ signed int __builtin_vec_vcmpne_p (vus, vus);
+ VCMPNEH_P VCMPNEH_VUS_P
+ signed int __builtin_vec_vcmpne_p (vbs, vbs);
+ VCMPNEH_P VCMPNEH_VBS_P
+ signed int __builtin_vec_vcmpne_p (vp, vp);
+ VCMPNEH_P VCMPNEH_VP_P
+ signed int __builtin_vec_vcmpne_p (vsi, vsi);
+ VCMPNEW_P VCMPNEW_VSI_P
+ signed int __builtin_vec_vcmpne_p (vui, vui);
+ VCMPNEW_P VCMPNEW_VUI_P
+ signed int __builtin_vec_vcmpne_p (vbi, vbi);
+ VCMPNEW_P VCMPNEW_VBI_P
+ signed int __builtin_vec_vcmpne_p (vsll, vsll);
+ VCMPNED_P VCMPNED_VSLL_P
+ signed int __builtin_vec_vcmpne_p (vull, vull);
+ VCMPNED_P VCMPNED_VULL_P
+ signed int __builtin_vec_vcmpne_p (vbll, vbll);
+ VCMPNED_P VCMPNED_VBLL_P
+ signed int __builtin_vec_vcmpne_p (vf, vf);
+ VCMPNEFP_P
+ signed int __builtin_vec_vcmpne_p (vd, vd);
+ VCMPNEDP_P
+
+[VEC_CMPNEZ, vec_cmpnez, __builtin_vec_cmpnez, _ARCH_PWR9]
+ vbc __builtin_vec_cmpnez (vsc, vsc);
+ CMPNEZB CMPNEZB_S
+ vbc __builtin_vec_cmpnez (vuc, vuc);
+ CMPNEZB CMPNEZB_U
+ vbs __builtin_vec_cmpnez (vss, vss);
+ CMPNEZH CMPNEZH_S
+ vbs __builtin_vec_cmpnez (vus, vus);
+ CMPNEZH CMPNEZH_U
+ vbi __builtin_vec_cmpnez (vsi, vsi);
+ CMPNEZW CMPNEZW_S
+ vbi __builtin_vec_cmpnez (vui, vui);
+ CMPNEZW CMPNEZW_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNEZ_P, SKIP, __builtin_vec_vcmpnez_p]
+ signed int __builtin_vec_vcmpnez_p (signed int, vsc, vsc);
+ VCMPNEZB_P VCMPNEZB_VSC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vuc, vuc);
+ VCMPNEZB_P VCMPNEZB_VUC_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vss, vss);
+ VCMPNEZH_P VCMPNEZH_VSS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vus, vus);
+ VCMPNEZH_P VCMPNEZH_VUS_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vsi, vsi);
+ VCMPNEZW_P VCMPNEZW_VSI_P
+ signed int __builtin_vec_vcmpnez_p (signed int, vui, vui);
+ VCMPNEZW_P VCMPNEZW_VUI_P
+
+[VEC_CMPRB, SKIP, __builtin_byte_in_range]
+ signed int __builtin_byte_in_range (unsigned char, unsigned int);
+ CMPRB
+
+[VEC_CMPRB2, SKIP, __builtin_byte_in_either_range]
+ signed int __builtin_byte_in_either_range (unsigned char, unsigned int);
+ CMPRB2
+
+[VEC_CNTLZ, vec_cntlz, __builtin_vec_vclz, _ARCH_PWR8]
+ vsc __builtin_vec_vclz (vsc);
+ VCLZB VCLZB_S
+ vuc __builtin_vec_vclz (vuc);
+ VCLZB VCLZB_U
+ vss __builtin_vec_vclz (vss);
+ VCLZH VCLZH_S
+ vus __builtin_vec_vclz (vus);
+ VCLZH VCLZH_U
+ vsi __builtin_vec_vclz (vsi);
+ VCLZW VCLZW_S
+ vui __builtin_vec_vclz (vui);
+ VCLZW VCLZW_U
+ vsll __builtin_vec_vclz (vsll);
+ VCLZD VCLZD_S
+ vull __builtin_vec_vclz (vull);
+ VCLZD VCLZD_U
+
+[VEC_CNTLZM, vec_cntlzm, __builtin_vec_vclzdm, _ARCH_PWR10]
+ vull __builtin_vec_vclzdm (vull, vull);
+ VCLZDM
+
+[VEC_CNTTZM, vec_cnttzm, __builtin_vec_vctzdm, _ARCH_PWR10]
+ vull __builtin_vec_vctzdm (vull, vull);
+ CNTTZDM
+
+[VEC_CNTLZ_LSBB, vec_cntlz_lsbb, __builtin_vec_vclzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vclzlsbb (vsc);
+ VCLZLSBB_V16QI VCLZLSBB_VSC
+ signed int __builtin_vec_vclzlsbb (vuc);
+ VCLZLSBB_V16QI VCLZLSBB_VUC
+ signed int __builtin_vec_vclzlsbb (vss);
+ VCLZLSBB_V8HI VCLZLSBB_VSS
+ signed int __builtin_vec_vclzlsbb (vus);
+ VCLZLSBB_V8HI VCLZLSBB_VUS
+ signed int __builtin_vec_vclzlsbb (vsi);
+ VCLZLSBB_V4SI VCLZLSBB_VSI
+ signed int __builtin_vec_vclzlsbb (vui);
+ VCLZLSBB_V4SI VCLZLSBB_VUI
+
+[VEC_CNTM, vec_cntm, __builtin_vec_cntm, _ARCH_PWR10]
+ unsigned long long __builtin_vec_cntm (vuc, const int);
+ VCNTMBB
+ unsigned long long __builtin_vec_cntm (vus, const int);
+ VCNTMBH
+ unsigned long long __builtin_vec_cntm (vui, const int);
+ VCNTMBW
+ unsigned long long __builtin_vec_cntm (vull, const int);
+ VCNTMBD
+
+[VEC_CNTTZ, vec_cnttz, __builtin_vec_vctz, _ARCH_PWR9]
+ vsc __builtin_vec_vctz (vsc);
+ VCTZB VCTZB_S
+ vuc __builtin_vec_vctz (vuc);
+ VCTZB VCTZB_U
+ vss __builtin_vec_vctz (vss);
+ VCTZH VCTZH_S
+ vus __builtin_vec_vctz (vus);
+ VCTZH VCTZH_U
+ vsi __builtin_vec_vctz (vsi);
+ VCTZW VCTZW_S
+ vui __builtin_vec_vctz (vui);
+ VCTZW VCTZW_U
+ vsll __builtin_vec_vctz (vsll);
+ VCTZD VCTZD_S
+ vull __builtin_vec_vctz (vull);
+ VCTZD VCTZD_U
+
+[VEC_CNTTZ_LSBB, vec_cnttz_lsbb, __builtin_vec_vctzlsbb, _ARCH_PWR9]
+ signed int __builtin_vec_vctzlsbb (vsc);
+ VCTZLSBB_V16QI VCTZLSBB_VSC
+ signed int __builtin_vec_vctzlsbb (vuc);
+ VCTZLSBB_V16QI VCTZLSBB_VUC
+ signed int __builtin_vec_vctzlsbb (vss);
+ VCTZLSBB_V8HI VCTZLSBB_VSS
+ signed int __builtin_vec_vctzlsbb (vus);
+ VCTZLSBB_V8HI VCTZLSBB_VUS
+ signed int __builtin_vec_vctzlsbb (vsi);
+ VCTZLSBB_V4SI VCTZLSBB_VSI
+ signed int __builtin_vec_vctzlsbb (vui);
+ VCTZLSBB_V4SI VCTZLSBB_VUI
+
+[VEC_CONVERT_4F32_8I16, SKIP, __builtin_vec_convert_4f32_8i16]
+ vus __builtin_vec_convert_4f32_8i16 (vf, vf);
+ CONVERT_4F32_8I16
+
+[VEC_CONVERT_4F32_8F16, vec_pack_to_short_fp32, __builtin_vec_convert_4f32_8f16, _ARCH_PWR9]
+ vus __builtin_vec_convert_4f32_8f16 (vf, vf);
+ CONVERT_4F32_8F16
+
+[VEC_COPYSIGN, vec_cpsgn, __builtin_vec_copysign]
+ vf __builtin_vec_copysign (vf, vf);
+ CPSGNSP
+ vd __builtin_vec_copysign (vd, vd);
+ CPSGNDP
+
+[VEC_CTF, vec_ctf, __builtin_vec_ctf]
+ vf __builtin_vec_ctf (vsi, const int);
+ VCFSX
+ vf __builtin_vec_ctf (vui, const int);
+ VCFUX
+ vd __builtin_vec_ctf (vsll, const int);
+ XVCVSXDDP_SCALE
+ vd __builtin_vec_ctf (vull, const int);
+ XVCVUXDDP_SCALE
+
+[VEC_CTS, vec_cts, __builtin_vec_cts]
+ vsi __builtin_vec_cts (vf, const int);
+ VCTSXS
+ vsll __builtin_vec_cts (vd, const int);
+ XVCVDPSXDS_SCALE
+
+[VEC_CTU, vec_ctu, __builtin_vec_ctu]
+ vui __builtin_vec_ctu (vf, const int);
+ VCTUXS
+ vull __builtin_vec_ctu (vd, const int);
+ XVCVDPUXDS_SCALE
+
+[VEC_DIV, vec_div, __builtin_vec_div, __VSX__]
+ vsll __builtin_vec_div (vsll, vsll);
+ DIV_V2DI
+ vull __builtin_vec_div (vull, vull);
+ UDIV_V2DI
+ vf __builtin_vec_div (vf, vf);
+ XVDIVSP
+ vd __builtin_vec_div (vd, vd);
+ XVDIVDP
+
+[VEC_DOUBLE, vec_double, __builtin_vec_double]
+ vd __builtin_vec_double (vsll);
+ XVCVSXDDP
+ vd __builtin_vec_double (vull);
+ XVCVUXDDP
+
+[VEC_DOUBLEE, vec_doublee, __builtin_vec_doublee]
+ vd __builtin_vec_doublee (vsi);
+ DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vui);
+ UNS_DOUBLEE_V4SI
+ vd __builtin_vec_doublee (vf);
+ DOUBLEE_V4SF
+
+[VEC_DOUBLEH, vec_doubleh, __builtin_vec_doubleh]
+ vd __builtin_vec_doubleh (vsi);
+ DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vui);
+ UNS_DOUBLEH_V4SI
+ vd __builtin_vec_doubleh (vf);
+ DOUBLEH_V4SF
+
+[VEC_DOUBLEL, vec_doublel, __builtin_vec_doublel]
+ vd __builtin_vec_doublel (vsi);
+ DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vui);
+ UNS_DOUBLEL_V4SI
+ vd __builtin_vec_doublel (vf);
+ DOUBLEL_V4SF
+
+[VEC_DOUBLEO, vec_doubleo, __builtin_vec_doubleo]
+ vd __builtin_vec_doubleo (vsi);
+ DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vui);
+ UNS_DOUBLEO_V4SI
+ vd __builtin_vec_doubleo (vf);
+ DOUBLEO_V4SF
+
+[VEC_DST, vec_dst, __builtin_vec_dst]
+ void __builtin_vec_dst (unsigned char *, const int, const int);
+ DST DST_UC
+ void __builtin_vec_dst (signed char *, const int, const int);
+ DST DST_SC
+ void __builtin_vec_dst (unsigned short *, const int, const int);
+ DST DST_US
+ void __builtin_vec_dst (signed short *, const int, const int);
+ DST DST_SS
+ void __builtin_vec_dst (unsigned int *, const int, const int);
+ DST DST_UI
+ void __builtin_vec_dst (signed int *, const int, const int);
+ DST DST_SI
+ void __builtin_vec_dst (unsigned long long *, const int, const int);
+ DST DST_ULL
+ void __builtin_vec_dst (signed long long *, const int, const int);
+ DST DST_SLL
+ void __builtin_vec_dst (float *, const int, const int);
+ DST DST_F
+ void __builtin_vec_dst (vuc *, const int, const int);
+ DST DST_VUC
+ void __builtin_vec_dst (vsc *, const int, const int);
+ DST DST_VSC
+ void __builtin_vec_dst (vbc *, const int, const int);
+ DST DST_VBC
+ void __builtin_vec_dst (vus *, const int, const int);
+ DST DST_VUS
+ void __builtin_vec_dst (vss *, const int, const int);
+ DST DST_VSS
+ void __builtin_vec_dst (vbs *, const int, const int);
+ DST DST_VBS
+ void __builtin_vec_dst (vp *, const int, const int);
+ DST DST_VP
+ void __builtin_vec_dst (vui *, const int, const int);
+ DST DST_VUI
+ void __builtin_vec_dst (vsi *, const int, const int);
+ DST DST_VSI
+ void __builtin_vec_dst (vbi *, const int, const int);
+ DST DST_VBI
+ void __builtin_vec_dst (vf *, const int, const int);
+ DST DST_VF
+
+[VEC_DSTST, vec_dstst, __builtin_vec_dstst]
+ void __builtin_vec_dstst (unsigned char *, const int, const int);
+ DSTST DSTST_UC
+ void __builtin_vec_dstst (signed char *, const int, const int);
+ DSTST DSTST_SC
+ void __builtin_vec_dstst (unsigned short *, const int, const int);
+ DSTST DSTST_US
+ void __builtin_vec_dstst (signed short *, const int, const int);
+ DSTST DSTST_SS
+ void __builtin_vec_dstst (unsigned int *, const int, const int);
+ DSTST DSTST_UI
+ void __builtin_vec_dstst (signed int *, const int, const int);
+ DSTST DSTST_SI
+ void __builtin_vec_dstst (unsigned long long *, const int, const int);
+ DSTST DSTST_ULL
+ void __builtin_vec_dstst (signed long long *, const int, const int);
+ DSTST DSTST_SLL
+ void __builtin_vec_dstst (float *, const int, const int);
+ DSTST DSTST_F
+ void __builtin_vec_dstst (vuc *, const int, const int);
+ DSTST DSTST_VUC
+ void __builtin_vec_dstst (vsc *, const int, const int);
+ DSTST DSTST_VSC
+ void __builtin_vec_dstst (vbc *, const int, const int);
+ DSTST DSTST_VBC
+ void __builtin_vec_dstst (vus *, const int, const int);
+ DSTST DSTST_VUS
+ void __builtin_vec_dstst (vss *, const int, const int);
+ DSTST DSTST_VSS
+ void __builtin_vec_dstst (vbs *, const int, const int);
+ DSTST DSTST_VBS
+ void __builtin_vec_dstst (vp *, const int, const int);
+ DSTST DSTST_VP
+ void __builtin_vec_dstst (vui *, const int, const int);
+ DSTST DSTST_VUI
+ void __builtin_vec_dstst (vsi *, const int, const int);
+ DSTST DSTST_VSI
+ void __builtin_vec_dstst (vbi *, const int, const int);
+ DSTST DSTST_VBI
+ void __builtin_vec_dstst (vf *, const int, const int);
+ DSTST DSTST_VF
+
+[VEC_DSTSTT, vec_dststt, __builtin_vec_dststt]
+ void __builtin_vec_dststt (unsigned char *, const int, const int);
+ DSTSTT DSTSTT_UC
+ void __builtin_vec_dststt (signed char *, const int, const int);
+ DSTSTT DSTSTT_SC
+ void __builtin_vec_dststt (unsigned short *, const int, const int);
+ DSTSTT DSTSTT_US
+ void __builtin_vec_dststt (signed short *, const int, const int);
+ DSTSTT DSTSTT_SS
+ void __builtin_vec_dststt (unsigned int *, const int, const int);
+ DSTSTT DSTSTT_UI
+ void __builtin_vec_dststt (signed int *, const int, const int);
+ DSTSTT DSTSTT_SI
+ void __builtin_vec_dststt (unsigned long long *, const int, const int);
+ DSTSTT DSTSTT_ULL
+ void __builtin_vec_dststt (signed long long *, const int, const int);
+ DSTSTT DSTSTT_SLL
+ void __builtin_vec_dststt (float *, const int, const int);
+ DSTSTT DSTSTT_F
+ void __builtin_vec_dststt (vuc *, const int, const int);
+ DSTSTT DSTSTT_VUC
+ void __builtin_vec_dststt (vsc *, const int, const int);
+ DSTSTT DSTSTT_VSC
+ void __builtin_vec_dststt (vbc *, const int, const int);
+ DSTSTT DSTSTT_VBC
+ void __builtin_vec_dststt (vus *, const int, const int);
+ DSTSTT DSTSTT_VUS
+ void __builtin_vec_dststt (vss *, const int, const int);
+ DSTSTT DSTSTT_VSS
+ void __builtin_vec_dststt (vbs *, const int, const int);
+ DSTSTT DSTSTT_VBS
+ void __builtin_vec_dststt (vp *, const int, const int);
+ DSTSTT DSTSTT_VP
+ void __builtin_vec_dststt (vui *, const int, const int);
+ DSTSTT DSTSTT_VUI
+ void __builtin_vec_dststt (vsi *, const int, const int);
+ DSTSTT DSTSTT_VSI
+ void __builtin_vec_dststt (vbi *, const int, const int);
+ DSTSTT DSTSTT_VBI
+ void __builtin_vec_dststt (vf *, const int, const int);
+ DSTSTT DSTSTT_VF
+
+[VEC_DSTT, vec_dstt, __builtin_vec_dstt]
+ void __builtin_vec_dstt (unsigned char *, const int, const int);
+ DSTT DSTT_UC
+ void __builtin_vec_dstt (signed char *, const int, const int);
+ DSTT DSTT_SC
+ void __builtin_vec_dstt (unsigned short *, const int, const int);
+ DSTT DSTT_US
+ void __builtin_vec_dstt (signed short *, const int, const int);
+ DSTT DSTT_SS
+ void __builtin_vec_dstt (unsigned int *, const int, const int);
+ DSTT DSTT_UI
+ void __builtin_vec_dstt (signed int *, const int, const int);
+ DSTT DSTT_SI
+ void __builtin_vec_dstt (unsigned long long *, const int, const int);
+ DSTT DSTT_ULL
+ void __builtin_vec_dstt (signed long long *, const int, const int);
+ DSTT DSTT_SLL
+ void __builtin_vec_dstt (float *, const int, const int);
+ DSTT DSTT_F
+ void __builtin_vec_dstt (vuc *, const int, const int);
+ DSTT DSTT_VUC
+ void __builtin_vec_dstt (vsc *, const int, const int);
+ DSTT DSTT_VSC
+ void __builtin_vec_dstt (vbc *, const int, const int);
+ DSTT DSTT_VBC
+ void __builtin_vec_dstt (vus *, const int, const int);
+ DSTT DSTT_VUS
+ void __builtin_vec_dstt (vss *, const int, const int);
+ DSTT DSTT_VSS
+ void __builtin_vec_dstt (vbs *, const int, const int);
+ DSTT DSTT_VBS
+ void __builtin_vec_dstt (vp *, const int, const int);
+ DSTT DSTT_VP
+ void __builtin_vec_dstt (vui *, const int, const int);
+ DSTT DSTT_VUI
+ void __builtin_vec_dstt (vsi *, const int, const int);
+ DSTT DSTT_VSI
+ void __builtin_vec_dstt (vbi *, const int, const int);
+ DSTT DSTT_VBI
+ void __builtin_vec_dstt (vf *, const int, const int);
+ DSTT DSTT_VF
+
+[VEC_EQV, vec_eqv, __builtin_vec_eqv, _ARCH_PWR8]
+ vsc __builtin_vec_eqv (vsc, vsc);
+ EQV_V16QI
+ vuc __builtin_vec_eqv (vuc, vuc);
+ EQV_V16QI_UNS EQV_V16QI_VUC
+ vbc __builtin_vec_eqv (vbc, vbc);
+ EQV_V16QI_UNS EQV_V16QI_VBC
+ vss __builtin_vec_eqv (vss, vss);
+ EQV_V8HI
+ vus __builtin_vec_eqv (vus, vus);
+ EQV_V8HI_UNS EQV_V8HI_VUS
+ vbs __builtin_vec_eqv (vbs, vbs);
+ EQV_V8HI_UNS EQV_V8HI_VBS
+ vsi __builtin_vec_eqv (vsi, vsi);
+ EQV_V4SI
+ vui __builtin_vec_eqv (vui, vui);
+ EQV_V4SI_UNS EQV_V4SI_VUI
+ vbi __builtin_vec_eqv (vbi, vbi);
+ EQV_V4SI_UNS EQV_V4SI_VBI
+ vsll __builtin_vec_eqv (vsll, vsll);
+ EQV_V2DI
+ vull __builtin_vec_eqv (vull, vull);
+ EQV_V2DI_UNS EQV_V2DI_VULL
+ vbll __builtin_vec_eqv (vbll, vbll);
+ EQV_V2DI_UNS EQV_V2DI_VBLL
+ vf __builtin_vec_eqv (vf, vf);
+ EQV_V4SF
+ vd __builtin_vec_eqv (vd, vd);
+ EQV_V2DF
+
+[VEC_EXPANDM, vec_expandm, __builtin_vec_vexpandm, _ARCH_PWR10]
+ vuc __builtin_vec_vexpandm (vuc);
+ VEXPANDMB
+ vus __builtin_vec_vexpandm (vus);
+ VEXPANDMH
+ vui __builtin_vec_vexpandm (vui);
+ VEXPANDMW
+ vull __builtin_vec_vexpandm (vull);
+ VEXPANDMD
+ vuq __builtin_vec_vexpandm (vuq);
+ VEXPANDMQ
+
+[VEC_EXPTE, vec_expte, __builtin_vec_expte]
+ vf __builtin_vec_expte (vf);
+ VEXPTEFP
+
+[VEC_EXTRACTM, vec_extractm, __builtin_vec_vextractm, _ARCH_PWR10]
+ signed int __builtin_vec_vextractm (vuc);
+ VEXTRACTMB
+ signed int __builtin_vec_vextractm (vus);
+ VEXTRACTMH
+ signed int __builtin_vec_vextractm (vui);
+ VEXTRACTMW
+ signed int __builtin_vec_vextractm (vull);
+ VEXTRACTMD
+ signed int __builtin_vec_vextractm (vuq);
+ VEXTRACTMQ
+
+[VEC_EXTRACT_FP_FROM_SHORTH, vec_extract_fp32_from_shorth, __builtin_vec_vextract_fp_from_shorth, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shorth (vus);
+ VEXTRACT_FP_FROM_SHORTH
+
+[VEC_EXTRACT_FP_FROM_SHORTL, vec_extract_fp32_from_shortl, __builtin_vec_vextract_fp_from_shortl, _ARCH_PWR9]
+ vf __builtin_vec_vextract_fp_from_shortl (vus);
+ VEXTRACT_FP_FROM_SHORTL
+
+[VEC_EXTRACTH, vec_extracth, __builtin_vec_extracth, _ARCH_PWR10]
+ vull __builtin_vec_extracth (vuc, vuc, unsigned char);
+ VEXTRACTBR
+ vull __builtin_vec_extracth (vus, vus, unsigned char);
+ VEXTRACTHR
+ vull __builtin_vec_extracth (vui, vui, unsigned char);
+ VEXTRACTWR
+ vull __builtin_vec_extracth (vull, vull, unsigned char);
+ VEXTRACTDR
+
+[VEC_EXTRACTL, vec_extractl, __builtin_vec_extractl, _ARCH_PWR10]
+ vull __builtin_vec_extractl (vuc, vuc, unsigned char);
+ VEXTRACTBL
+ vull __builtin_vec_extractl (vus, vus, unsigned char);
+ VEXTRACTHL
+ vull __builtin_vec_extractl (vui, vui, unsigned char);
+ VEXTRACTWL
+ vull __builtin_vec_extractl (vull, vull, unsigned char);
+ VEXTRACTDL
+
+[VEC_EXTRACT4B, vec_extract4b, __builtin_vec_extract4b, _ARCH_PWR9]
+ vull __builtin_vec_extract4b (vuc, const int);
+ EXTRACT4B
+
+; TODO: Look like invalid return type conversions here (present before also).
+; Need versions of __builtin_altivec_vextu*[lr]x that return signed values
+; and floats.
+[VEC_EXTULX, vec_xlx, __builtin_vec_vextulx, _ARCH_PWR9]
+ signed char __builtin_vec_vextulx (unsigned int, vsc);
+ VEXTUBLX VEXTUBLX_S
+ unsigned char __builtin_vec_vextulx (unsigned int, vuc);
+ VEXTUBLX VEXTUBLX_U
+ signed short __builtin_vec_vextulx (unsigned int, vss);
+ VEXTUHLX VEXTUHLX_S
+ unsigned short __builtin_vec_vextulx (unsigned int, vus);
+ VEXTUHLX VEXTUHLX_U
+ signed int __builtin_vec_vextulx (unsigned int, vsi);
+ VEXTUWLX VEXTUWLX_S
+ unsigned int __builtin_vec_vextulx (unsigned int, vui);
+ VEXTUWLX VEXTUWLX_U
+ float __builtin_vec_vextulx (unsigned int, vf);
+ VEXTUWLX VEXTUWLX_F
+
+; TODO: Look like invalid return type conversions here (present before also).
+; Need versions of __builtin_altivec_vextu*[lr]x that return signed values
+; and floats.
+[VEC_EXTURX, vec_xrx, __builtin_vec_vexturx, _ARCH_PWR9]
+ signed char __builtin_vec_vexturx (unsigned int, vsc);
+ VEXTUBRX VEXTUBRX_S
+ unsigned char __builtin_vec_vexturx (unsigned int, vuc);
+ VEXTUBRX VEXTUBRX_U
+ signed short __builtin_vec_vexturx (unsigned int, vss);
+ VEXTUHRX VEXTUHRX_S
+ unsigned short __builtin_vec_vexturx (unsigned int, vus);
+ VEXTUHRX VEXTUHRX_U
+ signed int __builtin_vec_vexturx (unsigned int, vsi);
+ VEXTUWRX VEXTUWRX_S
+ unsigned int __builtin_vec_vexturx (unsigned int, vui);
+ VEXTUWRX VEXTUWRX_U
+ float __builtin_vec_vexturx (unsigned int, vf);
+ VEXTUWRX VEXTUWRX_F
+
+[VEC_FIRSTMATCHINDEX, vec_first_match_index, __builtin_vec_first_match_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_index (vsc, vsc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_match_index (vuc, vuc);
+ VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_match_index (vss, vss);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_match_index (vus, vus);
+ VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_match_index (vsi, vsi);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_match_index (vui, vui);
+ VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VUI
+
+[VEC_FIRSTMATCHOREOSINDEX, vec_first_match_or_eos_index, __builtin_vec_first_match_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_match_or_eos_index (vsc, vsc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_match_or_eos_index (vuc, vuc);
+ VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_match_or_eos_index (vss, vss);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_match_or_eos_index (vus, vus);
+ VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_match_or_eos_index (vsi, vsi);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_match_or_eos_index (vui, vui);
+ VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VUI
+
+[VEC_FIRSTMISMATCHINDEX, vec_first_mismatch_index, __builtin_vec_first_mismatch_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_index (vsc, vsc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_index (vuc, vuc);
+ VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_index (vss, vss);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_index (vus, vus);
+ VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_index (vsi, vsi);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_index (vui, vui);
+ VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VUI
+
+[VEC_FIRSTMISMATCHOREOSINDEX, vec_first_mismatch_or_eos_index, __builtin_vec_first_mismatch_or_eos_index, _ARCH_PWR9]
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsc, vsc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VSC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vuc, vuc);
+ VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VUC
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vss, vss);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VSS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vus, vus);
+ VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VUS
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vsi, vsi);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VSI
+ unsigned int __builtin_vec_first_mismatch_or_eos_index (vui, vui);
+ VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VUI
+
+[VEC_FLOAT, vec_float, __builtin_vec_float]
+ vf __builtin_vec_float (vsi);
+ XVCVSXWSP_V4SF
+ vf __builtin_vec_float (vui);
+ XVCVUXWSP_V4SF
+
+[VEC_FLOAT2, vec_float2, __builtin_vec_float2]
+ vf __builtin_vec_float2 (vsll, vsll);
+ FLOAT2_V2DI
+ vf __builtin_vec_float2 (vull, vull);
+ UNS_FLOAT2_V2DI
+ vf __builtin_vec_float2 (vd, vd);
+ FLOAT2_V2DF
+
+[VEC_FLOATE, vec_floate, __builtin_vec_floate]
+ vf __builtin_vec_floate (vsll);
+ FLOATE_V2DI
+ vf __builtin_vec_floate (vull);
+ UNS_FLOATE_V2DI
+ vf __builtin_vec_floate (vd);
+ FLOATE_V2DF
+
+[VEC_FLOATO, vec_floato, __builtin_vec_floato]
+ vf __builtin_vec_floato (vsll);
+ FLOATO_V2DI
+ vf __builtin_vec_floato (vull);
+ UNS_FLOATO_V2DI
+ vf __builtin_vec_floato (vd);
+ FLOATO_V2DF
+
+[VEC_FLOOR, vec_floor, __builtin_vec_floor]
+ vf __builtin_vec_floor (vf);
+ XVRSPIM
+ vd __builtin_vec_floor (vd);
+ XVRDPIM
+
+[VEC_GB, vec_gb, __builtin_vec_vgbbd, _ARCH_PWR8]
+ vsc __builtin_vec_vgbbd (vsc);
+ VGBBD VGBBD_S
+ vuc __builtin_vec_vgbbd (vuc);
+ VGBBD VGBBD_U
+
+[VEC_GENBM, vec_genbm, __builtin_vec_mtvsrbm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrbm (unsigned long long);
+ MTVSRBM
+
+[VEC_GENHM, vec_genhm, __builtin_vec_mtvsrhm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrhm (unsigned long long);
+ MTVSRHM
+
+[VEC_GENWM, vec_genwm, __builtin_vec_mtvsrwm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrwm (unsigned long long);
+ MTVSRWM
+
+[VEC_GENDM, vec_gendm, __builtin_vec_mtvsrdm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrdm (unsigned long long);
+ MTVSRDM
+
+[VEC_GENQM, vec_genqm, __builtin_vec_mtvsrqm, _ARCH_PWR10]
+ vuc __builtin_vec_mtvsrqm (unsigned long long);
+ MTVSRQM
+
+[VEC_GENPCVM, vec_genpcvm, __builtin_vec_xxgenpcvm, _ARCH_PWR10]
+ vuc __builtin_vec_xxgenpcvm (vuc, const int);
+ XXGENPCVM_V16QI
+ vus __builtin_vec_xxgenpcvm (vus, const int);
+ XXGENPCVM_V8HI
+ vui __builtin_vec_xxgenpcvm (vui, const int);
+ XXGENPCVM_V4SI
+ vull __builtin_vec_xxgenpcvm (vull, const int);
+ XXGENPCVM_V2DI
+
+[VEC_GNB, vec_gnb, __builtin_vec_gnb, _ARCH_PWR10]
+ vull __builtin_vec_gnb (vuq, unsigned char);
+ VGNB
+
+[VEC_INSERTH, vec_inserth, __builtin_vec_inserth, _ARCH_PWR10]
+ vuc __builtin_vec_inserth (unsigned char, vuc, unsigned int);
+ VINSERTGPRBR
+ vuc __builtin_vec_inserth (vuc, vuc, unsigned int);
+ VINSERTVPRBR
+ vus __builtin_vec_inserth (unsigned short, vus, unsigned int);
+ VINSERTGPRHR
+ vus __builtin_vec_inserth (vus, vus, unsigned int);
+ VINSERTVPRHR
+ vui __builtin_vec_inserth (unsigned int, vui, unsigned int);
+ VINSERTGPRWR
+ vui __builtin_vec_inserth (vui, vui, unsigned int);
+ VINSERTVPRWR
+ vull __builtin_vec_inserth (unsigned long long, vull, unsigned int);
+ VINSERTGPRDR
+
+[VEC_INSERTL, vec_insertl, __builtin_vec_insertl, _ARCH_PWR10]
+ vuc __builtin_vec_insertl (unsigned char, vuc, unsigned int);
+ VINSERTGPRBL
+ vuc __builtin_vec_insertl (vuc, vuc, unsigned int);
+ VINSERTVPRBL
+ vus __builtin_vec_insertl (unsigned short, vus, unsigned int);
+ VINSERTGPRHL
+ vus __builtin_vec_insertl (vus, vus, unsigned int);
+ VINSERTVPRHL
+ vui __builtin_vec_insertl (unsigned int, vui, unsigned int);
+ VINSERTGPRWL
+ vui __builtin_vec_insertl (vui, vui, unsigned int);
+ VINSERTVPRWL
+ vull __builtin_vec_insertl (unsigned long long, vull, unsigned int);
+ VINSERTGPRDL
+
+[VEC_INSERT4B, vec_insert4b, __builtin_vec_insert4b, _ARCH_PWR9]
+ vuc __builtin_vec_insert4b (vsi, vuc, const int);
+ INSERT4B INSERT4B_S
+ vuc __builtin_vec_insert4b (vui, vuc, const int);
+ INSERT4B INSERT4B_U
+
+[VEC_LD, vec_ld, __builtin_vec_ld]
+ vsc __builtin_vec_ld (signed long long, vsc *);
+ LVX_V16QI LVX_V16QI_VSC
+ vsc __builtin_vec_ld (signed long long, signed char *);
+ LVX_V16QI LVX_V16QI_SC
+ vuc __builtin_vec_ld (signed long long, vuc *);
+ LVX_V16QI LVX_V16QI_VUC
+ vuc __builtin_vec_ld (signed long long, unsigned char *);
+ LVX_V16QI LVX_V16QI_UC
+ vbc __builtin_vec_ld (signed long long, vbc *);
+ LVX_V16QI LVX_V16QI_VBC
+ vss __builtin_vec_ld (signed long long, vss *);
+ LVX_V8HI LVX_V8HI_VSS
+ vss __builtin_vec_ld (signed long long, signed short *);
+ LVX_V8HI LVX_V8HI_SS
+ vus __builtin_vec_ld (signed long long, vus *);
+ LVX_V8HI LVX_V8HI_VUS
+ vus __builtin_vec_ld (signed long long, unsigned short *);
+ LVX_V8HI LVX_V8HI_US
+ vbs __builtin_vec_ld (signed long long, vbs *);
+ LVX_V8HI LVX_V8HI_VBS
+ vp __builtin_vec_ld (signed long long, vp *);
+ LVX_V8HI LVX_V8HI_VP
+ vsi __builtin_vec_ld (signed long long, vsi *);
+ LVX_V4SI LVX_V4SI_VSI
+ vsi __builtin_vec_ld (signed long long, signed int *);
+ LVX_V4SI LVX_V4SI_SI
+ vui __builtin_vec_ld (signed long long, vui *);
+ LVX_V4SI LVX_V4SI_VUI
+ vui __builtin_vec_ld (signed long long, unsigned int *);
+ LVX_V4SI LVX_V4SI_UI
+ vbi __builtin_vec_ld (signed long long, vbi *);
+ LVX_V4SI LVX_V4SI_VBI
+ vsll __builtin_vec_ld (signed long long, vsll *);
+ LVX_V2DI LVX_V2DI_VSLL
+ vsll __builtin_vec_ld (signed long long, signed long long *);
+ LVX_V2DI LVX_V2DI_SLL
+ vull __builtin_vec_ld (signed long long, vull *);
+ LVX_V2DI LVX_V2DI_VULL
+ vull __builtin_vec_ld (signed long long, unsigned long long *);
+ LVX_V2DI LVX_V2DI_ULL
+ vbll __builtin_vec_ld (signed long long, vbll *);
+ LVX_V2DI LVX_V2DI_VBLL
+ vsq __builtin_vec_ld (signed long long, const vsq *);
+ LVX_V1TI LVX_V1TI_VSQ
+ vuq __builtin_vec_ld (signed long long, const vuq *);
+ LVX_V1TI LVX_V1TI_VUQ
+ vsq __builtin_vec_ld (signed long long, __int128 *);
+ LVX_V1TI LVX_V1TI_TI
+ vuq __builtin_vec_ld (signed long long, unsigned __int128 *);
+ LVX_V1TI LVX_V1TI_UTI
+ vf __builtin_vec_ld (signed long long, vf *);
+ LVX_V4SF LVX_V4SF_VF
+ vf __builtin_vec_ld (signed long long, float *);
+ LVX_V4SF LVX_V4SF_F
+ vd __builtin_vec_ld (signed long long, vd *);
+ LVX_V2DF LVX_V2DF_VD
+ vd __builtin_vec_ld (signed long long, double *);
+ LVX_V2DF LVX_V2DF_D
+
+[VEC_LDE, vec_lde, __builtin_vec_lde]
+ vsc __builtin_vec_lde (signed long long, signed char *);
+ LVEBX LVEBX_SC
+ vuc __builtin_vec_lde (signed long long, unsigned char *);
+ LVEBX LVEBX_UC
+ vss __builtin_vec_lde (signed long long, signed short *);
+ LVEHX LVEHX_SS
+ vus __builtin_vec_lde (signed long long, unsigned short *);
+ LVEHX LVEHX_US
+ vsi __builtin_vec_lde (signed long long, signed int *);
+ LVEWX LVEWX_SI
+ vui __builtin_vec_lde (signed long long, unsigned int *);
+ LVEWX LVEWX_UI
+ vf __builtin_vec_lde (signed long long, float *);
+ LVEWX LVEWX_F
+
+[VEC_LDL, vec_ldl, __builtin_vec_ldl]
+ vsc __builtin_vec_ldl (signed long long, vsc *);
+ LVXL_V16QI LVXL_V16QI_VSC
+ vsc __builtin_vec_ldl (signed long long, signed char *);
+ LVXL_V16QI LVXL_V16QI_SC
+ vuc __builtin_vec_ldl (signed long long, vuc *);
+ LVXL_V16QI LVXL_V16QI_VUC
+ vuc __builtin_vec_ldl (signed long long, unsigned char *);
+ LVXL_V16QI LVXL_V16QI_UC
+ vbc __builtin_vec_ldl (signed long long, vbc *);
+ LVXL_V16QI LVXL_V16QI_VBC
+ vss __builtin_vec_ldl (signed long long, vss *);
+ LVXL_V8HI LVXL_V8HI_VSS
+ vss __builtin_vec_ldl (signed long long, signed short *);
+ LVXL_V8HI LVXL_V8HI_SS
+ vus __builtin_vec_ldl (signed long long, vus *);
+ LVXL_V8HI LVXL_V8HI_VUS
+ vus __builtin_vec_ldl (signed long long, unsigned short *);
+ LVXL_V8HI LVXL_V8HI_US
+ vbs __builtin_vec_ldl (signed long long, vbs *);
+ LVXL_V8HI LVXL_V8HI_VBS
+ vp __builtin_vec_ldl (signed long long, vp *);
+ LVXL_V8HI LVXL_V8HI_VP
+ vsi __builtin_vec_ldl (signed long long, vsi *);
+ LVXL_V4SI LVXL_V4SI_VSI
+ vsi __builtin_vec_ldl (signed long long, signed int *);
+ LVXL_V4SI LVXL_V4SI_SI
+ vui __builtin_vec_ldl (signed long long, vui *);
+ LVXL_V4SI LVXL_V4SI_VUI
+ vui __builtin_vec_ldl (signed long long, unsigned int *);
+ LVXL_V4SI LVXL_V4SI_UI
+ vbi __builtin_vec_ldl (signed long long, vbi *);
+ LVXL_V4SI LVXL_V4SI_VBI
+ vsll __builtin_vec_ldl (signed long long, vsll *);
+ LVXL_V2DI LVXL_V2DI_VSLL
+ vsll __builtin_vec_ldl (signed long long, signed long long *);
+ LVXL_V2DI LVXL_V2DI_SLL
+ vull __builtin_vec_ldl (signed long long, vull *);
+ LVXL_V2DI LVXL_V2DI_VULL
+ vull __builtin_vec_ldl (signed long long, unsigned long long *);
+ LVXL_V2DI LVXL_V2DI_ULL
+ vbll __builtin_vec_ldl (signed long long, vbll *);
+ LVXL_V2DI LVXL_V2DI_VBLL
+ vsq __builtin_vec_ldl (signed long long, const vsq *);
+ LVXL_V1TI LVXL_V1TI_VSQ
+ vuq __builtin_vec_ldl (signed long long, const vuq *);
+ LVXL_V1TI LVXL_V1TI_VUQ
+ vsq __builtin_vec_ldl (signed long long, __int128 *);
+ LVXL_V1TI LVXL_V1TI_TI
+ vuq __builtin_vec_ldl (signed long long, unsigned __int128 *);
+ LVXL_V1TI LVXL_V1TI_UTI
+ vf __builtin_vec_ldl (signed long long, vf *);
+ LVXL_V4SF LVXL_V4SF_VF
+ vf __builtin_vec_ldl (signed long long, float *);
+ LVXL_V4SF LVXL_V4SF_F
+ vd __builtin_vec_ldl (signed long long, vd *);
+ LVXL_V2DF LVXL_V2DF_VD
+ vd __builtin_vec_ldl (signed long long, double *);
+ LVXL_V2DF LVXL_V2DF_D
+
+[VEC_LOGE, vec_loge, __builtin_vec_loge]
+ vf __builtin_vec_loge (vf);
+ VLOGEFP
+
+[VEC_LVLX, vec_lvlx, __builtin_vec_lvlx, __PPU__]
+ vbc __builtin_vec_lvlx (signed long long, vbc *);
+ LVLX LVLX_VBC
+ vsc __builtin_vec_lvlx (signed long long, vsc *);
+ LVLX LVLX_VSC
+ vsc __builtin_vec_lvlx (signed long long, signed char *);
+ LVLX LVLX_SC
+ vuc __builtin_vec_lvlx (signed long long, vuc *);
+ LVLX LVLX_VUC
+ vuc __builtin_vec_lvlx (signed long long, unsigned char *);
+ LVLX LVLX_UC
+ vbs __builtin_vec_lvlx (signed long long, vbs *);
+ LVLX LVLX_VBS
+ vss __builtin_vec_lvlx (signed long long, vss *);
+ LVLX LVLX_VSS
+ vss __builtin_vec_lvlx (signed long long, signed short *);
+ LVLX LVLX_SS
+ vus __builtin_vec_lvlx (signed long long, vus *);
+ LVLX LVLX_VUS
+ vus __builtin_vec_lvlx (signed long long, unsigned short *);
+ LVLX LVLX_US
+ vp __builtin_vec_lvlx (signed long long, vp *);
+ LVLX LVLX_VP
+ vbi __builtin_vec_lvlx (signed long long, vbi *);
+ LVLX LVLX_VBI
+ vsi __builtin_vec_lvlx (signed long long, vsi *);
+ LVLX LVLX_VSI
+ vsi __builtin_vec_lvlx (signed long long, signed int *);
+ LVLX LVLX_SI
+ vui __builtin_vec_lvlx (signed long long, vui *);
+ LVLX LVLX_VUI
+ vui __builtin_vec_lvlx (signed long long, unsigned int *);
+ LVLX LVLX_UI
+ vf __builtin_vec_lvlx (signed long long, vf *);
+ LVLX LVLX_VF
+ vf __builtin_vec_lvlx (signed long long, float *);
+ LVLX LVLX_F
+
+[VEC_LVLXL, vec_lvlxl, __builtin_vec_lvlxl, __PPU__]
+ vbc __builtin_vec_lvlxl (signed long long, vbc *);
+ LVLXL LVLXL_VBC
+ vsc __builtin_vec_lvlxl (signed long long, vsc *);
+ LVLXL LVLXL_VSC
+ vsc __builtin_vec_lvlxl (signed long long, signed char *);
+ LVLXL LVLXL_SC
+ vuc __builtin_vec_lvlxl (signed long long, vuc *);
+ LVLXL LVLXL_VUC
+ vuc __builtin_vec_lvlxl (signed long long, unsigned char *);
+ LVLXL LVLXL_UC
+ vbs __builtin_vec_lvlxl (signed long long, vbs *);
+ LVLXL LVLXL_VBS
+ vss __builtin_vec_lvlxl (signed long long, vss *);
+ LVLXL LVLXL_VSS
+ vss __builtin_vec_lvlxl (signed long long, signed short *);
+ LVLXL LVLXL_SS
+ vus __builtin_vec_lvlxl (signed long long, vus *);
+ LVLXL LVLXL_VUS
+ vus __builtin_vec_lvlxl (signed long long, unsigned short *);
+ LVLXL LVLXL_US
+ vp __builtin_vec_lvlxl (signed long long, vp *);
+ LVLXL LVLXL_VP
+ vbi __builtin_vec_lvlxl (signed long long, vbi *);
+ LVLXL LVLXL_VBI
+ vsi __builtin_vec_lvlxl (signed long long, vsi *);
+ LVLXL LVLXL_VSI
+ vsi __builtin_vec_lvlxl (signed long long, signed int *);
+ LVLXL LVLXL_SI
+ vui __builtin_vec_lvlxl (signed long long, vui *);
+ LVLXL LVLXL_VUI
+ vui __builtin_vec_lvlxl (signed long long, unsigned int *);
+ LVLXL LVLXL_UI
+ vf __builtin_vec_lvlxl (signed long long, vf *);
+ LVLXL LVLXL_VF
+ vf __builtin_vec_lvlxl (signed long long, float *);
+ LVLXL LVLXL_F
+
+[VEC_LVRX, vec_lvrx, __builtin_vec_lvrx, __PPU__]
+ vbc __builtin_vec_lvrx (signed long long, vbc *);
+ LVRX LVRX_VBC
+ vsc __builtin_vec_lvrx (signed long long, vsc *);
+ LVRX LVRX_VSC
+ vsc __builtin_vec_lvrx (signed long long, signed char *);
+ LVRX LVRX_SC
+ vuc __builtin_vec_lvrx (signed long long, vuc *);
+ LVRX LVRX_VUC
+ vuc __builtin_vec_lvrx (signed long long, unsigned char *);
+ LVRX LVRX_UC
+ vbs __builtin_vec_lvrx (signed long long, vbs *);
+ LVRX LVRX_VBS
+ vss __builtin_vec_lvrx (signed long long, vss *);
+ LVRX LVRX_VSS
+ vss __builtin_vec_lvrx (signed long long, signed short *);
+ LVRX LVRX_SS
+ vus __builtin_vec_lvrx (signed long long, vus *);
+ LVRX LVRX_VUS
+ vus __builtin_vec_lvrx (signed long long, unsigned short *);
+ LVRX LVRX_US
+ vp __builtin_vec_lvrx (signed long long, vp *);
+ LVRX LVRX_VP
+ vbi __builtin_vec_lvrx (signed long long, vbi *);
+ LVRX LVRX_VBI
+ vsi __builtin_vec_lvrx (signed long long, vsi *);
+ LVRX LVRX_VSI
+ vsi __builtin_vec_lvrx (signed long long, signed int *);
+ LVRX LVRX_SI
+ vui __builtin_vec_lvrx (signed long long, vui *);
+ LVRX LVRX_VUI
+ vui __builtin_vec_lvrx (signed long long, unsigned int *);
+ LVRX LVRX_UI
+ vf __builtin_vec_lvrx (signed long long, vf *);
+ LVRX LVRX_VF
+ vf __builtin_vec_lvrx (signed long long, float *);
+ LVRX LVRX_F
+
+[VEC_LVRXL, vec_lvrxl, __builtin_vec_lvrxl, __PPU__]
+ vbc __builtin_vec_lvrxl (signed long long, vbc *);
+ LVRXL LVRXL_VBC
+ vsc __builtin_vec_lvrxl (signed long long, vsc *);
+ LVRXL LVRXL_VSC
+ vsc __builtin_vec_lvrxl (signed long long, signed char *);
+ LVRXL LVRXL_SC
+ vuc __builtin_vec_lvrxl (signed long long, vuc *);
+ LVRXL LVRXL_VUC
+ vuc __builtin_vec_lvrxl (signed long long, unsigned char *);
+ LVRXL LVRXL_UC
+ vbs __builtin_vec_lvrxl (signed long long, vbs *);
+ LVRXL LVRXL_VBS
+ vss __builtin_vec_lvrxl (signed long long, vss *);
+ LVRXL LVRXL_VSS
+ vss __builtin_vec_lvrxl (signed long long, signed short *);
+ LVRXL LVRXL_SS
+ vus __builtin_vec_lvrxl (signed long long, vus *);
+ LVRXL LVRXL_VUS
+ vus __builtin_vec_lvrxl (signed long long, unsigned short *);
+ LVRXL LVRXL_US
+ vp __builtin_vec_lvrxl (signed long long, vp *);
+ LVRXL LVRXL_VP
+ vbi __builtin_vec_lvrxl (signed long long, vbi *);
+ LVRXL LVRXL_VBI
+ vsi __builtin_vec_lvrxl (signed long long, vsi *);
+ LVRXL LVRXL_VSI
+ vsi __builtin_vec_lvrxl (signed long long, signed int *);
+ LVRXL LVRXL_SI
+ vui __builtin_vec_lvrxl (signed long long, vui *);
+ LVRXL LVRXL_VUI
+ vui __builtin_vec_lvrxl (signed long long, unsigned int *);
+ LVRXL LVRXL_UI
+ vf __builtin_vec_lvrxl (signed long long, vf *);
+ LVRXL LVRXL_VF
+ vf __builtin_vec_lvrxl (signed long long, float *);
+ LVRXL LVRXL_F
+
+[VEC_LVSL, vec_lvsl, __builtin_vec_lvsl]
+ vuc __builtin_vec_lvsl (signed long long, unsigned char *);
+ LVSL LVSL_UC
+ vuc __builtin_vec_lvsl (signed long long, signed char *);
+ LVSL LVSL_SC
+
+[VEC_LVSR, vec_lvsr, __builtin_vec_lvsr]
+ vuc __builtin_vec_lvsr (signed long long, unsigned char *);
+ LVSR LVSR_UC
+ vuc __builtin_vec_lvsr (signed long long, signed char *);
+ LVSR LVSR_SC
+
+[VEC_LXVL, vec_xl_len, __builtin_vec_lxvl, _ARCH_PPC64_PWR9]
+ vsc __builtin_vec_lxvl (signed char *, unsigned long long);
+ LXVL LXVL_VSC
+ vuc __builtin_vec_lxvl (unsigned char *, unsigned long long);
+ LXVL LXVL_VUC
+ vss __builtin_vec_lxvl (signed short *, unsigned long long);
+ LXVL LXVL_VSS
+ vus __builtin_vec_lxvl (unsigned short *, unsigned long long);
+ LXVL LXVL_VUS
+ vsi __builtin_vec_lxvl (signed int *, unsigned long long);
+ LXVL LXVL_VSI
+ vui __builtin_vec_lxvl (unsigned int *, unsigned long long);
+ LXVL LXVL_VUI
+ vsll __builtin_vec_lxvl (signed long long *, unsigned long long);
+ LXVL LXVL_VSLL
+ vull __builtin_vec_lxvl (unsigned long long *, unsigned long long);
+ LXVL LXVL_VULL
+ vsq __builtin_vec_lxvl (signed __int128 *, unsigned long long);
+ LXVL LXVL_VSQ
+ vuq __builtin_vec_lxvl (unsigned __int128 *, unsigned long long);
+ LXVL LXVL_VUQ
+ vf __builtin_vec_lxvl (float *, unsigned long long);
+ LXVL LXVL_VF
+ vd __builtin_vec_lxvl (double *, unsigned long long);
+ LXVL LXVL_VD
+
+[VEC_MADD, vec_madd, __builtin_vec_madd]
+ vss __builtin_vec_madd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS
+ vss __builtin_vec_madd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS
+ vss __builtin_vec_madd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS
+ vus __builtin_vec_madd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS
+ vf __builtin_vec_madd (vf, vf, vf);
+ XVMADDSP
+ vd __builtin_vec_madd (vd, vd, vd);
+ XVMADDDP
+
+[VEC_MADDS, vec_madds, __builtin_vec_madds]
+ vss __builtin_vec_madds (vss, vss, vss);
+ VMHADDSHS
+
+[VEC_MAX, vec_max, __builtin_vec_max]
+ vsc __builtin_vec_max (vsc, vsc);
+ VMAXSB
+ vuc __builtin_vec_max (vuc, vuc);
+ VMAXUB
+ vss __builtin_vec_max (vss, vss);
+ VMAXSH
+ vus __builtin_vec_max (vus, vus);
+ VMAXUH
+ vsi __builtin_vec_max (vsi, vsi);
+ VMAXSW
+ vui __builtin_vec_max (vui, vui);
+ VMAXUW
+ vsll __builtin_vec_max (vsll, vsll);
+ VMAXSD
+ vull __builtin_vec_max (vull, vull);
+ VMAXUD
+ vf __builtin_vec_max (vf, vf);
+ XVMAXSP
+ vd __builtin_vec_max (vd, vd);
+ XVMAXDP
+
+[VEC_MERGEE, vec_mergee, __builtin_vec_vmrgew]
+ vsi __builtin_vec_vmrgew (vsi, vsi);
+ VMRGEW_V4SI VMRGEW_VSI
+ vui __builtin_vec_vmrgew (vui, vui);
+ VMRGEW_V4SI VMRGEW_VUI
+ vbi __builtin_vec_vmrgew (vbi, vbi);
+ VMRGEW_V4SI VMRGEW_VBI
+ vsll __builtin_vec_vmrgew (vsll, vsll);
+ VMRGEW_V2DI VMRGEW_VSLL
+ vull __builtin_vec_vmrgew (vull, vull);
+ VMRGEW_V2DI VMRGEW_VULL
+ vbll __builtin_vec_vmrgew (vbll, vbll);
+ VMRGEW_V2DI VMRGEW_VBLL
+ vf __builtin_vec_vmrgew (vf, vf);
+ VMRGEW_V4SF
+ vd __builtin_vec_vmrgew (vd, vd);
+ VMRGEW_V2DF
+
+[VEC_MERGEH, vec_mergeh, __builtin_vec_mergeh]
+ vbc __builtin_vec_mergeh (vbc, vbc);
+ VMRGHB VMRGHB_VBC
+ vsc __builtin_vec_mergeh (vsc, vsc);
+ VMRGHB VMRGHB_VSC
+ vuc __builtin_vec_mergeh (vuc, vuc);
+ VMRGHB VMRGHB_VUC
+ vbs __builtin_vec_mergeh (vbs, vbs);
+ VMRGHH VMRGHH_VBS
+ vss __builtin_vec_mergeh (vss, vss);
+ VMRGHH VMRGHH_VSS
+ vus __builtin_vec_mergeh (vus, vus);
+ VMRGHH VMRGHH_VUS
+ vp __builtin_vec_mergeh (vp, vp);
+ VMRGHH VMRGHH_VP
+ vbi __builtin_vec_mergeh (vbi, vbi);
+ VMRGHW VMRGHW_VBI
+ vsi __builtin_vec_mergeh (vsi, vsi);
+ VMRGHW VMRGHW_VSI
+ vui __builtin_vec_mergeh (vui, vui);
+ VMRGHW VMRGHW_VUI
+ vbll __builtin_vec_mergeh (vbll, vbll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VBLL
+ vsll __builtin_vec_mergeh (vsll, vsll);
+ VEC_MERGEH_V2DI VEC_MERGEH_VSLL
+ vull __builtin_vec_mergeh (vull, vull);
+ VEC_MERGEH_V2DI VEC_MERGEH_VULL
+ vf __builtin_vec_mergeh (vf, vf);
+ VMRGHW VMRGHW_VF
+ vd __builtin_vec_mergeh (vd, vd);
+ VEC_MERGEH_V2DF
+
+[VEC_MERGEL, vec_mergel, __builtin_vec_mergel]
+ vbc __builtin_vec_mergel (vbc, vbc);
+ VMRGLB VMRGLB_VBC
+ vsc __builtin_vec_mergel (vsc, vsc);
+ VMRGLB VMRGLB_VSC
+ vuc __builtin_vec_mergel (vuc, vuc);
+ VMRGLB VMRGLB_VUC
+ vbs __builtin_vec_mergel (vbs, vbs);
+ VMRGLH VMRGLH_VBS
+ vss __builtin_vec_mergel (vss, vss);
+ VMRGLH VMRGLH_VSS
+ vus __builtin_vec_mergel (vus, vus);
+ VMRGLH VMRGLH_VUS
+ vp __builtin_vec_mergel (vp, vp);
+ VMRGLH VMRGLH_VP
+ vbi __builtin_vec_mergel (vbi, vbi);
+ VMRGLW VMRGLW_VBI
+ vsi __builtin_vec_mergel (vsi, vsi);
+ VMRGLW VMRGLW_VSI
+ vui __builtin_vec_mergel (vui, vui);
+ VMRGLW VMRGLW_VUI
+ vbll __builtin_vec_mergel (vbll, vbll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VBLL
+ vsll __builtin_vec_mergel (vsll, vsll);
+ VEC_MERGEL_V2DI VEC_MERGEL_VSLL
+ vull __builtin_vec_mergel (vull, vull);
+ VEC_MERGEL_V2DI VEC_MERGEL_VULL
+ vf __builtin_vec_mergel (vf, vf);
+ VMRGLW VMRGLW_VF
+ vd __builtin_vec_mergel (vd, vd);
+ VEC_MERGEL_V2DF
+
+[VEC_MERGEO, vec_mergeo, __builtin_vec_vmrgow]
+ vsi __builtin_vec_vmrgow (vsi, vsi);
+ VMRGOW_V4SI VMRGOW_VSI
+ vui __builtin_vec_vmrgow (vui, vui);
+ VMRGOW_V4SI VMRGOW_VUI
+ vbi __builtin_vec_vmrgow (vbi, vbi);
+ VMRGOW_V4SI VMRGOW_VBI
+ vsll __builtin_vec_vmrgow (vsll, vsll);
+ VMRGOW_V2DI VMRGOW_VSLL
+ vull __builtin_vec_vmrgow (vull, vull);
+ VMRGOW_V2DI VMRGOW_VULL
+ vbll __builtin_vec_vmrgow (vbll, vbll);
+ VMRGOW_V2DI VMRGOW_VBLL
+ vf __builtin_vec_vmrgow (vf, vf);
+ VMRGOW_V4SF
+ vd __builtin_vec_vmrgow (vd, vd);
+ VMRGOW_V2DF
+
+[VEC_MIN, vec_min, __builtin_vec_min]
+ vsc __builtin_vec_min (vsc, vsc);
+ VMINSB
+ vuc __builtin_vec_min (vuc, vuc);
+ VMINUB
+ vss __builtin_vec_min (vss, vss);
+ VMINSH
+ vus __builtin_vec_min (vus, vus);
+ VMINUH
+ vsi __builtin_vec_min (vsi, vsi);
+ VMINSW
+ vui __builtin_vec_min (vui, vui);
+ VMINUW
+ vsll __builtin_vec_min (vsll, vsll);
+ VMINSD
+ vull __builtin_vec_min (vull, vull);
+ VMINUD
+ vf __builtin_vec_min (vf, vf);
+ XVMINSP
+ vd __builtin_vec_min (vd, vd);
+ XVMINDP
+
+[VEC_MLADD, vec_mladd, __builtin_vec_mladd]
+ vss __builtin_vec_mladd (vss, vss, vss);
+ VMLADDUHM VMLADDUHM_VSS2
+ vss __builtin_vec_mladd (vss, vus, vus);
+ VMLADDUHM VMLADDUHM_VSSVUS2
+ vss __builtin_vec_mladd (vus, vss, vss);
+ VMLADDUHM VMLADDUHM_VUSVSS2
+ vus __builtin_vec_mladd (vus, vus, vus);
+ VMLADDUHM VMLADDUHM_VUS2
+
+[VEC_MRADDS, vec_mradds, __builtin_vec_mradds]
+ vss __builtin_vec_mradds (vss, vss, vss);
+ VMHRADDSHS
+
+[VEC_MSUB, vec_msub, __builtin_vec_msub, __VSX__]
+ vf __builtin_vec_msub (vf, vf, vf);
+ XVMSUBSP
+ vd __builtin_vec_msub (vd, vd, vd);
+ XVMSUBDP
+
+[VEC_MSUM, vec_msum, __builtin_vec_msum]
+ vui __builtin_vec_msum (vuc, vuc, vui);
+ VMSUMUBM
+ vsi __builtin_vec_msum (vsc, vuc, vsi);
+ VMSUMMBM
+ vui __builtin_vec_msum (vus, vus, vui);
+ VMSUMUHM
+ vsi __builtin_vec_msum (vss, vss, vsi);
+ VMSUMSHM
+ vsq __builtin_vec_msum (vsll, vsll, vsq);
+ VMSUMUDM VMSUMUDM_S
+ vuq __builtin_vec_msum (vull, vull, vuq);
+ VMSUMUDM VMSUMUDM_U
+
+[VEC_MSUMS, vec_msums, __builtin_vec_msums]
+ vui __builtin_vec_msums (vus, vus, vui);
+ VMSUMUHS
+ vsi __builtin_vec_msums (vss, vss, vsi);
+ VMSUMSHS
+
+[VEC_MTVSCR, vec_mtvscr, __builtin_vec_mtvscr]
+ void __builtin_vec_mtvscr (vbc);
+ MTVSCR MTVSCR_VBC
+ void __builtin_vec_mtvscr (vsc);
+ MTVSCR MTVSCR_VSC
+ void __builtin_vec_mtvscr (vuc);
+ MTVSCR MTVSCR_VUC
+ void __builtin_vec_mtvscr (vbs);
+ MTVSCR MTVSCR_VBS
+ void __builtin_vec_mtvscr (vss);
+ MTVSCR MTVSCR_VSS
+ void __builtin_vec_mtvscr (vus);
+ MTVSCR MTVSCR_VUS
+ void __builtin_vec_mtvscr (vp);
+ MTVSCR MTVSCR_VP
+ void __builtin_vec_mtvscr (vbi);
+ MTVSCR MTVSCR_VBI
+ void __builtin_vec_mtvscr (vsi);
+ MTVSCR MTVSCR_VSI
+ void __builtin_vec_mtvscr (vui);
+ MTVSCR MTVSCR_VUI
+
+; Note that there is no entry for VEC_MUL. See rs6000-c.c:
+; altivec_resolve_overloaded_builtin, where there is special-case
+; code for VEC_MUL. TODO: Is this really necessary? Investigate.
+
+[VEC_MULE, vec_mule, __builtin_vec_mule]
+ vss __builtin_vec_mule (vsc, vsc);
+ VMULESB
+ vus __builtin_vec_mule (vuc, vuc);
+ VMULEUB
+ vsi __builtin_vec_mule (vss, vss);
+ VMULESH
+ vui __builtin_vec_mule (vus, vus);
+ VMULEUH
+ vsll __builtin_vec_mule (vsi, vsi);
+ VMULESW
+ vull __builtin_vec_mule (vui, vui);
+ VMULEUW
+
+[VEC_MULO, vec_mulo, __builtin_vec_mulo]
+ vss __builtin_vec_mulo (vsc, vsc);
+ VMULOSB
+ vus __builtin_vec_mulo (vuc, vuc);
+ VMULOUB
+ vsi __builtin_vec_mulo (vss, vss);
+ VMULOSH
+ vui __builtin_vec_mulo (vus, vus);
+ VMULOUH
+ vsll __builtin_vec_mulo (vsi, vsi);
+ VMULOSW
+ vull __builtin_vec_mulo (vui, vui);
+ VMULOUW
+
+[VEC_NABS, vec_nabs, __builtin_vec_nabs]
+ vsc __builtin_vec_nabs (vsc);
+ NABS_V16QI
+ vss __builtin_vec_nabs (vss);
+ NABS_V8HI
+ vsi __builtin_vec_nabs (vsi);
+ NABS_V4SI
+ vsll __builtin_vec_nabs (vsll);
+ NABS_V2DI
+ vf __builtin_vec_nabs (vf);
+ NABS_V4SF
+ vd __builtin_vec_nabs (vd);
+ NABS_V2DF
+
+[VEC_NAND, vec_nand, __builtin_vec_nand, _ARCH_PWR8]
+ vsc __builtin_vec_nand (vsc, vsc);
+ NAND_V16QI
+ vuc __builtin_vec_nand (vuc, vuc);
+ NAND_V16QI_UNS NAND_VUC
+ vbc __builtin_vec_nand (vbc, vbc);
+ NAND_V16QI_UNS NAND_VBC
+ vss __builtin_vec_nand (vss, vss);
+ NAND_V8HI
+ vus __builtin_vec_nand (vus, vus);
+ NAND_V8HI_UNS NAND_VUS
+ vbs __builtin_vec_nand (vbs, vbs);
+ NAND_V8HI_UNS NAND_VBS
+ vsi __builtin_vec_nand (vsi, vsi);
+ NAND_V4SI
+ vui __builtin_vec_nand (vui, vui);
+ NAND_V4SI_UNS NAND_VUI
+ vbi __builtin_vec_nand (vbi, vbi);
+ NAND_V4SI_UNS NAND_VBI
+ vsll __builtin_vec_nand (vsll, vsll);
+ NAND_V2DI
+ vull __builtin_vec_nand (vull, vull);
+ NAND_V2DI_UNS NAND_VULL
+ vbll __builtin_vec_nand (vbll, vbll);
+ NAND_V2DI_UNS NAND_VBLL
+ vf __builtin_vec_nand (vf, vf);
+ NAND_V4SF
+ vd __builtin_vec_nand (vd, vd);
+ NAND_V2DF
+
+[VEC_NCIPHER_BE, vec_ncipher_be, __builtin_vec_vncipher_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipher_be (vuc, vuc);
+ VNCIPHER_BE
+
+[VEC_NCIPHERLAST_BE, vec_ncipherlast_be, __builtin_vec_vncipherlast_be, _ARCH_PWR8]
+ vuc __builtin_vec_vncipherlast_be (vuc, vuc);
+ VNCIPHERLAST_BE
+
+[VEC_NEARBYINT, vec_nearbyint, __builtin_vec_nearbyint, __VSX__]
+ vf __builtin_vec_nearbyint (vf);
+ XVRSPI XVRSPI_NBI
+ vd __builtin_vec_nearbyint (vd);
+ XVRDPI XVRDPI_NBI
+
+[VEC_NEG, vec_neg, __builtin_vec_neg]
+ vsc __builtin_vec_neg (vsc);
+ NEG_V16QI
+ vss __builtin_vec_neg (vss);
+ NEG_V8HI
+ vsi __builtin_vec_neg (vsi);
+ NEG_V4SI
+ vsll __builtin_vec_neg (vsll);
+ NEG_V2DI
+ vf __builtin_vec_neg (vf);
+ NEG_V4SF
+ vd __builtin_vec_neg (vd);
+ NEG_V2DF
+
+[VEC_NMADD, vec_nmadd, __builtin_vec_nmadd, __VSX__]
+ vf __builtin_vec_nmadd (vf, vf, vf);
+ XVNMADDSP
+ vd __builtin_vec_nmadd (vd, vd, vd);
+ XVNMADDDP
+
+[VEC_NMSUB, vec_nmsub, __builtin_vec_nmsub]
+ vf __builtin_vec_nmsub (vf, vf, vf);
+ XVNMSUBSP
+ vd __builtin_vec_nmsub (vd, vd, vd);
+ XVNMSUBDP
+
+[VEC_NOR, vec_nor, __builtin_vec_nor]
+ vsc __builtin_vec_nor (vsc, vsc);
+ VNOR_V16QI
+ vuc __builtin_vec_nor (vuc, vuc);
+ VNOR_V16QI_UNS VNOR_V16QI_U
+ vbc __builtin_vec_nor (vbc, vbc);
+ VNOR_V16QI_UNS VNOR_V16QI_B
+ vss __builtin_vec_nor (vss, vss);
+ VNOR_V8HI
+ vus __builtin_vec_nor (vus, vus);
+ VNOR_V8HI_UNS VNOR_V8HI_U
+ vbs __builtin_vec_nor (vbs, vbs);
+ VNOR_V8HI_UNS VNOR_V8HI_B
+ vsi __builtin_vec_nor (vsi, vsi);
+ VNOR_V4SI
+ vui __builtin_vec_nor (vui, vui);
+ VNOR_V4SI_UNS VNOR_V4SI_U
+ vbi __builtin_vec_nor (vbi, vbi);
+ VNOR_V4SI_UNS VNOR_V4SI_B
+ vsll __builtin_vec_nor (vsll, vsll);
+ VNOR_V2DI
+ vull __builtin_vec_nor (vull, vull);
+ VNOR_V2DI_UNS VNOR_V2DI_U
+ vbll __builtin_vec_nor (vbll, vbll);
+ VNOR_V2DI_UNS VNOR_V2DI_B
+ vf __builtin_vec_nor (vf, vf);
+ VNOR_V4SF
+ vd __builtin_vec_nor (vd, vd);
+ VNOR_V2DF
+
+[VEC_OR, vec_or, __builtin_vec_or]
+ vsc __builtin_vec_or (vsc, vsc);
+ VOR_V16QI
+ vuc __builtin_vec_or (vuc, vuc);
+ VOR_V16QI_UNS VOR_V16QI_U
+ vbc __builtin_vec_or (vbc, vbc);
+ VOR_V16QI_UNS VOR_V16QI_B
+ vss __builtin_vec_or (vss, vss);
+ VOR_V8HI
+ vus __builtin_vec_or (vus, vus);
+ VOR_V8HI_UNS VOR_V8HI_U
+ vbs __builtin_vec_or (vbs, vbs);
+ VOR_V8HI_UNS VOR_V8HI_B
+ vsi __builtin_vec_or (vsi, vsi);
+ VOR_V4SI
+ vui __builtin_vec_or (vui, vui);
+ VOR_V4SI_UNS VOR_V4SI_U
+ vbi __builtin_vec_or (vbi, vbi);
+ VOR_V4SI_UNS VOR_V4SI_B
+ vsll __builtin_vec_or (vsll, vsll);
+ VOR_V2DI
+ vull __builtin_vec_or (vull, vull);
+ VOR_V2DI_UNS VOR_V2DI_U
+ vbll __builtin_vec_or (vbll, vbll);
+ VOR_V2DI_UNS VOR_V2DI_B
+ vf __builtin_vec_or (vf, vf);
+ VOR_V4SF
+ vd __builtin_vec_or (vd, vd);
+ VOR_V2DF
+
+[VEC_ORC, vec_orc, __builtin_vec_orc, _ARCH_PWR8]
+ vsc __builtin_vec_orc (vsc, vsc);
+ ORC_V16QI
+ vuc __builtin_vec_orc (vuc, vuc);
+ ORC_V16QI_UNS ORC_VUC
+ vbc __builtin_vec_orc (vbc, vbc);
+ ORC_V16QI_UNS ORC_VBC
+ vss __builtin_vec_orc (vss, vss);
+ ORC_V8HI
+ vus __builtin_vec_orc (vus, vus);
+ ORC_V8HI_UNS ORC_VUS
+ vbs __builtin_vec_orc (vbs, vbs);
+ ORC_V8HI_UNS ORC_VBS
+ vsi __builtin_vec_orc (vsi, vsi);
+ ORC_V4SI
+ vui __builtin_vec_orc (vui, vui);
+ ORC_V4SI_UNS ORC_VUI
+ vbi __builtin_vec_orc (vbi, vbi);
+ ORC_V4SI_UNS ORC_VBI
+ vsll __builtin_vec_orc (vsll, vsll);
+ ORC_V2DI
+ vull __builtin_vec_orc (vull, vull);
+ ORC_V2DI_UNS ORC_VULL
+ vbll __builtin_vec_orc (vbll, vbll);
+ ORC_V2DI_UNS ORC_VBLL
+ vf __builtin_vec_orc (vf, vf);
+ ORC_V4SF
+ vd __builtin_vec_orc (vd, vd);
+ ORC_V2DF
+
+[VEC_PACK, vec_pack, __builtin_vec_pack]
+ vsc __builtin_vec_pack (vss, vss);
+ VPKUHUM VPKUHUM_VSS
+ vuc __builtin_vec_pack (vus, vus);
+ VPKUHUM VPKUHUM_VUS
+ vbc __builtin_vec_pack (vbs, vbs);
+ VPKUHUM VPKUHUM_VBS
+ vss __builtin_vec_pack (vsi, vsi);
+ VPKUWUM VPKUWUM_VSI
+ vus __builtin_vec_pack (vui, vui);
+ VPKUWUM VPKUWUM_VUI
+ vbs __builtin_vec_pack (vbi, vbi);
+ VPKUWUM VPKUWUM_VBI
+ vsi __builtin_vec_pack (vsll, vsll);
+ VPKUDUM VPKUDUM_VSLL
+ vui __builtin_vec_pack (vull, vull);
+ VPKUDUM VPKUDUM_VULL
+ vbi __builtin_vec_pack (vbll, vbll);
+ VPKUDUM VPKUDUM_VBLL
+ vf __builtin_vec_pack (vd, vd);
+ FLOAT2_V2DF FLOAT2_V2DF_PACK
+
+[VEC_PACKPX, vec_packpx, __builtin_vec_packpx]
+ vp __builtin_vec_packpx (vui, vui);
+ VPKPX
+
+[VEC_PACKS, vec_packs, __builtin_vec_packs]
+ vuc __builtin_vec_packs (vus, vus);
+ VPKUHUS
+ vsc __builtin_vec_packs (vss, vss);
+ VPKSHSS
+ vus __builtin_vec_packs (vui, vui);
+ VPKUWUS
+ vss __builtin_vec_packs (vsi, vsi);
+ VPKSWSS
+ vui __builtin_vec_packs (vull, vull);
+ VPKUDUS
+ vsi __builtin_vec_packs (vsll, vsll);
+ VPKSDSS
+
+[VEC_PDEP, vec_pdep, __builtin_vec_vpdepd, _ARCH_PWR10]
+ vull __builtin_vec_vpdepd (vull, vull);
+ VPDEPD
+
+[VEC_PERM, vec_perm, __builtin_vec_perm]
+ vsc __builtin_vec_perm (vsc, vsc, vuc);
+ VPERM_16QI
+ vuc __builtin_vec_perm (vuc, vuc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VUC
+ vbc __builtin_vec_perm (vbc, vbc, vuc);
+ VPERM_16QI_UNS VPERM_16QI_VBC
+ vss __builtin_vec_perm (vss, vss, vuc);
+ VPERM_8HI
+ vus __builtin_vec_perm (vus, vus, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VUS
+ vbs __builtin_vec_perm (vbs, vbs, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VBS
+ vp __builtin_vec_perm (vp, vp, vuc);
+ VPERM_8HI_UNS VPERM_8HI_VP
+ vsi __builtin_vec_perm (vsi, vsi, vuc);
+ VPERM_4SI
+ vui __builtin_vec_perm (vui, vui, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VUI
+ vbi __builtin_vec_perm (vbi, vbi, vuc);
+ VPERM_4SI_UNS VPERM_4SI_VBI
+ vsll __builtin_vec_perm (vsll, vsll, vuc);
+ VPERM_2DI
+ vull __builtin_vec_perm (vull, vull, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VULL
+ vbll __builtin_vec_perm (vbll, vbll, vuc);
+ VPERM_2DI_UNS VPERM_2DI_VBLL
+ vf __builtin_vec_perm (vf, vf, vuc);
+ VPERM_4SF
+ vd __builtin_vec_perm (vd, vd, vuc);
+ VPERM_2DF
+
+[VEC_PERMX, vec_permx, __builtin_vec_xxpermx, _ARCH_PWR10]
+ vsc __builtin_vec_xxpermx (vsc, vsc, vuc, const int);
+ XXPERMX_V16QI
+ vuc __builtin_vec_xxpermx (vuc, vuc, vuc, const int);
+ XXPERMX_UV16QI
+ vss __builtin_vec_xxpermx (vss, vss, vuc, const int);
+ XXPERMX_V8HI
+ vus __builtin_vec_xxpermx (vus, vus, vuc, const int);
+ XXPERMX_UV8HI
+ vsi __builtin_vec_xxpermx (vsi, vsi, vuc, const int);
+ XXPERMX_V4SI
+ vui __builtin_vec_xxpermx (vui, vui, vuc, const int);
+ XXPERMX_UV4SI
+ vsll __builtin_vec_xxpermx (vsll, vsll, vuc, const int);
+ XXPERMX_V2DI
+ vull __builtin_vec_xxpermx (vull, vull, vuc, const int);
+ XXPERMX_UV2DI
+ vf __builtin_vec_xxpermx (vf, vf, vuc, const int);
+ XXPERMX_V4SF
+ vd __builtin_vec_xxpermx (vd, vd, vuc, const int);
+ XXPERMX_V2DF
+
+[VEC_PERMXOR, vec_permxor, __builtin_vec_vpermxor]
+ vsc __builtin_vec_vpermxor (vsc, vsc, vsc);
+ VPERMXOR VPERMXOR_VSC
+ vuc __builtin_vec_vpermxor (vuc, vuc, vuc);
+ VPERMXOR VPERMXOR_VUC
+ vbc __builtin_vec_vpermxor (vbc, vbc, vbc);
+ VPERMXOR VPERMXOR_VBC
+
+[VEC_PEXT, vec_pext, __builtin_vec_vpextd, _ARCH_PWR10]
+ vull __builtin_vec_vpextd (vull, vull);
+ VPEXTD
+
+[VEC_PMSUM, vec_pmsum_be, __builtin_vec_vpmsum]
+ vus __builtin_vec_vpmsum (vuc, vuc);
+ VPMSUMB VPMSUMB_V
+ vui __builtin_vec_vpmsum (vus, vus);
+ VPMSUMH VPMSUMH_V
+ vull __builtin_vec_vpmsum (vui, vui);
+ VPMSUMW VPMSUMW_V
+ vuq __builtin_vec_vpmsum (vull, vull);
+ VPMSUMD VPMSUMD_V
+
+[VEC_POPCNT, vec_popcnt, __builtin_vec_vpopcnt, _ARCH_PWR8]
+ vsc __builtin_vec_vpopcnt (vsc);
+ VPOPCNTB
+ vuc __builtin_vec_vpopcnt (vuc);
+ VPOPCNTUB
+ vss __builtin_vec_vpopcnt (vss);
+ VPOPCNTH
+ vus __builtin_vec_vpopcnt (vus);
+ VPOPCNTUH
+ vsi __builtin_vec_vpopcnt (vsi);
+ VPOPCNTW
+ vui __builtin_vec_vpopcnt (vui);
+ VPOPCNTUW
+ vsll __builtin_vec_vpopcnt (vsll);
+ VPOPCNTD
+ vull __builtin_vec_vpopcnt (vull);
+ VPOPCNTUD
+
+[VEC_PARITY_LSBB, vec_parity_lsbb, __builtin_vec_vparity_lsbb, _ARCH_PWR9]
+ vui __builtin_vec_vparity_lsbb (vsi);
+ VPRTYBW VPRTYBW_S
+ vui __builtin_vec_vparity_lsbb (vui);
+ VPRTYBW VPRTYBW_U
+ vull __builtin_vec_vparity_lsbb (vsll);
+ VPRTYBD VPRTYBD_S
+ vull __builtin_vec_vparity_lsbb (vull);
+ VPRTYBD VPRTYBD_U
+ vuq __builtin_vec_vparity_lsbb (vsq);
+ VPRTYBQ VPRTYBQ_S
+ vuq __builtin_vec_vparity_lsbb (vuq);
+ VPRTYBQ VPRTYBQ_U
+
+[VEC_RE, vec_re, __builtin_vec_re]
+ vf __builtin_vec_re (vf);
+ XVRESP
+ vd __builtin_vec_re (vd);
+ XVREDP
+
+[VEC_RECIP, vec_recipdiv, __builtin_vec_recipdiv]
+ vf __builtin_vec_recipdiv (vf, vf);
+ RECIP_V4SF
+ vd __builtin_vec_recipdiv (vd, vd);
+ RECIP_V2DF
+
+[VEC_REPLACE_ELT, vec_replace_elt, __builtin_vec_replace_elt, _ARCH_PWR10]
+ vui __builtin_vec_replace_elt (vui, unsigned int, const int);
+ VREPLACE_ELT_UV4SI
+ vsi __builtin_vec_replace_elt (vsi, signed int, const int);
+ VREPLACE_ELT_V4SI
+ vull __builtin_vec_replace_elt (vull, unsigned long long, const int);
+ VREPLACE_ELT_UV2DI
+ vsll __builtin_vec_replace_elt (vsll, signed long long, const int);
+ VREPLACE_ELT_V2DI
+ vf __builtin_vec_replace_elt (vf, float, const int);
+ VREPLACE_ELT_V4SF
+ vd __builtin_vec_replace_elt (vd, double, const int);
+ VREPLACE_ELT_V2DF
+
+[VEC_REPLACE_UN, vec_replace_unaligned, __builtin_vec_replace_un, _ARCH_PWR10]
+ vui __builtin_vec_replace_un (vui, unsigned int, const int);
+ VREPLACE_UN_UV4SI
+ vsi __builtin_vec_replace_un (vsi, signed int, const int);
+ VREPLACE_UN_V4SI
+ vull __builtin_vec_replace_un (vull, unsigned long long, const int);
+ VREPLACE_UN_UV2DI
+ vsll __builtin_vec_replace_un (vsll, signed long long, const int);
+ VREPLACE_UN_V2DI
+ vf __builtin_vec_replace_un (vf, float, const int);
+ VREPLACE_UN_V4SF
+ vd __builtin_vec_replace_un (vd, double, const int);
+ VREPLACE_UN_V2DF
+
+[VEC_REVB, vec_revb, __builtin_vec_revb, _ARCH_PWR8]
+ vss __builtin_vec_revb (vss);
+ REVB_V8HI REVB_VSS
+ vus __builtin_vec_revb (vus);
+ REVB_V8HI REVB_VUS
+ vsi __builtin_vec_revb (vsi);
+ REVB_V4SI REVB_VSI
+ vui __builtin_vec_revb (vui);
+ REVB_V4SI REVB_VUI
+ vsll __builtin_vec_revb (vsll);
+ REVB_V2DI REVB_VSLL
+ vull __builtin_vec_revb (vull);
+ REVB_V2DI REVB_VULL
+ vsq __builtin_vec_revb (vsq);
+ REVB_V1TI REVB_VSQ
+ vuq __builtin_vec_revb (vuq);
+ REVB_V1TI REVB_VUQ
+ vf __builtin_vec_revb (vf);
+ REVB_V4SF
+ vd __builtin_vec_revb (vd);
+ REVB_V2DF
+
+[VEC_REVE, vec_reve, __builtin_vec_vreve]
+ vsc __builtin_vec_vreve (vsc);
+ VREVE_V16QI VREVE_VSC
+ vuc __builtin_vec_vreve (vuc);
+ VREVE_V16QI VREVE_VUC
+ vbc __builtin_vec_vreve (vbc);
+ VREVE_V16QI VREVE_VBC
+ vss __builtin_vec_vreve (vss);
+ VREVE_V8HI VREVE_VSS
+ vus __builtin_vec_vreve (vus);
+ VREVE_V8HI VREVE_VUS
+ vbs __builtin_vec_vreve (vbs);
+ VREVE_V8HI VREVE_VBS
+ vsi __builtin_vec_vreve (vsi);
+ VREVE_V4SI VREVE_VSI
+ vui __builtin_vec_vreve (vui);
+ VREVE_V4SI VREVE_VUI
+ vbi __builtin_vec_vreve (vbi);
+ VREVE_V4SI VREVE_VBI
+ vsll __builtin_vec_vreve (vsll);
+ VREVE_V2DI VREVE_VSLL
+ vull __builtin_vec_vreve (vull);
+ VREVE_V2DI VREVE_VULL
+ vbll __builtin_vec_vreve (vbll);
+ VREVE_V2DI VREVE_VBLL
+ vf __builtin_vec_vreve (vf);
+ VREVE_V4SF
+ vd __builtin_vec_vreve (vd);
+ VREVE_V2DF
+
+[VEC_RINT, vec_rint, __builtin_vec_rint, __VSX__]
+ vf __builtin_vec_rint (vf);
+ XVRSPIC
+ vd __builtin_vec_rint (vd);
+ XVRDPIC
+
+[VEC_RL, vec_rl, __builtin_vec_rl]
+ vsc __builtin_vec_rl (vsc, vuc);
+ VRLB VRLB_VSC
+ vuc __builtin_vec_rl (vuc, vuc);
+ VRLB VRLB_VUC
+ vss __builtin_vec_rl (vss, vus);
+ VRLH VRLH_VSS
+ vus __builtin_vec_rl (vus, vus);
+ VRLH VRLH_VUS
+ vsi __builtin_vec_rl (vsi, vui);
+ VRLW VRLW_VSI
+ vui __builtin_vec_rl (vui, vui);
+ VRLW VRLW_VUI
+ vsll __builtin_vec_rl (vsll, vull);
+ VRLD VRLD_VSLL
+ vull __builtin_vec_rl (vull, vull);
+ VRLD VRLD_VULL
+
+[VEC_RLMI, vec_rlmi, __builtin_vec_rlmi]
+ vui __builtin_vec_rlmi (vui, vui, vui);
+ VRLWMI
+ vull __builtin_vec_rlmi (vull, vull, vull);
+ VRLDMI
+
+[VEC_RLNM, vec_rlnm, __builtin_vec_rlnm]
+ vui __builtin_vec_rlnm (vui, vui);
+ VRLWNM
+ vull __builtin_vec_rlnm (vull, vull);
+ VRLDNM
+
+[VEC_ROUND, vec_round, __builtin_vec_round]
+ vf __builtin_vec_round (vf);
+ XVRSPI
+ vd __builtin_vec_round (vd);
+ XVRDPI
+
+[VEC_RSQRT, vec_rsqrt, __builtin_vec_rsqrt]
+ vf __builtin_vec_rsqrt (vf);
+ RSQRT_4SF
+ vd __builtin_vec_rsqrt (vd);
+ RSQRT_2DF
+
+[VEC_RSQRTE, vec_rsqrte, __builtin_vec_rsqrte]
+ vf __builtin_vec_rsqrte (vf);
+ XVRSQRTESP
+ vd __builtin_vec_rsqrte (vd);
+ XVRSQRTEDP
+
+[VEC_SBOX_BE, vec_sbox_be, __builtin_vec_sbox_be, _ARCH_PWR8]
+ vuc __builtin_vec_sbox_be (vuc);
+ VSBOX_BE
+
+[VEC_SEL, vec_sel, __builtin_vec_sel]
+ vsc __builtin_vec_sel (vsc, vsc, vbc);
+ VSEL_16QI VSEL_16QI_B
+ vsc __builtin_vec_sel (vsc, vsc, vuc);
+ VSEL_16QI VSEL_16QI_U
+ vuc __builtin_vec_sel (vuc, vuc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_UB
+ vuc __builtin_vec_sel (vuc, vuc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_UU
+ vbc __builtin_vec_sel (vbc, vbc, vbc);
+ VSEL_16QI_UNS VSEL_16QI_BB
+ vbc __builtin_vec_sel (vbc, vbc, vuc);
+ VSEL_16QI_UNS VSEL_16QI_BU
+ vss __builtin_vec_sel (vss, vss, vbs);
+ VSEL_8HI VSEL_8HI_B
+ vss __builtin_vec_sel (vss, vss, vus);
+ VSEL_8HI VSEL_8HI_U
+ vus __builtin_vec_sel (vus, vus, vbs);
+ VSEL_8HI_UNS VSEL_8HI_UB
+ vus __builtin_vec_sel (vus, vus, vus);
+ VSEL_8HI_UNS VSEL_8HI_UU
+ vbs __builtin_vec_sel (vbs, vbs, vbs);
+ VSEL_8HI_UNS VSEL_8HI_BB
+ vbs __builtin_vec_sel (vbs, vbs, vus);
+ VSEL_8HI_UNS VSEL_8HI_BU
+ vsi __builtin_vec_sel (vsi, vsi, vbi);
+ VSEL_4SI VSEL_4SI_B
+ vsi __builtin_vec_sel (vsi, vsi, vui);
+ VSEL_4SI VSEL_4SI_U
+ vui __builtin_vec_sel (vui, vui, vbi);
+ VSEL_4SI_UNS VSEL_4SI_UB
+ vui __builtin_vec_sel (vui, vui, vui);
+ VSEL_4SI_UNS VSEL_4SI_UU
+ vbi __builtin_vec_sel (vbi, vbi, vbi);
+ VSEL_4SI_UNS VSEL_4SI_BB
+ vbi __builtin_vec_sel (vbi, vbi, vui);
+ VSEL_4SI_UNS VSEL_4SI_BU
+ vsll __builtin_vec_sel (vsll, vsll, vbll);
+ VSEL_2DI_B VSEL_2DI_B
+ vsll __builtin_vec_sel (vsll, vsll, vull);
+ VSEL_2DI_B VSEL_2DI_U
+ vull __builtin_vec_sel (vull, vull, vbll);
+ VSEL_2DI_UNS VSEL_2DI_UB
+ vull __builtin_vec_sel (vull, vull, vull);
+ VSEL_2DI_UNS VSEL_2DI_UU
+ vbll __builtin_vec_sel (vbll, vbll, vbll);
+ VSEL_2DI_UNS VSEL_2DI_BB
+ vbll __builtin_vec_sel (vbll, vbll, vull);
+ VSEL_2DI_UNS VSEL_2DI_BU
+ vf __builtin_vec_sel (vf, vf, vbll);
+ VSEL_4SF VSEL_4SF_B
+ vf __builtin_vec_sel (vf, vf, vull);
+ VSEL_4SF VSEL_4SF_U
+ vd __builtin_vec_sel (vd, vd, vbll);
+ VSEL_2DF VSEL_2DF_B
+ vd __builtin_vec_sel (vd, vd, vull);
+ VSEL_2DF VSEL_2DF_U
+
+[VEC_SHASIGMA_BE, vec_shasigma_be, __builtin_crypto_vshasigma]
+ vui __builtin_crypto_vshasigma (vui, const int, const int);
+ VSHASIGMAW
+ vull __builtin_crypto_vshasigma (vull, const int, const int);
+ VSHASIGMAD
+
+[VEC_SIGNED, vec_signed, __builtin_vec_vsigned]
+ vsi __builtin_vec_vsigned (vf);
+ VEC_VSIGNED_V4SF
+ vsll __builtin_vec_vsigned (vd);
+ VEC_VSIGNED_V2DF
+
+[VEC_SIGNED2, vec_signed2, __builtin_vec_vsigned2]
+ vsi __builtin_vec_vsigned2 (vd, vd);
+ VEC_VSIGNED2_V2DF
+
+[VEC_SIGNEDO, vec_signedo, __builtin_vec_vsignedo]
+ vui __builtin_vec_vsignedo (vd);
+ VEC_VSIGNEDO_V2DF
+
+[VEC_SL, vec_sl, __builtin_vec_sl]
+ vsc __builtin_vec_sl (vsc, vuc);
+ VSLB VSLB_VSC
+ vuc __builtin_vec_sl (vuc, vuc);
+ VSLB VSLB_VUC
+ vss __builtin_vec_sl (vss, vus);
+ VSLH VSLH_VSS
+ vus __builtin_vec_sl (vus, vus);
+ VSLH VSLH_VUS
+ vsi __builtin_vec_sl (vsi, vui);
+ VSLW VSLW_VSI
+ vui __builtin_vec_sl (vui, vui);
+ VSLW VSLW_VUI
+ vsll __builtin_vec_sl (vsll, vull);
+ VSLD VSLD_VSLL
+ vull __builtin_vec_sl (vull, vull);
+ VSLD VSLD_VULL
+
+[VEC_SLD, vec_sld, __builtin_vec_sld]
+ vsc __builtin_vec_sld (vsc, vsc, const int);
+ VSLDOI_16QI VSLDOI_VSC
+ vbc __builtin_vec_sld (vbc, vbc, const int);
+ VSLDOI_16QI VSLDOI_VBC
+ vuc __builtin_vec_sld (vuc, vuc, const int);
+ VSLDOI_16QI VSLDOI_VUC
+ vss __builtin_vec_sld (vss, vss, const int);
+ VSLDOI_8HI VSLDOI_VSS
+ vbs __builtin_vec_sld (vbs, vbs, const int);
+ VSLDOI_8HI VSLDOI_VBS
+ vus __builtin_vec_sld (vus, vus, const int);
+ VSLDOI_8HI VSLDOI_VUS
+ vp __builtin_vec_sld (vp, vp, const int);
+ VSLDOI_8HI VSLDOI_VP
+ vsi __builtin_vec_sld (vsi, vsi, const int);
+ VSLDOI_4SI VSLDOI_VSI
+ vbi __builtin_vec_sld (vbi, vbi, const int);
+ VSLDOI_4SI VSLDOI_VBI
+ vui __builtin_vec_sld (vui, vui, const int);
+ VSLDOI_4SI VSLDOI_VUI
+ vsll __builtin_vec_sld (vsll, vsll, const int);
+ VSLDOI_2DI VSLDOI_VSLL
+ vbll __builtin_vec_sld (vbll, vbll, const int);
+ VSLDOI_2DI VSLDOI_VBLL
+ vull __builtin_vec_sld (vull, vull, const int);
+ VSLDOI_2DI VSLDOI_VULL
+ vf __builtin_vec_sld (vf, vf, const int);
+ VSLDOI_4SF
+ vd __builtin_vec_sld (vd, vd, const int);
+ VSLDOI_2DF
+
+[VEC_SLDB, vec_sldb, __builtin_vec_sldb, _ARCH_PWR10]
+ vsc __builtin_vec_sldb (vsc, vsc, const int);
+ VSLDB_V16QI VSLDB_VSC
+ vuc __builtin_vec_sldb (vuc, vuc, const int);
+ VSLDB_V16QI VSLDB_VUC
+ vss __builtin_vec_sldb (vss, vss, const int);
+ VSLDB_V8HI VSLDB_VSS
+ vus __builtin_vec_sldb (vus, vus, const int);
+ VSLDB_V8HI VSLDB_VUS
+ vsi __builtin_vec_sldb (vsi, vsi, const int);
+ VSLDB_V4SI VSLDB_VSI
+ vui __builtin_vec_sldb (vui, vui, const int);
+ VSLDB_V4SI VSLDB_VUI
+ vsll __builtin_vec_sldb (vsll, vsll, const int);
+ VSLDB_V2DI VSLDB_VSLL
+ vull __builtin_vec_sldb (vull, vull, const int);
+ VSLDB_V2DI VSLDB_VULL
+
+[VEC_SLDW, vec_sldw, __builtin_vec_sldw]
+ vsc __builtin_vec_sldw (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC
+ vuc __builtin_vec_sldw (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC
+ vss __builtin_vec_sldw (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS
+ vus __builtin_vec_sldw (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS
+ vsi __builtin_vec_sldw (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI
+ vui __builtin_vec_sldw (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI
+ vsll __builtin_vec_sldw (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL
+ vull __builtin_vec_sldw (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL
+
+[VEC_SLL, vec_sll, __builtin_vec_sll]
+ vsc __builtin_vec_sll (vsc, vuc);
+ VSL VSL_VSC
+ vuc __builtin_vec_sll (vuc, vuc);
+ VSL VSL_VUC
+ vss __builtin_vec_sll (vss, vuc);
+ VSL VSL_VSS
+ vus __builtin_vec_sll (vus, vuc);
+ VSL VSL_VUS
+ vp __builtin_vec_sll (vp, vuc);
+ VSL VSL_VP
+ vsi __builtin_vec_sll (vsi, vuc);
+ VSL VSL_VSI
+ vui __builtin_vec_sll (vui, vuc);
+ VSL VSL_VUI
+ vsll __builtin_vec_sll (vsll, vuc);
+ VSL VSL_VSLL
+ vull __builtin_vec_sll (vull, vuc);
+ VSL VSL_VULL
+
+[VEC_SLO, vec_slo, __builtin_vec_slo]
+ vsc __builtin_vec_slo (vsc, vsc);
+ VSLO VSLO_VSCS
+ vsc __builtin_vec_slo (vsc, vuc);
+ VSLO VSLO_VSCU
+ vuc __builtin_vec_slo (vuc, vsc);
+ VSLO VSLO_VUCS
+ vuc __builtin_vec_slo (vuc, vuc);
+ VSLO VSLO_VUCU
+ vss __builtin_vec_slo (vss, vsc);
+ VSLO VSLO_VSSS
+ vss __builtin_vec_slo (vss, vuc);
+ VSLO VSLO_VSSU
+ vus __builtin_vec_slo (vus, vsc);
+ VSLO VSLO_VUSS
+ vus __builtin_vec_slo (vus, vuc);
+ VSLO VSLO_VUSU
+ vp __builtin_vec_slo (vp, vsc);
+ VSLO VSLO_VPS
+ vp __builtin_vec_slo (vp, vuc);
+ VSLO VSLO_VPU
+ vsi __builtin_vec_slo (vsi, vsc);
+ VSLO VSLO_VSIS
+ vsi __builtin_vec_slo (vsi, vuc);
+ VSLO VSLO_VSIU
+ vui __builtin_vec_slo (vui, vsc);
+ VSLO VSLO_VUIS
+ vui __builtin_vec_slo (vui, vuc);
+ VSLO VSLO_VUIU
+ vsll __builtin_vec_slo (vsll, vsc);
+ VSLO VSLO_VSLLS
+ vsll __builtin_vec_slo (vsll, vuc);
+ VSLO VSLO_VSLLU
+ vull __builtin_vec_slo (vull, vsc);
+ VSLO VSLO_VULLS
+ vull __builtin_vec_slo (vull, vuc);
+ VSLO VSLO_VULLU
+ vf __builtin_vec_slo (vf, vsc);
+ VSLO VSLO_VFS
+ vf __builtin_vec_slo (vf, vuc);
+ VSLO VSLO_VFU
+
+[VEC_SLV, vec_slv, __builtin_vec_vslv, _ARCH_PWR9]
+ vuc __builtin_vec_vslv (vuc, vuc);
+ VSLV
+
+[VEC_SPLAT, vec_splat, __builtin_vec_splat]
+ vsc __builtin_vec_splat (vsc, signed int);
+ VSPLTB VSPLTB_VSC
+ vuc __builtin_vec_splat (vuc, signed int);
+ VSPLTB VSPLTB_VUC
+ vbc __builtin_vec_splat (vbc, signed int);
+ VSPLTB VSPLTB_VBC
+ vss __builtin_vec_splat (vss, signed int);
+ VSPLTH VSPLTH_VSS
+ vus __builtin_vec_splat (vus, signed int);
+ VSPLTH VSPLTH_VUS
+ vbs __builtin_vec_splat (vbs, signed int);
+ VSPLTH VSPLTH_VBS
+ vp __builtin_vec_splat (vp, signed int);
+ VSPLTH VSPLTH_VP
+ vf __builtin_vec_splat (vf, signed int);
+ VSPLTW VSPLTW_VF
+ vsi __builtin_vec_splat (vsi, signed int);
+ VSPLTW VSPLTW_VSI
+ vui __builtin_vec_splat (vui, signed int);
+ VSPLTW VSPLTW_VUI
+ vbi __builtin_vec_splat (vbi, signed int);
+ VSPLTW VSPLTW_VBI
+ vd __builtin_vec_splat (vd, signed int);
+ XXSPLTD_V2DF
+ vsll __builtin_vec_splat (vsll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VSLL
+ vull __builtin_vec_splat (vull, signed int);
+ XXSPLTD_V2DI XXSPLTD_VULL
+ vbll __builtin_vec_splat (vbll, signed int);
+ XXSPLTD_V2DI XXSPLTD_VBLL
+
+[VEC_SPLATI, vec_splati, __builtin_vec_xxspltiw, _ARCH_PWR10]
+ vsi __builtin_vec_xxspltiw (signed int);
+ VXXSPLTIW_V4SI
+ vf __builtin_vec_xxspltiw (float);
+ VXXSPLTIW_V4SF
+
+[VEC_SPLATID, vec_splatid, __builtin_vec_xxspltid, _ARCH_PWR10]
+ vd __builtin_vec_xxspltid (float);
+ VXXSPLTIDP
+
+[VEC_SPLATI_INS, vec_splati_ins, __builtin_vec_xxsplti32dx, _ARCH_PWR10]
+ vsi __builtin_vec_xxsplti32dx (vsi, const int, signed int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VSI
+ vui __builtin_vec_xxsplti32dx (vui, const int, unsigned int);
+ VXXSPLTI32DX_V4SI VXXSPLTI32DX_VUI
+ vf __builtin_vec_xxsplti32dx (vf, const int, float);
+ VXXSPLTI32DX_V4SF
+
+[VEC_SQRT, vec_sqrt, __builtin_vec_sqrt, __VSX__]
+ vf __builtin_vec_sqrt (vf);
+ XVSQRTSP
+ vd __builtin_vec_sqrt (vd);
+ XVSQRTDP
+
+[VEC_SR, vec_sr, __builtin_vec_sr]
+ vsc __builtin_vec_sr (vsc, vuc);
+ VSRB VSRB_VSC
+ vuc __builtin_vec_sr (vuc, vuc);
+ VSRB VSRB_VUC
+ vss __builtin_vec_sr (vss, vus);
+ VSRH VSRH_VSS
+ vus __builtin_vec_sr (vus, vus);
+ VSRH VSRH_VUS
+ vsi __builtin_vec_sr (vsi, vui);
+ VSRW VSRW_VSI
+ vui __builtin_vec_sr (vui, vui);
+ VSRW VSRW_VUI
+ vsll __builtin_vec_sr (vsll, vull);
+ VSRD VSRD_VSLL
+ vull __builtin_vec_sr (vull, vull);
+ VSRD VSRD_VULL
+
+[VEC_SRA, vec_sra, __builtin_vec_sra]
+ vsc __builtin_vec_sra (vsc, vuc);
+ VSRAB VSRAB_VSC
+ vuc __builtin_vec_sra (vuc, vuc);
+ VSRAB VSRAB_VUC
+ vss __builtin_vec_sra (vss, vus);
+ VSRAH VSRAH_VSS
+ vus __builtin_vec_sra (vus, vus);
+ VSRAH VSRAH_VUS
+ vsi __builtin_vec_sra (vsi, vui);
+ VSRAW VSRAW_VSI
+ vui __builtin_vec_sra (vui, vui);
+ VSRAW VSRAW_VUI
+ vsll __builtin_vec_sra (vsll, vull);
+ VSRAD VSRAD_VSLL
+ vull __builtin_vec_sra (vull, vull);
+ VSRAD VSRAD_VULL
+
+[VEC_SRDB, vec_srdb, __builtin_vec_srdb, _ARCH_PWR10]
+ vsc __builtin_vec_srdb (vsc, vsc, const int);
+ VSRDB_V16QI VSRDB_VSC
+ vuc __builtin_vec_srdb (vuc, vuc, const int);
+ VSRDB_V16QI VSRDB_VUC
+ vss __builtin_vec_srdb (vss, vss, const int);
+ VSRDB_V8HI VSRDB_VSS
+ vus __builtin_vec_srdb (vus, vus, const int);
+ VSRDB_V8HI VSRDB_VUS
+ vsi __builtin_vec_srdb (vsi, vsi, const int);
+ VSRDB_V4SI VSRDB_VSI
+ vui __builtin_vec_srdb (vui, vui, const int);
+ VSRDB_V4SI VSRDB_VUI
+ vsll __builtin_vec_srdb (vsll, vsll, const int);
+ VSRDB_V2DI VSRDB_VSLL
+ vull __builtin_vec_srdb (vull, vull, const int);
+ VSRDB_V2DI VSRDB_VULL
+
+[VEC_SRL, vec_srl, __builtin_vec_srl]
+ vsc __builtin_vec_srl (vsc, vuc);
+ VSR VSR_VSC
+ vuc __builtin_vec_srl (vuc, vuc);
+ VSR VSR_VUC
+ vss __builtin_vec_srl (vss, vuc);
+ VSR VSR_VSS
+ vus __builtin_vec_srl (vus, vuc);
+ VSR VSR_VUS
+ vp __builtin_vec_srl (vp, vuc);
+ VSR VSR_VP
+ vsi __builtin_vec_srl (vsi, vuc);
+ VSR VSR_VSI
+ vui __builtin_vec_srl (vui, vuc);
+ VSR VSR_VUI
+ vsll __builtin_vec_srl (vsll, vuc);
+ VSR VSR_VSLL
+ vull __builtin_vec_srl (vull, vuc);
+ VSR VSR_VULL
+
+[VEC_SRO, vec_sro, __builtin_vec_sro]
+ vsc __builtin_vec_sro (vsc, vsc);
+ VSRO VSRO_VSCS
+ vsc __builtin_vec_sro (vsc, vuc);
+ VSRO VSRO_VSCU
+ vuc __builtin_vec_sro (vuc, vsc);
+ VSRO VSRO_VUCS
+ vuc __builtin_vec_sro (vuc, vuc);
+ VSRO VSRO_VUCU
+ vss __builtin_vec_sro (vss, vsc);
+ VSRO VSRO_VSSS
+ vss __builtin_vec_sro (vss, vuc);
+ VSRO VSRO_VSSU
+ vus __builtin_vec_sro (vus, vsc);
+ VSRO VSRO_VUSS
+ vus __builtin_vec_sro (vus, vuc);
+ VSRO VSRO_VUSU
+ vp __builtin_vec_sro (vp, vsc);
+ VSRO VSRO_VPS
+ vp __builtin_vec_sro (vp, vuc);
+ VSRO VSRO_VPU
+ vsi __builtin_vec_sro (vsi, vsc);
+ VSRO VSRO_VSIS
+ vsi __builtin_vec_sro (vsi, vuc);
+ VSRO VSRO_VSIU
+ vui __builtin_vec_sro (vui, vsc);
+ VSRO VSRO_VUIS
+ vui __builtin_vec_sro (vui, vuc);
+ VSRO VSRO_VUIU
+ vsll __builtin_vec_sro (vsll, vsc);
+ VSRO VSRO_VSLLS
+ vsll __builtin_vec_sro (vsll, vuc);
+ VSRO VSRO_VSLLU
+ vull __builtin_vec_sro (vull, vsc);
+ VSRO VSRO_VULLS
+ vull __builtin_vec_sro (vull, vuc);
+ VSRO VSRO_VULLU
+ vf __builtin_vec_sro (vf, vsc);
+ VSRO VSRO_VFS
+ vf __builtin_vec_sro (vf, vuc);
+ VSRO VSRO_VFU
+
+[VEC_SRV, vec_srv, __builtin_vec_vsrv, _ARCH_PWR9]
+ vuc __builtin_vec_vsrv (vuc, vuc);
+ VSRV
+
+[VEC_ST, vec_st, __builtin_vec_st]
+ void __builtin_vec_st (vsc, signed long long, vsc *);
+ STVX_V16QI STVX_VSC
+ void __builtin_vec_st (vsc, signed long long, signed char *);
+ STVX_V16QI STVX_SC
+ void __builtin_vec_st (vuc, signed long long, vuc *);
+ STVX_V16QI STVX_VUC
+ void __builtin_vec_st (vuc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC
+ void __builtin_vec_st (vbc, signed long long, vbc *);
+ STVX_V16QI STVX_VBC
+ void __builtin_vec_st (vbc, signed long long, signed char *);
+ STVX_V16QI STVX_SC_B
+ void __builtin_vec_st (vbc, signed long long, unsigned char *);
+ STVX_V16QI STVX_UC_B
+ void __builtin_vec_st (vss, signed long long, vss *);
+ STVX_V8HI STVX_VSS
+ void __builtin_vec_st (vss, signed long long, signed short *);
+ STVX_V8HI STVX_SS
+ void __builtin_vec_st (vus, signed long long, vus *);
+ STVX_V8HI STVX_VUS
+ void __builtin_vec_st (vus, signed long long, unsigned short *);
+ STVX_V8HI STVX_US
+ void __builtin_vec_st (vbs, signed long long, vbs *);
+ STVX_V8HI STVX_VBS
+ void __builtin_vec_st (vbs, signed long long, signed short *);
+ STVX_V8HI STVX_SS_B
+ void __builtin_vec_st (vbs, signed long long, unsigned short *);
+ STVX_V8HI STVX_US_B
+ void __builtin_vec_st (vp, signed long long, vp *);
+ STVX_V8HI STVX_P
+ void __builtin_vec_st (vsi, signed long long, vsi *);
+ STVX_V4SI STVX_VSI
+ void __builtin_vec_st (vsi, signed long long, signed int *);
+ STVX_V4SI STVX_SI
+ void __builtin_vec_st (vui, signed long long, vui *);
+ STVX_V4SI STVX_VUI
+ void __builtin_vec_st (vui, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI
+ void __builtin_vec_st (vbi, signed long long, vbi *);
+ STVX_V4SI STVX_VBI
+ void __builtin_vec_st (vbi, signed long long, signed int *);
+ STVX_V4SI STVX_SI_B
+ void __builtin_vec_st (vbi, signed long long, unsigned int *);
+ STVX_V4SI STVX_UI_B
+ void __builtin_vec_st (vsll, signed long long, vsll *);
+ STVX_V2DI STVX_VSLL
+ void __builtin_vec_st (vsll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL
+ void __builtin_vec_st (vull, signed long long, vull *);
+ STVX_V2DI STVX_VULL
+ void __builtin_vec_st (vull, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL
+ void __builtin_vec_st (vbll, signed long long, vbll *);
+ STVX_V2DI STVX_VBLL
+ void __builtin_vec_st (vbll, signed long long, signed long long *);
+ STVX_V2DI STVX_SLL_B
+ void __builtin_vec_st (vbll, signed long long, unsigned long long *);
+ STVX_V2DI STVX_ULL_B
+ void __builtin_vec_st (vf, signed long long, vf *);
+ STVX_V4SF STVX_VF
+ void __builtin_vec_st (vf, signed long long, float *);
+ STVX_V4SF STVX_F
+ void __builtin_vec_st (vd, signed long long, vd *);
+ STVX_V2DF STVX_VD
+ void __builtin_vec_st (vd, signed long long, double *);
+ STVX_V2DF STVX_D
+
+[VEC_STE, vec_ste, __builtin_vec_ste]
+ void __builtin_vec_ste (vsc, signed long long, signed char *);
+ STVEBX STVEBX_S
+ void __builtin_vec_ste (vuc, signed long long, unsigned char *);
+ STVEBX STVEBX_U
+ void __builtin_vec_ste (vbc, signed long long, signed char *);
+ STVEBX STVEBX_BS
+ void __builtin_vec_ste (vbc, signed long long, unsigned char *);
+ STVEBX STVEBX_BU
+ void __builtin_vec_ste (vss, signed long long, signed short *);
+ STVEHX STVEHX_S
+ void __builtin_vec_ste (vus, signed long long, unsigned short *);
+ STVEHX STVEHX_U
+ void __builtin_vec_ste (vbs, signed long long, signed short *);
+ STVEHX STVEHX_BS
+ void __builtin_vec_ste (vbs, signed long long, unsigned short *);
+ STVEHX STVEHX_BU
+ void __builtin_vec_ste (vp, signed long long, signed short *);
+ STVEHX STVEHX_PS
+ void __builtin_vec_ste (vp, signed long long, unsigned short *);
+ STVEHX STVEHX_PU
+ void __builtin_vec_ste (vsi, signed long long, signed int *);
+ STVEWX STVEWX_S
+ void __builtin_vec_ste (vui, signed long long, unsigned int *);
+ STVEWX STVEWX_U
+ void __builtin_vec_ste (vbi, signed long long, signed int *);
+ STVEWX STVEWX_BS
+ void __builtin_vec_ste (vbi, signed long long, unsigned int *);
+ STVEWX STVEWX_BU
+ void __builtin_vec_ste (vf, signed long long, float *);
+ STVEWX STVEWX_F
+
+[VEC_STL, vec_stl, __builtin_vec_stl]
+ void __builtin_vec_stl (vsc, signed long long, vsc *);
+ STVXL_V16QI STVXL_VSC
+ void __builtin_vec_stl (vsc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC
+ void __builtin_vec_stl (vuc, signed long long, vuc *);
+ STVXL_V16QI STVXL_VUC
+ void __builtin_vec_stl (vuc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC
+ void __builtin_vec_stl (vbc, signed long long, vbc *);
+ STVXL_V16QI STVXL_VBC
+ void __builtin_vec_stl (vbc, signed long long, signed char *);
+ STVXL_V16QI STVXL_SC_B
+ void __builtin_vec_stl (vbc, signed long long, unsigned char *);
+ STVXL_V16QI STVXL_UC_B
+ void __builtin_vec_stl (vss, signed long long, vss *);
+ STVXL_V8HI STVXL_VSS
+ void __builtin_vec_stl (vss, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS
+ void __builtin_vec_stl (vus, signed long long, vus *);
+ STVXL_V8HI STVXL_VUS
+ void __builtin_vec_stl (vus, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US
+ void __builtin_vec_stl (vbs, signed long long, vbs *);
+ STVXL_V8HI STVXL_VBS
+ void __builtin_vec_stl (vbs, signed long long, signed short *);
+ STVXL_V8HI STVXL_SS_B
+ void __builtin_vec_stl (vbs, signed long long, unsigned short *);
+ STVXL_V8HI STVXL_US_B
+ void __builtin_vec_stl (vp, signed long long, vp *);
+ STVXL_V8HI STVXL_P
+ void __builtin_vec_stl (vsi, signed long long, vsi *);
+ STVXL_V4SI STVXL_VSI
+ void __builtin_vec_stl (vsi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI
+ void __builtin_vec_stl (vui, signed long long, vui *);
+ STVXL_V4SI STVXL_VUI
+ void __builtin_vec_stl (vui, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI
+ void __builtin_vec_stl (vbi, signed long long, vbi *);
+ STVXL_V4SI STVXL_VBI
+ void __builtin_vec_stl (vbi, signed long long, signed int *);
+ STVXL_V4SI STVXL_SI_B
+ void __builtin_vec_stl (vbi, signed long long, unsigned int *);
+ STVXL_V4SI STVXL_UI_B
+ void __builtin_vec_stl (vsll, signed long long, vsll *);
+ STVXL_V2DI STVXL_VSLL
+ void __builtin_vec_stl (vsll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL
+ void __builtin_vec_stl (vull, signed long long, vull *);
+ STVXL_V2DI STVXL_VULL
+ void __builtin_vec_stl (vull, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL
+ void __builtin_vec_stl (vbll, signed long long, vbll *);
+ STVXL_V2DI STVXL_VBLL
+ void __builtin_vec_stl (vbll, signed long long, signed long long *);
+ STVXL_V2DI STVXL_SLL_B
+ void __builtin_vec_stl (vbll, signed long long, unsigned long long *);
+ STVXL_V2DI STVXL_ULL_B
+ void __builtin_vec_stl (vf, signed long long, vf *);
+ STVXL_V4SF STVXL_VF
+ void __builtin_vec_stl (vf, signed long long, float *);
+ STVXL_V4SF STVXL_F
+ void __builtin_vec_stl (vd, signed long long, vd *);
+ STVXL_V2DF STVXL_VD
+ void __builtin_vec_stl (vd, signed long long, double *);
+ STVXL_V2DF STVXL_D
+
+[VEC_STRIL, vec_stril, __builtin_vec_stril, _ARCH_PWR10]
+ vuc __builtin_vec_stril (vuc);
+ VSTRIBL VSTRIBL_U
+ vsc __builtin_vec_stril (vsc);
+ VSTRIBL VSTRIBL_S
+ vus __builtin_vec_stril (vus);
+ VSTRIHL VSTRIHL_U
+ vss __builtin_vec_stril (vss);
+ VSTRIHL VSTRIHL_S
+
+[VEC_STRIL_P, vec_stril_p, __builtin_vec_stril_p, _ARCH_PWR10]
+ signed int __builtin_vec_stril_p (vuc);
+ VSTRIBL_P VSTRIBL_PU
+ signed int __builtin_vec_stril_p (vsc);
+ VSTRIBL_P VSTRIBL_PS
+ signed int __builtin_vec_stril_p (vus);
+ VSTRIHL_P VSTRIHL_PU
+ signed int __builtin_vec_stril_p (vss);
+ VSTRIHL_P VSTRIHL_PS
+
+[VEC_STRIR, vec_strir, __builtin_vec_strir, _ARCH_PWR10]
+ vuc __builtin_vec_strir (vuc);
+ VSTRIBR VSTRIBR_U
+ vsc __builtin_vec_strir (vsc);
+ VSTRIBR VSTRIBR_S
+ vus __builtin_vec_strir (vus);
+ VSTRIHR VSTRIHR_U
+ vss __builtin_vec_strir (vss);
+ VSTRIHR VSTRIHR_S
+
+[VEC_STRIR_P, vec_strir_p, __builtin_vec_strir_p, _ARCH_PWR10]
+ signed int __builtin_vec_strir_p (vuc);
+ VSTRIBR_P VSTRIBR_PU
+ signed int __builtin_vec_strir_p (vsc);
+ VSTRIBR_P VSTRIBR_PS
+ signed int __builtin_vec_strir_p (vus);
+ VSTRIHR_P VSTRIHR_PU
+ signed int __builtin_vec_strir_p (vss);
+ VSTRIHR_P VSTRIHR_PS
+
+[VEC_STVLX, vec_stvlx, __builtin_vec_stvlx, __PPU__]
+ void __builtin_vec_stvlx (vbc, signed long long, vbc *);
+ STVLX STVLX_VBC
+ void __builtin_vec_stvlx (vsc, signed long long, vsc *);
+ STVLX STVLX_VSC
+ void __builtin_vec_stvlx (vsc, signed long long, signed char *);
+ STVLX STVLX_SC
+ void __builtin_vec_stvlx (vuc, signed long long, vuc *);
+ STVLX STVLX_VUC
+ void __builtin_vec_stvlx (vuc, signed long long, unsigned char *);
+ STVLX STVLX_UC
+ void __builtin_vec_stvlx (vbs, signed long long, vbs *);
+ STVLX STVLX_VBS
+ void __builtin_vec_stvlx (vss, signed long long, vss *);
+ STVLX STVLX_VSS
+ void __builtin_vec_stvlx (vss, signed long long, signed short *);
+ STVLX STVLX_SS
+ void __builtin_vec_stvlx (vus, signed long long, vus *);
+ STVLX STVLX_VUS
+ void __builtin_vec_stvlx (vus, signed long long, unsigned short *);
+ STVLX STVLX_US
+ void __builtin_vec_stvlx (vp, signed long long, vp *);
+ STVLX STVLX_VP
+ void __builtin_vec_stvlx (vbi, signed long long, vbi *);
+ STVLX STVLX_VBI
+ void __builtin_vec_stvlx (vsi, signed long long, vsi *);
+ STVLX STVLX_VSI
+ void __builtin_vec_stvlx (vsi, signed long long, signed int *);
+ STVLX STVLX_SI
+ void __builtin_vec_stvlx (vui, signed long long, vui *);
+ STVLX STVLX_VUI
+ void __builtin_vec_stvlx (vui, signed long long, unsigned int *);
+ STVLX STVLX_UI
+ void __builtin_vec_stvlx (vf, signed long long, vf *);
+ STVLX STVLX_VF
+ void __builtin_vec_stvlx (vf, signed long long, float *);
+ STVLX STVLX_F
+
+[VEC_STVLXL, vec_stvlxl, __builtin_vec_stvlxl, __PPU__]
+ void __builtin_vec_stvlxl (vbc, signed long long, vbc *);
+ STVLXL STVLXL_VBC
+ void __builtin_vec_stvlxl (vsc, signed long long, vsc *);
+ STVLXL STVLXL_VSC
+ void __builtin_vec_stvlxl (vsc, signed long long, signed char *);
+ STVLXL STVLXL_SC
+ void __builtin_vec_stvlxl (vuc, signed long long, vuc *);
+ STVLXL STVLXL_VUC
+ void __builtin_vec_stvlxl (vuc, signed long long, unsigned char *);
+ STVLXL STVLXL_UC
+ void __builtin_vec_stvlxl (vbs, signed long long, vbs *);
+ STVLXL STVLXL_VBS
+ void __builtin_vec_stvlxl (vss, signed long long, vss *);
+ STVLXL STVLXL_VSS
+ void __builtin_vec_stvlxl (vss, signed long long, signed short *);
+ STVLXL STVLXL_SS
+ void __builtin_vec_stvlxl (vus, signed long long, vus *);
+ STVLXL STVLXL_VUS
+ void __builtin_vec_stvlxl (vus, signed long long, unsigned short *);
+ STVLXL STVLXL_US
+ void __builtin_vec_stvlxl (vp, signed long long, vp *);
+ STVLXL STVLXL_VP
+ void __builtin_vec_stvlxl (vbi, signed long long, vbi *);
+ STVLXL STVLXL_VBI
+ void __builtin_vec_stvlxl (vsi, signed long long, vsi *);
+ STVLXL STVLXL_VSI
+ void __builtin_vec_stvlxl (vsi, signed long long, signed int *);
+ STVLXL STVLXL_SI
+ void __builtin_vec_stvlxl (vui, signed long long, vui *);
+ STVLXL STVLXL_VUI
+ void __builtin_vec_stvlxl (vui, signed long long, unsigned int *);
+ STVLXL STVLXL_UI
+ void __builtin_vec_stvlxl (vf, signed long long, vf *);
+ STVLXL STVLXL_VF
+ void __builtin_vec_stvlxl (vf, signed long long, float *);
+ STVLXL STVLXL_F
+
+[VEC_STVRX, vec_stvrx, __builtin_vec_stvrx, __PPU__]
+ void __builtin_vec_stvrx (vbc, signed long long, vbc *);
+ STVRX STVRX_VBC
+ void __builtin_vec_stvrx (vsc, signed long long, vsc *);
+ STVRX STVRX_VSC
+ void __builtin_vec_stvrx (vsc, signed long long, signed char *);
+ STVRX STVRX_SC
+ void __builtin_vec_stvrx (vuc, signed long long, vuc *);
+ STVRX STVRX_VUC
+ void __builtin_vec_stvrx (vuc, signed long long, unsigned char *);
+ STVRX STVRX_UC
+ void __builtin_vec_stvrx (vbs, signed long long, vbs *);
+ STVRX STVRX_VBS
+ void __builtin_vec_stvrx (vss, signed long long, vss *);
+ STVRX STVRX_VSS
+ void __builtin_vec_stvrx (vss, signed long long, signed short *);
+ STVRX STVRX_SS
+ void __builtin_vec_stvrx (vus, signed long long, vus *);
+ STVRX STVRX_VUS
+ void __builtin_vec_stvrx (vus, signed long long, unsigned short *);
+ STVRX STVRX_US
+ void __builtin_vec_stvrx (vp, signed long long, vp *);
+ STVRX STVRX_VP
+ void __builtin_vec_stvrx (vbi, signed long long, vbi *);
+ STVRX STVRX_VBI
+ void __builtin_vec_stvrx (vsi, signed long long, vsi *);
+ STVRX STVRX_VSI
+ void __builtin_vec_stvrx (vsi, signed long long, signed int *);
+ STVRX STVRX_SI
+ void __builtin_vec_stvrx (vui, signed long long, vui *);
+ STVRX STVRX_VUI
+ void __builtin_vec_stvrx (vui, signed long long, unsigned int *);
+ STVRX STVRX_UI
+ void __builtin_vec_stvrx (vf, signed long long, vf *);
+ STVRX STVRX_VF
+ void __builtin_vec_stvrx (vf, signed long long, float *);
+ STVRX STVRX_F
+
+[VEC_STVRXL, vec_stvrxl, __builtin_vec_stvrxl, __PPU__]
+ void __builtin_vec_stvrxl (vbc, signed long long, vbc *);
+ STVRXL STVRXL_VBC
+ void __builtin_vec_stvrxl (vsc, signed long long, vsc *);
+ STVRXL STVRXL_VSC
+ void __builtin_vec_stvrxl (vsc, signed long long, signed char *);
+ STVRXL STVRXL_SC
+ void __builtin_vec_stvrxl (vuc, signed long long, vuc *);
+ STVRXL STVRXL_VUC
+ void __builtin_vec_stvrxl (vuc, signed long long, unsigned char *);
+ STVRXL STVRXL_UC
+ void __builtin_vec_stvrxl (vbs, signed long long, vbs *);
+ STVRXL STVRXL_VBS
+ void __builtin_vec_stvrxl (vss, signed long long, vss *);
+ STVRXL STVRXL_VSS
+ void __builtin_vec_stvrxl (vss, signed long long, signed short *);
+ STVRXL STVRXL_SS
+ void __builtin_vec_stvrxl (vus, signed long long, vus *);
+ STVRXL STVRXL_VUS
+ void __builtin_vec_stvrxl (vus, signed long long, unsigned short *);
+ STVRXL STVRXL_US
+ void __builtin_vec_stvrxl (vp, signed long long, vp *);
+ STVRXL STVRXL_VP
+ void __builtin_vec_stvrxl (vbi, signed long long, vbi *);
+ STVRXL STVRXL_VBI
+ void __builtin_vec_stvrxl (vsi, signed long long, vsi *);
+ STVRXL STVRXL_VSI
+ void __builtin_vec_stvrxl (vsi, signed long long, signed int *);
+ STVRXL STVRXL_SI
+ void __builtin_vec_stvrxl (vui, signed long long, vui *);
+ STVRXL STVRXL_VUI
+ void __builtin_vec_stvrxl (vui, signed long long, unsigned int *);
+ STVRXL STVRXL_UI
+ void __builtin_vec_stvrxl (vf, signed long long, vf *);
+ STVRXL STVRXL_VF
+ void __builtin_vec_stvrxl (vf, signed long long, float *);
+ STVRXL STVRXL_F
+
+[VEC_STXVL, vec_xst_len, __builtin_vec_stxvl, _ARCH_PPC64_PWR9]
+ void __builtin_vec_stxvl (vsc, signed char *, unsigned long long);
+ STXVL STXVL_VSC
+ void __builtin_vec_stxvl (vuc, unsigned char *, unsigned long long);
+ STXVL STXVL_VUC
+ void __builtin_vec_stxvl (vss, signed short *, unsigned long long);
+ STXVL STXVL_VSS
+ void __builtin_vec_stxvl (vus, unsigned short *, unsigned long long);
+ STXVL STXVL_VUS
+ void __builtin_vec_stxvl (vsi, signed int *, unsigned long long);
+ STXVL STXVL_VSI
+ void __builtin_vec_stxvl (vui, unsigned int *, unsigned long long);
+ STXVL STXVL_VUI
+ void __builtin_vec_stxvl (vsll, signed long long *, unsigned long long);
+ STXVL STXVL_VSLL
+ void __builtin_vec_stxvl (vull, unsigned long long *, unsigned long long);
+ STXVL STXVL_VULL
+ void __builtin_vec_stxvl (vsq, signed __int128 *, unsigned long long);
+ STXVL STXVL_VSQ
+ void __builtin_vec_stxvl (vuq, unsigned __int128 *, unsigned long long);
+ STXVL STXVL_VUQ
+ void __builtin_vec_stxvl (vf, float *, unsigned long long);
+ STXVL STXVL_VF
+ void __builtin_vec_stxvl (vd, double *, unsigned long long);
+ STXVL STXVL_VD
+
+[VEC_SUB, vec_sub, __builtin_vec_sub]
+ vsc __builtin_vec_sub (vsc, vsc);
+ VSUBUBM VSUBUBM_VSC
+ vuc __builtin_vec_sub (vuc, vuc);
+ VSUBUBM VSUBUBM_VUC
+ vss __builtin_vec_sub (vss, vss);
+ VSUBUHM VSUBUHM_VSS
+ vus __builtin_vec_sub (vus, vus);
+ VSUBUHM VSUBUHM_VUS
+ vsi __builtin_vec_sub (vsi, vsi);
+ VSUBUWM VSUBUWM_VSI
+ vui __builtin_vec_sub (vui, vui);
+ VSUBUWM VSUBUWM_VUI
+ vsll __builtin_vec_sub (vsll, vsll);
+ VSUBUDM VSUBUDM_VSLL
+ vull __builtin_vec_sub (vull, vull);
+ VSUBUDM VSUBUDM_VULL
+ vsq __builtin_vec_sub (vsq, vsq);
+ VSUBUQM VSUBUQM_VSQ
+ vuq __builtin_vec_sub (vuq, vuq);
+ VSUBUQM VSUBUQM_VUQ
+ vf __builtin_vec_sub (vf, vf);
+ XVSUBSP
+ vd __builtin_vec_sub (vd, vd);
+ XVSUBDP
+
+[VEC_SUBC, vec_subc, __builtin_vec_subc]
+ vsi __builtin_vec_subc (vsi, vsi);
+ VSUBCUW VSUBCUW_VSI
+ vui __builtin_vec_subc (vui, vui);
+ VSUBCUW VSUBCUW_VUI
+ vsq __builtin_vec_subc (vsq, vsq);
+ VSUBCUQ VSUBCUQ_VSQ
+ vuq __builtin_vec_subc (vuq, vuq);
+ VSUBCUQ VSUBCUQ_VUQ
+
+[VEC_SUBS, vec_subs, __builtin_vec_subs]
+ vuc __builtin_vec_subs (vuc, vuc);
+ VSUBUBS
+ vsc __builtin_vec_subs (vsc, vsc);
+ VSUBSBS
+ vus __builtin_vec_subs (vus, vus);
+ VSUBUHS
+ vss __builtin_vec_subs (vss, vss);
+ VSUBSHS
+ vui __builtin_vec_subs (vui, vui);
+ VSUBUWS
+ vsi __builtin_vec_subs (vsi, vsi);
+ VSUBSWS
+
+[VEC_SUM2S, vec_sum2s, __builtin_vec_sum2s]
+ vsi __builtin_vec_sum2s (vsi, vsi);
+ VSUM2SWS
+
+[VEC_SUM4S, vec_sum4s, __builtin_vec_sum4s]
+ vui __builtin_vec_sum4s (vuc, vui);
+ VSUM4UBS
+ vsi __builtin_vec_sum4s (vsc, vui);
+ VSUM4SBS
+ vsi __builtin_vec_sum4s (vss, vsi);
+ VSUM4SHS
+
+[VEC_SUMS, vec_sums, __builtin_vec_sums]
+ vsi __builtin_vec_sums (vsi, vsi);
+ VSUMSWS
+
+[VEC_TERNARYLOGIC, vec_ternarylogic, __builtin_vec_xxeval, _ARCH_PWR10]
+ vuc __builtin_vec_xxeval (vuc, vuc, vuc, const int);
+ XXEVAL XXEVAL_VUC
+ vus __builtin_vec_xxeval (vus, vus, vus, const int);
+ XXEVAL XXEVAL_VUS
+ vui __builtin_vec_xxeval (vui, vui, vui, const int);
+ XXEVAL XXEVAL_VUI
+ vull __builtin_vec_xxeval (vull, vull, vull, const int);
+ XXEVAL XXEVAL_VULL
+ vuq __builtin_vec_xxeval (vuq, vuq, vuq, const int);
+ XXEVAL XXEVAL_VUQ
+
+[VEC_TEST_LSBB_ALL_ONES, vec_test_lsbb_all_ones, __builtin_vec_xvtlsbb_all_ones, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_ones (vuc);
+ XVTLSBB_ONES
+
+[VEC_TEST_LSBB_ALL_ZEROS, vec_test_lsbb_all_zeros, __builtin_vec_xvtlsbb_all_zeros, _ARCH_PWR10]
+ signed int __builtin_vec_xvtlsbb_all_zeros (vuc);
+ XVTLSBB_ZEROS
+
+[VEC_TRUNC, vec_trunc, __builtin_vec_trunc]
+ vf __builtin_vec_trunc (vf);
+ XVRSPIZ
+ vd __builtin_vec_trunc (vd);
+ XVRDPIZ
+
+[VEC_TSTSFI_GT, SKIP, __builtin_dfp_dtstsfi_gt]
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal64);
+ TSTSFI_GT_DD
+ signed int __builtin_dfp_dtstsfi_gt (unsigned int, _Decimal128);
+ TSTSFI_GT_TD
+
+[VEC_TSTSFI_EQ, SKIP, __builtin_dfp_dtstsfi_eq]
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal64);
+ TSTSFI_EQ_DD
+ signed int __builtin_dfp_dtstsfi_eq (unsigned int, _Decimal128);
+ TSTSFI_EQ_TD
+
+[VEC_TSTSFI_LT, SKIP, __builtin_dfp_dtstsfi_lt]
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal64);
+ TSTSFI_LT_DD
+ signed int __builtin_dfp_dtstsfi_lt (unsigned int, _Decimal128);
+ TSTSFI_LT_TD
+
+[VEC_TSTSFI_OV, SKIP, __builtin_dfp_dtstsfi_ov]
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal64);
+ TSTSFI_OV_DD
+ signed int __builtin_dfp_dtstsfi_ov (unsigned int, _Decimal128);
+ TSTSFI_OV_TD
+
+[VEC_UNPACKH, vec_unpackh, __builtin_vec_unpackh]
+ vss __builtin_vec_unpackh (vsc);
+ VUPKHSB VUPKHSB_VSC
+ vbs __builtin_vec_unpackh (vbc);
+ VUPKHSB VUPKHSB_VBC
+ vsi __builtin_vec_unpackh (vss);
+ VUPKHSH VUPKHSH_VSS
+ vbi __builtin_vec_unpackh (vbs);
+ VUPKHSH VUPKHSH_VBS
+ vui __builtin_vec_unpackh (vp);
+ VUPKHPX
+ vsll __builtin_vec_unpackh (vsi);
+ VUPKHSW VUPKHSW_VSI
+ vbll __builtin_vec_unpackh (vbi);
+ VUPKHSW VUPKHSW_VBI
+ vd __builtin_vec_unpackh (vf);
+ DOUBLEH_V4SF VUPKHF
+
+[VEC_UNPACKL, vec_unpackl, __builtin_vec_unpackl]
+ vss __builtin_vec_unpackl (vsc);
+ VUPKLSB VUPKLSB_VSC
+ vbs __builtin_vec_unpackl (vbc);
+ VUPKLSB VUPKLSB_VBC
+ vsi __builtin_vec_unpackl (vss);
+ VUPKLSH VUPKLSH_VSS
+ vbi __builtin_vec_unpackl (vbs);
+ VUPKLSH VUPKLSH_VBS
+ vui __builtin_vec_unpackl (vp);
+ VUPKLPX
+ vsll __builtin_vec_unpackl (vsi);
+ VUPKLSW VUPKLSW_VSI
+ vbll __builtin_vec_unpackl (vbi);
+ VUPKLSW VUPKLSW_VBI
+ vd __builtin_vec_unpackl (vf);
+ DOUBLEL_V4SF VUPKLF
+
+[VEC_UNSIGNED, vec_unsigned, __builtin_vec_vunsigned]
+ vsi __builtin_vec_vunsigned (vf);
+ VEC_VUNSIGNED_V4SF
+ vsll __builtin_vec_vunsigned (vd);
+ VEC_VUNSIGNED_V2DF
+
+[VEC_UNSIGNED2, vec_unsigned2, __builtin_vec_vunsigned2]
+ vsi __builtin_vec_vunsigned2 (vd, vd);
+ VEC_VUNSIGNED2_V2DF
+
+[VEC_UNSIGNEDO, vec_unsignedo, __builtin_vec_vunsignedo]
+ vui __builtin_vec_vunsignedo (vd);
+ VEC_VUNSIGNEDO_V2DF
+
+; Not sure this should exist, but it does. This group is redundant with
+; vec_addec, but the next three don't have an alias.
+[VEC_VADDECUQ, vec_vaddecuq, __builtin_vec_vaddecuq]
+ vsq __builtin_vec_vaddecuq (vsq, vsq, vsq);
+ VADDECUQ VADDECUQ_VSQ2
+ vuq __builtin_vec_vaddecuq (vuq, vuq, vuq);
+ VADDECUQ VADDECUQ_VUQ2
+
+; Not sure this should exist, but it does.
+[VEC_VADDEUQM, vec_vaddeuqm, __builtin_vec_vaddeuqm]
+ vsq __builtin_vec_vaddeuqm (vsq, vsq, vsq);
+ VADDEUQM VADDEUQM_VSQ
+ vuq __builtin_vec_vaddeuqm (vuq, vuq, vuq);
+ VADDEUQM VADDEUQM_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBECUQ, vec_vsubecuq, __builtin_vec_vsubecuq]
+ vsq __builtin_vec_vsubecuq (vsq, vsq, vsq);
+ VSUBECUQ VSUBECUQ_VSQ
+ vuq __builtin_vec_vsubecuq (vuq, vuq, vuq);
+ VSUBECUQ VSUBECUQ_VUQ
+
+; Not sure this should exist, but it does.
+[VEC_VSUBEUQM, vec_vsubeuqm, __builtin_vec_vsubeuqm]
+ vsq __builtin_vec_vsubeuqm (vsq, vsq, vsq);
+ VSUBEUQM VSUBEUQM_VSQ
+ vuq __builtin_vec_vsubeuqm (vuq, vuq, vuq);
+ VSUBEUQM VSUBEUQM_VUQ
+
+[VEC_VEE, vec_extract_exp, __builtin_vec_extract_exp, _ARCH_PWR9]
+ vui __builtin_vec_extract_exp (vf);
+ VEESP
+ vull __builtin_vec_extract_exp (vd);
+ VEEDP
+
+[VEC_VES, vec_extract_sig, __builtin_vec_extract_sig, _ARCH_PWR9]
+ vui __builtin_vec_extract_sig (vf);
+ VESSP
+ vull __builtin_vec_extract_sig (vd);
+ VESDP
+
+[VEC_VIE, vec_insert_exp, __builtin_vec_insert_exp, _ARCH_PWR9]
+ vf __builtin_vec_insert_exp (vf, vui);
+ VIESP VIESP_VF
+ vf __builtin_vec_insert_exp (vui, vui);
+ VIESP VIESP_VUI
+ vd __builtin_vec_insert_exp (vd, vull);
+ VIEDP VIEDP_VD
+ vd __builtin_vec_insert_exp (vull, vull);
+ VIEDP VIEDP_VULL
+
+[VEC_VSCEEQ, scalar_cmp_exp_eq, __builtin_vec_scalar_cmp_exp_eq, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_eq (double, double);
+ VSCEDPEQ
+ signed int __builtin_vec_scalar_cmp_exp_eq (_Float128, _Float128);
+ VSCEQPEQ
+
+[VEC_VSCEGT, scalar_cmp_exp_gt, __builtin_vec_scalar_cmp_exp_gt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_gt (double, double);
+ VSCEDPGT
+ signed int __builtin_vec_scalar_cmp_exp_gt (_Float128, _Float128);
+ VSCEQPGT
+
+[VEC_VSCELT, scalar_cmp_exp_lt, __builtin_vec_scalar_cmp_exp_lt, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_lt (double, double);
+ VSCEDPLT
+ signed int __builtin_vec_scalar_cmp_exp_lt (_Float128, _Float128);
+ VSCEQPLT
+
+[VEC_VSCEUO, scalar_cmp_exp_unordered, __builtin_vec_scalar_cmp_exp_unordered, _ARCH_PWR9]
+ signed int __builtin_vec_scalar_cmp_exp_unordered (double, double);
+ VSCEDPUO
+ signed int __builtin_vec_scalar_cmp_exp_unordered (_Float128, _Float128);
+ VSCEQPUO
+
+[VEC_VSEE, scalar_extract_exp, __builtin_vec_scalar_extract_exp, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_exp (double);
+ VSEEDP
+ unsigned int __builtin_vec_scalar_extract_exp (_Float128);
+ VSEEQP
+
+[VEC_VSES, scalar_extract_sig, __builtin_vec_scalar_extract_sig, _ARCH_PWR9]
+ unsigned int __builtin_vec_scalar_extract_sig (double);
+ VSESDP
+ unsigned int __builtin_vec_scalar_extract_sig (_Float128);
+ VSESQP
+
+[VEC_VSIE, scalar_insert_exp, __builtin_vec_scalar_insert_exp, _ARCH_PWR9]
+ double __builtin_vec_scalar_insert_exp (unsigned int, unsigned int);
+ VSIEDP VSIEDP_UI
+ double __builtin_vec_scalar_insert_exp (double, unsigned int);
+ VSIEDP VSIEDP_D
+ _Float128 __builtin_vec_scalar_insert_exp (unsigned long long, unsigned long long);
+ VSIEQP VSIEQP_ULL
+ _Float128 __builtin_vec_scalar_insert_exp (_Float128, unsigned long long);
+ VSIEQP VSIEQP_F128
+
+[VEC_VSTDC, scalar_test_data_class, __builtin_vec_scalar_test_data_class, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_data_class (float, signed int);
+ VSTDCSP
+ bool __builtin_vec_scalar_test_data_class (double, signed int);
+ VSTDCDP
+ bool __builtin_vec_scalar_test_data_class (_Float128, signed int);
+ VSTDCQP
+
+[VEC_VSTDCN, scalar_test_neg, __builtin_vec_scalar_test_neg, _ARCH_PWR9]
+ bool __builtin_vec_scalar_test_neg (float);
+ VSTDCNSP
+ bool __builtin_vec_scalar_test_neg (double);
+ VSTDCNDP
+ bool __builtin_vec_scalar_test_neg (_Float128);
+ VSTDCNQP
+
+[VEC_VTDC, vec_test_data_class, __builtin_vec_test_data_class, _ARCH_PWR9]
+ vbi __builtin_vec_test_data_class (vf, signed int);
+ VTDCSP
+ vbll __builtin_vec_test_data_class (vd, signed int);
+ VTDCDP
+
+[VEC_XL, vec_xl, __builtin_vec_vsx_ld, __VSX__]
+ vsc __builtin_vec_vsx_ld (signed long long, vsc *);
+ LXVW4X_V4SI LXVW4X_VSC
+ vsc __builtin_vec_vsx_ld (signed long long, signed char *);
+ LXVW4X_V4SI LXVW4X_SC
+ vuc __builtin_vec_vsx_ld (signed long long, vuc *);
+ LXVW4X_V4SI LXVW4X_VUC
+ vuc __builtin_vec_vsx_ld (signed long long, unsigned char *);
+ LXVW4X_V4SI LXVW4X_UC
+ vss __builtin_vec_vsx_ld (signed long long, vss *);
+ LXVW4X_V4SI LXVW4X_VSS
+ vss __builtin_vec_vsx_ld (signed long long, signed short *);
+ LXVW4X_V4SI LXVW4X_SS
+ vus __builtin_vec_vsx_ld (signed long long, vus *);
+ LXVW4X_V4SI LXVW4X_VUS
+ vus __builtin_vec_vsx_ld (signed long long, unsigned short *);
+ LXVW4X_V4SI LXVW4X_US
+ vsi __builtin_vec_vsx_ld (signed long long, vsi *);
+ LXVW4X_V4SI LXVW4X_VSI
+ vsi __builtin_vec_vsx_ld (signed long long, signed int *);
+ LXVW4X_V4SI LXVW4X_SI
+ vui __builtin_vec_vsx_ld (signed long long, vui *);
+ LXVW4X_V4SI LXVW4X_VUI
+ vui __builtin_vec_vsx_ld (signed long long, unsigned int *);
+ LXVW4X_V4SI LXVW4X_UI
+ vsll __builtin_vec_vsx_ld (signed long long, vsll *);
+ LXVD2X_V2DI LXVD2X_VSLL
+ vsll __builtin_vec_vsx_ld (signed long long, signed long long *);
+ LXVD2X_V2DI LXVD2X_SLL
+ vull __builtin_vec_vsx_ld (signed long long, vull *);
+ LXVD2X_V2DI LXVD2X_VULL
+ vull __builtin_vec_vsx_ld (signed long long, unsigned long long *);
+ LXVD2X_V2DI LXVD2X_ULL
+ vsq __builtin_vec_vsx_ld (signed long long, vsq *);
+ LXVD2X_V1TI LXVD2X_VSQ
+ vsq __builtin_vec_vsx_ld (signed long long, signed __int128 *);
+ LXVD2X_V1TI LXVD2X_SQ
+ vuq __builtin_vec_vsx_ld (signed long long, unsigned __int128 *);
+ LXVD2X_V1TI LXVD2X_UQ
+ vf __builtin_vec_vsx_ld (signed long long, vf *);
+ LXVW4X_V4SF LXVW4X_VF
+ vf __builtin_vec_vsx_ld (signed long long, float *);
+ LXVW4X_V4SF LXVW4X_F
+ vd __builtin_vec_vsx_ld (signed long long, vd *);
+ LXVD2X_V2DF LXVD2X_VD
+ vd __builtin_vec_vsx_ld (signed long long, double *);
+ LXVD2X_V2DF LXVD2X_D
+
+[VEC_XL_BE, vec_xl_be, __builtin_vec_xl_be, __VSX__]
+ vsc __builtin_vec_xl_be (signed long long, vsc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VSC
+ vsc __builtin_vec_xl_be (signed long long, signed char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_SC
+ vuc __builtin_vec_xl_be (signed long long, vuc *);
+ LD_ELEMREV_V16QI LD_ELEMREV_VUC
+ vuc __builtin_vec_xl_be (signed long long, unsigned char *);
+ LD_ELEMREV_V16QI LD_ELEMREV_UC
+ vss __builtin_vec_xl_be (signed long long, vss *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VSS
+ vss __builtin_vec_xl_be (signed long long, signed short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_SS
+ vus __builtin_vec_xl_be (signed long long, vus *);
+ LD_ELEMREV_V8HI LD_ELEMREV_VUS
+ vus __builtin_vec_xl_be (signed long long, unsigned short *);
+ LD_ELEMREV_V8HI LD_ELEMREV_US
+ vsi __builtin_vec_xl_be (signed long long, vsi *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VSI
+ vsi __builtin_vec_xl_be (signed long long, signed int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_SI
+ vui __builtin_vec_xl_be (signed long long, vui *);
+ LD_ELEMREV_V4SI LD_ELEMREV_VUI
+ vui __builtin_vec_xl_be (signed long long, unsigned int *);
+ LD_ELEMREV_V4SI LD_ELEMREV_UI
+ vsll __builtin_vec_xl_be (signed long long, vsll *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VSLL
+ vsll __builtin_vec_xl_be (signed long long, signed long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_SLL
+ vull __builtin_vec_xl_be (signed long long, vull *);
+ LD_ELEMREV_V2DI LD_ELEMREV_VULL
+ vull __builtin_vec_xl_be (signed long long, unsigned long long *);
+ LD_ELEMREV_V2DI LD_ELEMREV_ULL
+ vsq __builtin_vec_xl_be (signed long long, signed __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_SQ
+ vuq __builtin_vec_xl_be (signed long long, unsigned __int128 *);
+ LD_ELEMREV_V1TI LD_ELEMREV_UQ
+ vf __builtin_vec_xl_be (signed long long, vf *);
+ LD_ELEMREV_V4SF LD_ELEMREV_VF
+ vf __builtin_vec_xl_be (signed long long, float *);
+ LD_ELEMREV_V4SF LD_ELEMREV_F
+ vd __builtin_vec_xl_be (signed long long, vd *);
+ LD_ELEMREV_V2DF LD_ELEMREV_VD
+ vd __builtin_vec_xl_be (signed long long, double *);
+ LD_ELEMREV_V2DF LD_ELEMREV_DD
+
+[VEC_XL_LEN_R, vec_xl_len_r, __builtin_vec_xl_len_r, _ARCH_PPC64_PWR9]
+  vuc __builtin_vec_xl_len_r (unsigned char *, unsigned long long);
+ XL_LEN_R
+
+[VEC_XL_SEXT, vec_xl_sext, __builtin_vec_xl_sext, _ARCH_PWR10]
+ vsq __builtin_vec_xl_sext (signed long long, signed char *);
+ SE_LXVRBX
+ vsq __builtin_vec_xl_sext (signed long long, signed short *);
+ SE_LXVRHX
+ vsq __builtin_vec_xl_sext (signed long long, signed int *);
+ SE_LXVRWX
+ vsq __builtin_vec_xl_sext (signed long long, signed long long *);
+ SE_LXVRDX
+
+[VEC_XL_ZEXT, vec_xl_zext, __builtin_vec_xl_zext, _ARCH_PWR10]
+  vuq __builtin_vec_xl_zext (signed long long, unsigned char *);
+    ZE_LXVRBX
+  vuq __builtin_vec_xl_zext (signed long long, unsigned short *);
+    ZE_LXVRHX
+  vuq __builtin_vec_xl_zext (signed long long, unsigned int *);
+    ZE_LXVRWX
+  vuq __builtin_vec_xl_zext (signed long long, unsigned long long *);
+    ZE_LXVRDX
+
+[VEC_XOR, vec_xor, __builtin_vec_xor]
+ vsc __builtin_vec_xor (vsc, vsc);
+ VXOR_V16QI
+ vuc __builtin_vec_xor (vuc, vuc);
+ VXOR_V16QI_UNS VXOR_VUC
+ vbc __builtin_vec_xor (vbc, vbc);
+ VXOR_V16QI_UNS VXOR_VBC
+ vss __builtin_vec_xor (vss, vss);
+ VXOR_V8HI
+ vus __builtin_vec_xor (vus, vus);
+ VXOR_V8HI_UNS VXOR_VUS
+ vbs __builtin_vec_xor (vbs, vbs);
+ VXOR_V8HI_UNS VXOR_VBS
+ vsi __builtin_vec_xor (vsi, vsi);
+ VXOR_V4SI
+ vui __builtin_vec_xor (vui, vui);
+ VXOR_V4SI_UNS VXOR_VUI
+ vbi __builtin_vec_xor (vbi, vbi);
+ VXOR_V4SI_UNS VXOR_VBI
+ vsll __builtin_vec_xor (vsll, vsll);
+ VXOR_V2DI
+ vull __builtin_vec_xor (vull, vull);
+ VXOR_V2DI_UNS VXOR_VULL
+ vbll __builtin_vec_xor (vbll, vbll);
+ VXOR_V2DI_UNS VXOR_VBLL
+ vf __builtin_vec_xor (vf, vf);
+ VXOR_V4SF
+ vd __builtin_vec_xor (vd, vd);
+ VXOR_V2DF
+
+[VEC_XST, vec_xst, __builtin_vec_vsx_st, __VSX__]
+ void __builtin_vec_vsx_st (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC
+ void __builtin_vec_vsx_st (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC
+ void __builtin_vec_vsx_st (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC
+ void __builtin_vec_vsx_st (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC
+ void __builtin_vec_vsx_st (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC
+ void __builtin_vec_vsx_st (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S
+ void __builtin_vec_vsx_st (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U
+ void __builtin_vec_vsx_st (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS
+ void __builtin_vec_vsx_st (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS
+ void __builtin_vec_vsx_st (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS
+ void __builtin_vec_vsx_st (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US
+ void __builtin_vec_vsx_st (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS
+ void __builtin_vec_vsx_st (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S
+ void __builtin_vec_vsx_st (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U
+ void __builtin_vec_vsx_st (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP
+ void __builtin_vec_vsx_st (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI
+ void __builtin_vec_vsx_st (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI
+ void __builtin_vec_vsx_st (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI
+ void __builtin_vec_vsx_st (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI
+ void __builtin_vec_vsx_st (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI
+ void __builtin_vec_vsx_st (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S
+ void __builtin_vec_vsx_st (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U
+ void __builtin_vec_vsx_st (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL
+ void __builtin_vec_vsx_st (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL
+ void __builtin_vec_vsx_st (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL
+ void __builtin_vec_vsx_st (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL
+ void __builtin_vec_vsx_st (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL
+ void __builtin_vec_vsx_st (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF
+ void __builtin_vec_vsx_st (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F
+ void __builtin_vec_vsx_st (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD
+ void __builtin_vec_vsx_st (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D
+
+[VEC_XST_BE, vec_xst_be, __builtin_vec_xst_be, __VSX__]
+ void __builtin_vec_xst_be (vsc, signed long long, vsc *);
+ STXVW4X_V16QI STXVW4X_VSC_BE
+ void __builtin_vec_xst_be (vsc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_SC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, vuc *);
+ STXVW4X_V16QI STXVW4X_VUC_BE
+ void __builtin_vec_xst_be (vuc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_UC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, vbc *);
+ STXVW4X_V16QI STXVW4X_VBC_BE
+ void __builtin_vec_xst_be (vbc, signed long long, signed char *);
+ STXVW4X_V16QI STXVW4X_VBC_S_BE
+ void __builtin_vec_xst_be (vbc, signed long long, unsigned char *);
+ STXVW4X_V16QI STXVW4X_VBC_U_BE
+ void __builtin_vec_xst_be (vss, signed long long, vss *);
+ STXVW4X_V8HI STXVW4X_VSS_BE
+ void __builtin_vec_xst_be (vss, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_SS_BE
+ void __builtin_vec_xst_be (vus, signed long long, vus *);
+ STXVW4X_V8HI STXVW4X_VUS_BE
+ void __builtin_vec_xst_be (vus, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_US_BE
+ void __builtin_vec_xst_be (vbs, signed long long, vbs *);
+ STXVW4X_V8HI STXVW4X_VBS_BE
+ void __builtin_vec_xst_be (vbs, signed long long, signed short *);
+ STXVW4X_V8HI STXVW4X_VBS_S_BE
+ void __builtin_vec_xst_be (vbs, signed long long, unsigned short *);
+ STXVW4X_V8HI STXVW4X_VBS_U_BE
+ void __builtin_vec_xst_be (vp, signed long long, vp *);
+ STXVW4X_V8HI STXVW4X_VP_BE
+ void __builtin_vec_xst_be (vsi, signed long long, vsi *);
+ STXVW4X_V4SI STXVW4X_VSI_BE
+ void __builtin_vec_xst_be (vsi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_SI_BE
+ void __builtin_vec_xst_be (vui, signed long long, vui *);
+ STXVW4X_V4SI STXVW4X_VUI_BE
+ void __builtin_vec_xst_be (vui, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_UI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, vbi *);
+ STXVW4X_V4SI STXVW4X_VBI_BE
+ void __builtin_vec_xst_be (vbi, signed long long, signed int *);
+ STXVW4X_V4SI STXVW4X_VBI_S_BE
+ void __builtin_vec_xst_be (vbi, signed long long, unsigned int *);
+ STXVW4X_V4SI STXVW4X_VBI_U_BE
+ void __builtin_vec_xst_be (vsll, signed long long, vsll *);
+ STXVD2X_V2DI STXVD2X_VSLL_BE
+ void __builtin_vec_xst_be (vsll, signed long long, signed long long *);
+ STXVD2X_V2DI STXVD2X_SLL_BE
+ void __builtin_vec_xst_be (vull, signed long long, vull *);
+ STXVD2X_V2DI STXVD2X_VULL_BE
+ void __builtin_vec_xst_be (vull, signed long long, unsigned long long *);
+ STXVD2X_V2DI STXVD2X_ULL_BE
+ void __builtin_vec_xst_be (vbll, signed long long, vbll *);
+ STXVD2X_V2DI STXVD2X_VBLL_BE
+ void __builtin_vec_xst_be (vf, signed long long, vf *);
+ STXVW4X_V4SF STXVW4X_VF_BE
+ void __builtin_vec_xst_be (vf, signed long long, float *);
+ STXVW4X_V4SF STXVW4X_F_BE
+ void __builtin_vec_xst_be (vd, signed long long, vd *);
+ STXVD2X_V2DF STXVD2X_VD_BE
+ void __builtin_vec_xst_be (vd, signed long long, double *);
+ STXVD2X_V2DF STXVD2X_D_BE
+
+[VEC_XST_LEN_R, vec_xst_len_r, __builtin_vec_xst_len_r, _ARCH_PPC64_PWR9]
+  void __builtin_vec_xst_len_r (vuc, unsigned char *, unsigned long long);
+ XST_LEN_R
+
+[VEC_XST_TRUNC, vec_xst_trunc, __builtin_vec_xst_trunc, _ARCH_PWR10]
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed char *);
+ TR_STXVRBX TR_STXVRBX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned char *);
+ TR_STXVRBX TR_STXVRBX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed short *);
+ TR_STXVRHX TR_STXVRHX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned short *);
+ TR_STXVRHX TR_STXVRHX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed int *);
+ TR_STXVRWX TR_STXVRWX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned int *);
+ TR_STXVRWX TR_STXVRWX_U
+ void __builtin_vec_xst_trunc (vsq, signed long long, signed long long *);
+ TR_STXVRDX TR_STXVRDX_S
+ void __builtin_vec_xst_trunc (vuq, signed long long, unsigned long long *);
+ TR_STXVRDX TR_STXVRDX_U
+
+[VEC_XXPERMDI, vec_xxpermdi, __builtin_vsx_xxpermdi, __VSX__]
+ vsc __builtin_vsx_xxpermdi (vsc, vsc, const int);
+ XXPERMDI_16QI XXPERMDI_VSC
+ vuc __builtin_vsx_xxpermdi (vuc, vuc, const int);
+ XXPERMDI_16QI XXPERMDI_VUC
+ vss __builtin_vsx_xxpermdi (vss, vss, const int);
+ XXPERMDI_8HI XXPERMDI_VSS
+ vus __builtin_vsx_xxpermdi (vus, vus, const int);
+ XXPERMDI_8HI XXPERMDI_VUS
+ vsi __builtin_vsx_xxpermdi (vsi, vsi, const int);
+ XXPERMDI_4SI XXPERMDI_VSI
+ vui __builtin_vsx_xxpermdi (vui, vui, const int);
+ XXPERMDI_4SI XXPERMDI_VUI
+ vsll __builtin_vsx_xxpermdi (vsll, vsll, const int);
+ XXPERMDI_2DI XXPERMDI_VSLL
+ vull __builtin_vsx_xxpermdi (vull, vull, const int);
+ XXPERMDI_2DI XXPERMDI_VULL
+ vf __builtin_vsx_xxpermdi (vf, vf, const int);
+ XXPERMDI_4SF XXPERMDI_VF
+ vd __builtin_vsx_xxpermdi (vd, vd, const int);
+ XXPERMDI_2DF XXPERMDI_VD
+
+[VEC_XXSLDWI, vec_xxsldwi, __builtin_vsx_xxsldwi, __VSX__]
+ vsc __builtin_vsx_xxsldwi (vsc, vsc, const int);
+ XXSLDWI_16QI XXSLDWI_VSC2
+ vuc __builtin_vsx_xxsldwi (vuc, vuc, const int);
+ XXSLDWI_16QI XXSLDWI_VUC2
+ vss __builtin_vsx_xxsldwi (vss, vss, const int);
+ XXSLDWI_8HI XXSLDWI_VSS2
+ vus __builtin_vsx_xxsldwi (vus, vus, const int);
+ XXSLDWI_8HI XXSLDWI_VUS2
+ vsi __builtin_vsx_xxsldwi (vsi, vsi, const int);
+ XXSLDWI_4SI XXSLDWI_VSI2
+ vui __builtin_vsx_xxsldwi (vui, vui, const int);
+ XXSLDWI_4SI XXSLDWI_VUI2
+ vsll __builtin_vsx_xxsldwi (vsll, vsll, const int);
+ XXSLDWI_2DI XXSLDWI_VSLL2
+ vull __builtin_vsx_xxsldwi (vull, vull, const int);
+ XXSLDWI_2DI XXSLDWI_VULL2
+ vf __builtin_vsx_xxsldwi (vf, vf, const int);
+ XXSLDWI_4SF XXSLDWI_VF2
+ vd __builtin_vsx_xxsldwi (vd, vd, const int);
+ XXSLDWI_2DF XXSLDWI_VD2
^ permalink raw reply [flat|nested] 4+ messages in thread
end of thread, other threads:[~2021-02-07 18:13 UTC | newest]
Thread overview: 4+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2020-11-24 16:44 [gcc(refs/users/wschmidt/heads/builtins4)] rs6000: Add remaining overloads William Schmidt
-- strict thread matches above, loose matches on Subject: below --
2021-02-07 18:13 William Schmidt
2020-12-16 18:07 William Schmidt
2020-11-02 22:08 William Schmidt
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).