From mboxrd@z Thu Jan 1 00:00:00 1970
Received: by sourceware.org (Postfix, from userid 1725)
	id 9E66F383983E; Fri, 25 Jun 2021 16:17:46 +0000 (GMT)
DKIM-Filter: OpenDKIM Filter v2.11.0 sourceware.org 9E66F383983E
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
From: William Schmidt
To: gcc-cvs@gcc.gnu.org
Subject: [gcc(refs/users/wschmidt/heads/builtins10)] rs6000: Add remaining overloads
X-Act-Checkin: gcc
X-Git-Author: Bill Schmidt
X-Git-Refname: refs/users/wschmidt/heads/builtins10
X-Git-Oldrev: 38e894d88c39f1639c544abb94a8fbf56fb57e81
X-Git-Newrev: 158aa93bb9b685e4c6c66a0244a627a34eea63ba
Message-Id: <20210625161746.9E66F383983E@sourceware.org>
Date: Fri, 25 Jun 2021 16:17:46 +0000 (GMT)
X-BeenThere: gcc-cvs@gcc.gnu.org
X-Mailman-Version: 2.1.29
Precedence: list
List-Id: Gcc-cvs mailing list
X-List-Received-Date: Fri, 25 Jun 2021 16:17:46 -0000

https://gcc.gnu.org/g:158aa93bb9b685e4c6c66a0244a627a34eea63ba

commit 158aa93bb9b685e4c6c66a0244a627a34eea63ba
Author: Bill Schmidt
Date:   Tue Jun 15 09:23:05 2021 -0500

    rs6000: Add remaining overloads

    2021-06-15  Bill Schmidt

    gcc/
	* config/rs6000/rs6000-overload.def: Add remaining overloads.

Diff:
---
 gcc/config/rs6000/rs6000-overload.def | 6104 +++++++++++++++++++++++++++++++++
 1 file changed, 6104 insertions(+)

diff --git a/gcc/config/rs6000/rs6000-overload.def b/gcc/config/rs6000/rs6000-overload.def
index d8028c94470..d3f054bec39 100644
--- a/gcc/config/rs6000/rs6000-overload.def
+++ b/gcc/config/rs6000/rs6000-overload.def
@@ -75,8 +75,6112 @@
 ; a semicolon are also treated as blank lines.
 
+[BCDADD, __builtin_bcdadd, __builtin_vec_bcdadd]
+  vsq __builtin_vec_bcdadd (vsq, vsq, const int);
+    BCDADD_V1TI
+  vuc __builtin_vec_bcdadd (vuc, vuc, const int);
+    BCDADD_V16QI
+
+[BCDADD_EQ, __builtin_bcdadd_eq, __builtin_vec_bcdadd_eq]
+  signed int __builtin_vec_bcdadd_eq (vsq, vsq, const int);
+    BCDADD_EQ_V1TI
+  signed int __builtin_vec_bcdadd_eq (vuc, vuc, const int);
+    BCDADD_EQ_V16QI
+
+[BCDADD_GT, __builtin_bcdadd_gt, __builtin_vec_bcdadd_gt]
+  signed int __builtin_vec_bcdadd_gt (vsq, vsq, const int);
+    BCDADD_GT_V1TI
+  signed int __builtin_vec_bcdadd_gt (vuc, vuc, const int);
+    BCDADD_GT_V16QI
+
+[BCDADD_LT, __builtin_bcdadd_lt, __builtin_vec_bcdadd_lt]
+  signed int __builtin_vec_bcdadd_lt (vsq, vsq, const int);
+    BCDADD_LT_V1TI
+  signed int __builtin_vec_bcdadd_lt (vuc, vuc, const int);
+    BCDADD_LT_V16QI
+
+[BCDADD_OV, __builtin_bcdadd_ov, __builtin_vec_bcdadd_ov]
+  signed int __builtin_vec_bcdadd_ov (vsq, vsq, const int);
+    BCDADD_OV_V1TI
+  signed int __builtin_vec_bcdadd_ov (vuc, vuc, const int);
+    BCDADD_OV_V16QI
+
+[BCDDIV10, __builtin_bcddiv10, __builtin_vec_bcddiv10]
+  vuc __builtin_vec_bcddiv10 (vuc);
+    BCDDIV10_V16QI
+
+[BCDINVALID, __builtin_bcdinvalid, __builtin_vec_bcdinvalid]
+  signed int __builtin_vec_bcdinvalid (vsq);
+    BCDINVALID_V1TI
+  signed int __builtin_vec_bcdinvalid (vuc);
+    BCDINVALID_V16QI
+
+[BCDMUL10, __builtin_bcdmul10, __builtin_vec_bcdmul10]
+  vuc __builtin_vec_bcdmul10 (vuc);
+    BCDMUL10_V16QI
+
+[BCDSUB, __builtin_bcdsub, __builtin_vec_bcdsub]
+  vsq __builtin_vec_bcdsub (vsq, vsq, const int);
+    BCDSUB_V1TI
+  vuc __builtin_vec_bcdsub (vuc, vuc, const int);
+    BCDSUB_V16QI
+
+[BCDSUB_EQ, __builtin_bcdsub_eq, __builtin_vec_bcdsub_eq]
+  signed int __builtin_vec_bcdsub_eq (vsq, vsq, const int);
+    BCDSUB_EQ_V1TI
+  signed int __builtin_vec_bcdsub_eq (vuc, vuc, const int);
+    BCDSUB_EQ_V16QI
+
+[BCDSUB_GE, __builtin_bcdsub_ge, __builtin_vec_bcdsub_ge]
+  signed int __builtin_vec_bcdsub_ge (vsq, vsq, const int);
+    BCDSUB_GE_V1TI
+  signed int __builtin_vec_bcdsub_ge (vuc, vuc, const int);
+    BCDSUB_GE_V16QI
+
+[BCDSUB_GT, __builtin_bcdsub_gt, __builtin_vec_bcdsub_gt]
+  signed int __builtin_vec_bcdsub_gt (vsq, vsq, const int);
+    BCDSUB_GT_V1TI
+  signed int __builtin_vec_bcdsub_gt (vuc, vuc, const int);
+    BCDSUB_GT_V16QI
+
+[BCDSUB_LE, __builtin_bcdsub_le, __builtin_vec_bcdsub_le]
+  signed int __builtin_vec_bcdsub_le (vsq, vsq, const int);
+    BCDSUB_LE_V1TI
+  signed int __builtin_vec_bcdsub_le (vuc, vuc, const int);
+    BCDSUB_LE_V16QI
+
+[BCDSUB_LT, __builtin_bcdsub_lt, __builtin_vec_bcdsub_lt]
+  signed int __builtin_vec_bcdsub_lt (vsq, vsq, const int);
+    BCDSUB_LT_V1TI
+  signed int __builtin_vec_bcdsub_lt (vuc, vuc, const int);
+    BCDSUB_LT_V16QI
+
+[BCDSUB_OV, __builtin_bcdsub_ov, __builtin_vec_bcdsub_ov]
+  signed int __builtin_vec_bcdsub_ov (vsq, vsq, const int);
+    BCDSUB_OV_V1TI
+  signed int __builtin_vec_bcdsub_ov (vuc, vuc, const int);
+    BCDSUB_OV_V16QI
+
+[BCD2DFP, __builtin_bcd2dfp, __builtin_vec_denb2dfp]
+  _Decimal128 __builtin_vec_denb2dfp (vuc);
+    DENB2DFP_V16QI
+
+[CRYPTO_PERMXOR, SKIP, __builtin_crypto_vpermxor]
+  vuc __builtin_crypto_vpermxor (vuc, vuc, vuc);
+    VPERMXOR_V16QI
+  vus __builtin_crypto_vpermxor (vus, vus, vus);
+    VPERMXOR_V8HI
+  vui __builtin_crypto_vpermxor (vui, vui, vui);
+    VPERMXOR_V4SI
+  vull __builtin_crypto_vpermxor (vull, vull, vull);
+    VPERMXOR_V2DI
+
+[CRYPTO_PMSUM, SKIP, __builtin_crypto_vpmsum]
+  vuc __builtin_crypto_vpmsum (vuc, vuc);
+    VPMSUMB VPMSUMB_C
+  vus __builtin_crypto_vpmsum (vus, vus);
+    VPMSUMH VPMSUMH_C
+  vui __builtin_crypto_vpmsum (vui, vui);
+    VPMSUMW VPMSUMW_C
+  vull __builtin_crypto_vpmsum (vull, vull);
+    VPMSUMD VPMSUMD_C
+
+[SCAL_CMPB, SKIP, __builtin_cmpb]
+  unsigned int __builtin_cmpb (unsigned int, unsigned int);
+    CMPB_32
+  unsigned long long __builtin_cmpb (unsigned long long, unsigned long long);
+    CMPB
+
 [VEC_ABS, vec_abs, __builtin_vec_abs]
   vsc __builtin_vec_abs (vsc);
     ABS_V16QI
   vss __builtin_vec_abs (vss);
     ABS_V8HI
+  vsi __builtin_vec_abs (vsi);
+    ABS_V4SI
+  vsll __builtin_vec_abs (vsll);
+    ABS_V2DI
+  vf __builtin_vec_abs (vf);
+    ABS_V4SF
+  vd __builtin_vec_abs (vd);
+    XVABSDP
+
+[VEC_ABSD, vec_absd, __builtin_vec_vadu, _ARCH_PWR9]
+  vuc __builtin_vec_vadu (vuc, vuc);
+    VADUB
+  vus __builtin_vec_vadu (vus, vus);
+    VADUH
+  vui __builtin_vec_vadu (vui, vui);
+    VADUW
+
+[VEC_ABSS, vec_abss, __builtin_vec_abss]
+  vsc __builtin_vec_abss (vsc);
+    ABSS_V16QI
+  vss __builtin_vec_abss (vss);
+    ABSS_V8HI
+  vsi __builtin_vec_abss (vsi);
+    ABSS_V4SI
+
+; XVADDSP{TARGET_VSX};VADDFP
+[VEC_ADD, vec_add, __builtin_vec_add]
+  vsc __builtin_vec_add (vsc, vsc);
+    VADDUBM VADDUBM_VSC
+  vuc __builtin_vec_add (vuc, vuc);
+    VADDUBM VADDUBM_VUC
+  vss __builtin_vec_add (vss, vss);
+    VADDUHM VADDUHM_VSS
+  vus __builtin_vec_add (vus, vus);
+    VADDUHM VADDUHM_VUS
+  vsi __builtin_vec_add (vsi, vsi);
+    VADDUWM VADDUWM_VSI
+  vui __builtin_vec_add (vui, vui);
+    VADDUWM VADDUWM_VUI
+  vsll __builtin_vec_add (vsll, vsll);
+    VADDUDM VADDUDM_VSLL
+  vull __builtin_vec_add (vull, vull);
+    VADDUDM VADDUDM_VULL
+  vsq __builtin_vec_add (vsq, vsq);
+    VADDUQM VADDUQM_VSQ
+  vuq __builtin_vec_add (vuq, vuq);
+    VADDUQM VADDUQM_VUQ
+  vf __builtin_vec_add (vf, vf);
+    VADDFP
+  vd __builtin_vec_add (vd, vd);
+    XVADDDP
+; The following variants are deprecated.
+  vsc __builtin_vec_add (vbc, vsc);
+    VADDUBM VADDUBM_VBC_VSC
+  vsc __builtin_vec_add (vsc, vbc);
+    VADDUBM VADDUBM_VSC_VBC
+  vuc __builtin_vec_add (vbc, vuc);
+    VADDUBM VADDUBM_VBC_VUC
+  vuc __builtin_vec_add (vuc, vbc);
+    VADDUBM VADDUBM_VUC_VBC
+  vss __builtin_vec_add (vbs, vss);
+    VADDUHM VADDUHM_VBS_VSS
+  vss __builtin_vec_add (vss, vbs);
+    VADDUHM VADDUHM_VSS_VBS
+  vus __builtin_vec_add (vbs, vus);
+    VADDUHM VADDUHM_VBS_VUS
+  vus __builtin_vec_add (vus, vbs);
+    VADDUHM VADDUHM_VUS_VBS
+  vsi __builtin_vec_add (vbi, vsi);
+    VADDUWM VADDUWM_VBI_VSI
+  vsi __builtin_vec_add (vsi, vbi);
+    VADDUWM VADDUWM_VSI_VBI
+  vui __builtin_vec_add (vbi, vui);
+    VADDUWM VADDUWM_VBI_VUI
+  vui __builtin_vec_add (vui, vbi);
+    VADDUWM VADDUWM_VUI_VBI
+  vsll __builtin_vec_add (vbll, vsll);
+    VADDUDM VADDUDM_VBLL_VSLL
+  vsll __builtin_vec_add (vsll, vbll);
+    VADDUDM VADDUDM_VSLL_VBLL
+  vull __builtin_vec_add (vbll, vull);
+    VADDUDM VADDUDM_VBLL_VULL
+  vull __builtin_vec_add (vull, vbll);
+    VADDUDM VADDUDM_VULL_VBLL
+
+[VEC_ADDC, vec_addc, __builtin_vec_addc]
+  vsi __builtin_vec_addc (vsi, vsi);
+    VADDCUW VADDCUW_VSI
+  vui __builtin_vec_addc (vui, vui);
+    VADDCUW VADDCUW_VUI
+  vsq __builtin_vec_addc (vsq, vsq);
+    VADDCUQ VADDCUQ_VSQ
+  vuq __builtin_vec_addc (vuq, vuq);
+    VADDCUQ VADDCUQ_VUQ
+
+; TODO: Note that the entry for VEC_ADDE currently gets ignored in
+; altivec_resolve_overloaded_builtin. Revisit whether we can remove
+; that. We still need to register the legal builtin forms here.
+[VEC_ADDE, vec_adde, __builtin_vec_adde]
+  vsq __builtin_vec_adde (vsq, vsq, vsq);
+    VADDEUQM VADDEUQM_VSQ
+  vuq __builtin_vec_adde (vuq, vuq, vuq);
+    VADDEUQM VADDEUQM_VUQ
+
+; TODO: Note that the entry for VEC_ADDEC currently gets ignored in
+; altivec_resolve_overloaded_builtin. Revisit whether we can remove
+; that. We still need to register the legal builtin forms here.
+[VEC_ADDEC, vec_addec, __builtin_vec_addec]
+  vsq __builtin_vec_addec (vsq, vsq, vsq);
+    VADDECUQ VADDECUQ_VSQ
+  vuq __builtin_vec_addec (vuq, vuq, vuq);
+    VADDECUQ VADDECUQ_VUQ
+
+[VEC_ADDS, vec_adds, __builtin_vec_adds]
+  vuc __builtin_vec_adds (vuc, vuc);
+    VADDUBS
+  vsc __builtin_vec_adds (vsc, vsc);
+    VADDSBS
+  vus __builtin_vec_adds (vus, vus);
+    VADDUHS
+  vss __builtin_vec_adds (vss, vss);
+    VADDSHS
+  vui __builtin_vec_adds (vui, vui);
+    VADDUWS
+  vsi __builtin_vec_adds (vsi, vsi);
+    VADDSWS
+; The following variants are deprecated.
+  vuc __builtin_vec_adds (vbc, vuc);
+    VADDUBS VADDUBS_BU
+  vuc __builtin_vec_adds (vuc, vbc);
+    VADDUBS VADDUBS_UB
+  vsc __builtin_vec_adds (vbc, vsc);
+    VADDSBS VADDSBS_BS
+  vsc __builtin_vec_adds (vsc, vbc);
+    VADDSBS VADDSBS_SB
+  vus __builtin_vec_adds (vbs, vus);
+    VADDUHS VADDUHS_BU
+  vus __builtin_vec_adds (vus, vbs);
+    VADDUHS VADDUHS_UB
+  vss __builtin_vec_adds (vbs, vss);
+    VADDSHS VADDSHS_BS
+  vss __builtin_vec_adds (vss, vbs);
+    VADDSHS VADDSHS_SB
+  vui __builtin_vec_adds (vbi, vui);
+    VADDUWS VADDUWS_BU
+  vui __builtin_vec_adds (vui, vbi);
+    VADDUWS VADDUWS_UB
+  vsi __builtin_vec_adds (vbi, vsi);
+    VADDSWS VADDSWS_BS
+  vsi __builtin_vec_adds (vsi, vbi);
+    VADDSWS VADDSWS_SB
+
+[VEC_AND, vec_and, __builtin_vec_and]
+  vsc __builtin_vec_and (vsc, vsc);
+    VAND_V16QI
+  vuc __builtin_vec_and (vuc, vuc);
+    VAND_V16QI_UNS VAND_VUC
+  vbc __builtin_vec_and (vbc, vbc);
+    VAND_V16QI_UNS VAND_VBC
+  vss __builtin_vec_and (vss, vss);
+    VAND_V8HI
+  vus __builtin_vec_and (vus, vus);
+    VAND_V8HI_UNS VAND_VUS
+  vbs __builtin_vec_and (vbs, vbs);
+    VAND_V8HI_UNS VAND_VBS
+  vsi __builtin_vec_and (vsi, vsi);
+    VAND_V4SI
+  vui __builtin_vec_and (vui, vui);
+    VAND_V4SI_UNS VAND_VUI
+  vbi __builtin_vec_and (vbi, vbi);
+    VAND_V4SI_UNS VAND_VBI
+  vsll __builtin_vec_and (vsll, vsll);
+    VAND_V2DI
+  vull __builtin_vec_and (vull, vull);
+    VAND_V2DI_UNS VAND_VULL
+  vbll __builtin_vec_and (vbll, vbll);
+    VAND_V2DI_UNS VAND_VBLL
+  vf __builtin_vec_and (vf, vf);
+    VAND_V4SF
+  vd __builtin_vec_and (vd, vd);
+    VAND_V2DF
+; The following variants are deprecated.
+  vsc __builtin_vec_and (vsc, vbc);
+    VAND_V16QI VAND_VSC_VBC
+  vsc __builtin_vec_and (vbc, vsc);
+    VAND_V16QI VAND_VBC_VSC
+  vuc __builtin_vec_and (vuc, vbc);
+    VAND_V16QI_UNS VAND_VUC_VBC
+  vuc __builtin_vec_and (vbc, vuc);
+    VAND_V16QI_UNS VAND_VBC_VUC
+  vss __builtin_vec_and (vss, vbs);
+    VAND_V8HI VAND_VSS_VBS
+  vss __builtin_vec_and (vbs, vss);
+    VAND_V8HI VAND_VBS_VSS
+  vus __builtin_vec_and (vus, vbs);
+    VAND_V8HI_UNS VAND_VUS_VBS
+  vus __builtin_vec_and (vbs, vus);
+    VAND_V8HI_UNS VAND_VBS_VUS
+  vsi __builtin_vec_and (vsi, vbi);
+    VAND_V4SI VAND_VSI_VBI
+  vsi __builtin_vec_and (vbi, vsi);
+    VAND_V4SI VAND_VBI_VSI
+  vui __builtin_vec_and (vui, vbi);
+    VAND_V4SI_UNS VAND_VUI_VBI
+  vui __builtin_vec_and (vbi, vui);
+    VAND_V4SI_UNS VAND_VBI_VUI
+  vsll __builtin_vec_and (vsll, vbll);
+    VAND_V2DI VAND_VSLL_VBLL
+  vsll __builtin_vec_and (vbll, vsll);
+    VAND_V2DI VAND_VBLL_VSLL
+  vull __builtin_vec_and (vull, vbll);
+    VAND_V2DI_UNS VAND_VULL_VBLL
+  vull __builtin_vec_and (vbll, vull);
+    VAND_V2DI_UNS VAND_VBLL_VULL
+  vf __builtin_vec_and (vf, vbi);
+    VAND_V4SF VAND_VF_VBI
+  vf __builtin_vec_and (vbi, vf);
+    VAND_V4SF VAND_VBI_VF
+  vd __builtin_vec_and (vd, vbll);
+    VAND_V2DF VAND_VD_VBLL
+  vd __builtin_vec_and (vbll, vd);
+    VAND_V2DF VAND_VBLL_VD
+
+[VEC_ANDC, vec_andc, __builtin_vec_andc]
+  vbc __builtin_vec_andc (vbc, vbc);
+    VANDC_V16QI_UNS VANDC_VBC
+  vsc __builtin_vec_andc (vsc, vsc);
+    VANDC_V16QI
+  vuc __builtin_vec_andc (vuc, vuc);
+    VANDC_V16QI_UNS VANDC_VUC
+  vbs __builtin_vec_andc (vbs, vbs);
+    VANDC_V8HI_UNS VANDC_VBS
+  vss __builtin_vec_andc (vss, vss);
+    VANDC_V8HI
+  vus __builtin_vec_andc (vus, vus);
+    VANDC_V8HI_UNS VANDC_VUS
+  vbi __builtin_vec_andc (vbi, vbi);
+    VANDC_V4SI_UNS VANDC_VBI
+  vsi __builtin_vec_andc (vsi, vsi);
+    VANDC_V4SI
+  vui __builtin_vec_andc (vui, vui);
+    VANDC_V4SI_UNS VANDC_VUI
+  vbll __builtin_vec_andc (vbll, vbll);
+    VANDC_V2DI_UNS VANDC_VBLL
+  vsll __builtin_vec_andc (vsll, vsll);
+    VANDC_V2DI
+  vull __builtin_vec_andc (vull, vull);
+    VANDC_V2DI_UNS VANDC_VULL
+  vf __builtin_vec_andc (vf, vf);
+    VANDC_V4SF
+  vd __builtin_vec_andc (vd, vd);
+    VANDC_V2DF
+; The following variants are deprecated.
+  vsc __builtin_vec_andc (vsc, vbc);
+    VANDC_V16QI VANDC_VSC_VBC
+  vsc __builtin_vec_andc (vbc, vsc);
+    VANDC_V16QI VANDC_VBC_VSC
+  vuc __builtin_vec_andc (vuc, vbc);
+    VANDC_V16QI_UNS VANDC_VUC_VBC
+  vuc __builtin_vec_andc (vbc, vuc);
+    VANDC_V16QI_UNS VANDC_VBC_VUC
+  vss __builtin_vec_andc (vss, vbs);
+    VANDC_V8HI VANDC_VSS_VBS
+  vss __builtin_vec_andc (vbs, vss);
+    VANDC_V8HI VANDC_VBS_VSS
+  vus __builtin_vec_andc (vus, vbs);
+    VANDC_V8HI_UNS VANDC_VUS_VBS
+  vus __builtin_vec_andc (vbs, vus);
+    VANDC_V8HI_UNS VANDC_VBS_VUS
+  vsi __builtin_vec_andc (vsi, vbi);
+    VANDC_V4SI VANDC_VSI_VBI
+  vsi __builtin_vec_andc (vbi, vsi);
+    VANDC_V4SI VANDC_VBI_VSI
+  vui __builtin_vec_andc (vui, vbi);
+    VANDC_V4SI_UNS VANDC_VUI_VBI
+  vui __builtin_vec_andc (vbi, vui);
+    VANDC_V4SI_UNS VANDC_VBI_VUI
+  vsll __builtin_vec_andc (vsll, vbll);
+    VANDC_V2DI VANDC_VSLL_VBLL
+  vsll __builtin_vec_andc (vbll, vsll);
+    VANDC_V2DI VANDC_VBLL_VSLL
+  vull __builtin_vec_andc (vull, vbll);
+    VANDC_V2DI_UNS VANDC_VULL_VBLL
+  vull __builtin_vec_andc (vbll, vull);
+    VANDC_V2DI_UNS VANDC_VBLL_VULL
+  vf __builtin_vec_andc (vf, vbi);
+    VANDC_V4SF VANDC_VF_VBI
+  vf __builtin_vec_andc (vbi, vf);
+    VANDC_V4SF VANDC_VBI_VF
+  vd __builtin_vec_andc (vd, vbll);
+    VANDC_V2DF VANDC_VD_VBLL
+  vd __builtin_vec_andc (vbll, vd);
+    VANDC_V2DF VANDC_VBLL_VD
+
+[VEC_AVG, vec_avg, __builtin_vec_avg]
+  vsc __builtin_vec_avg (vsc, vsc);
+    VAVGSB
+  vuc __builtin_vec_avg (vuc, vuc);
+    VAVGUB
+  vss __builtin_vec_avg (vss, vss);
+    VAVGSH
+  vus __builtin_vec_avg (vus, vus);
+    VAVGUH
+  vsi __builtin_vec_avg (vsi, vsi);
+    VAVGSW
+  vui __builtin_vec_avg (vui, vui);
+    VAVGUW
+
+[VEC_BLENDV, vec_blendv, __builtin_vec_xxblend, _ARCH_PWR10]
+  vsc __builtin_vec_xxblend (vsc, vsc, vuc);
+    VXXBLEND_V16QI VXXBLEND_VSC
+  vuc __builtin_vec_xxblend (vuc, vuc, vuc);
+    VXXBLEND_V16QI VXXBLEND_VUC
+  vss __builtin_vec_xxblend (vss, vss, vus);
+    VXXBLEND_V8HI VXXBLEND_VSS
+  vus __builtin_vec_xxblend (vus, vus, vus);
+    VXXBLEND_V8HI VXXBLEND_VUS
+  vsi __builtin_vec_xxblend (vsi, vsi, vui);
+    VXXBLEND_V4SI VXXBLEND_VSI
+  vui __builtin_vec_xxblend (vui, vui, vui);
+    VXXBLEND_V4SI VXXBLEND_VUI
+  vsll __builtin_vec_xxblend (vsll, vsll, vull);
+    VXXBLEND_V2DI VXXBLEND_VSLL
+  vull __builtin_vec_xxblend (vull, vull, vull);
+    VXXBLEND_V2DI VXXBLEND_VULL
+  vf __builtin_vec_xxblend (vf, vf, vui);
+    VXXBLEND_V4SF
+  vd __builtin_vec_xxblend (vd, vd, vull);
+    VXXBLEND_V2DF
+
+[VEC_BPERM, vec_bperm, __builtin_vec_vbperm_api, _ARCH_PWR8]
+  vull __builtin_vec_vbperm_api (vull, vuc);
+    VBPERMD VBPERMD_VULL
+  vull __builtin_vec_vbperm_api (vuq, vuc);
+    VBPERMQ VBPERMQ_VUQ
+  vuc __builtin_vec_vbperm_api (vuc, vuc);
+    VBPERMQ2 VBPERMQ2_U
+  vsc __builtin_vec_vbperm_api (vsc, vsc);
+    VBPERMQ2 VBPERMQ2_S
+
+; #### XVRSPIP{TARGET_VSX};VRFIP
+[VEC_CEIL, vec_ceil, __builtin_vec_ceil]
+  vf __builtin_vec_ceil (vf);
+    VRFIP
+  vd __builtin_vec_ceil (vd);
+    XVRDPIP
+
+[VEC_CFUGE, vec_cfuge, __builtin_vec_cfuge, _ARCH_PWR10]
+  vull __builtin_vec_cfuge (vull, vull);
+    VCFUGED
+
+[VEC_CIPHER_BE, vec_cipher_be, __builtin_vec_vcipher_be, _ARCH_PWR8]
+  vuc __builtin_vec_vcipher_be (vuc, vuc);
+    VCIPHER_BE
+
+[VEC_CIPHERLAST_BE, vec_cipherlast_be, __builtin_vec_vcipherlast_be, _ARCH_PWR8]
+  vuc __builtin_vec_vcipherlast_be (vuc, vuc);
+    VCIPHERLAST_BE
+
+[VEC_CLRL, vec_clrl, __builtin_vec_clrl, _ARCH_PWR10]
+  vsc __builtin_vec_clrl (vsc, unsigned int);
+    VCLRLB VCLRLB_S
+  vuc __builtin_vec_clrl (vuc, unsigned int);
+    VCLRLB VCLRLB_U
+
+[VEC_CLRR, vec_clrr, __builtin_vec_clrr, _ARCH_PWR10]
+  vsc __builtin_vec_clrr (vsc, unsigned int);
+    VCLRRB VCLRRB_S
+  vuc __builtin_vec_clrr (vuc, unsigned int);
+    VCLRRB VCLRRB_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPAE_P, SKIP, __builtin_vec_vcmpae_p]
+  signed int __builtin_vec_vcmpae_p (vsc, vsc);
+    VCMPAEB_P VCMPAEB_VSC_P
+  signed int __builtin_vec_vcmpae_p (vuc, vuc);
+    VCMPAEB_P VCMPAEB_VUC_P
+  signed int __builtin_vec_vcmpae_p (vbc, vbc);
+    VCMPAEB_P VCMPAEB_VBC_P
+  signed int __builtin_vec_vcmpae_p (vss, vss);
+    VCMPAEH_P VCMPAEH_VSS_P
+  signed int __builtin_vec_vcmpae_p (vus, vus);
+    VCMPAEH_P VCMPAEH_VUS_P
+  signed int __builtin_vec_vcmpae_p (vbs, vbs);
+    VCMPAEH_P VCMPAEH_VBS_P
+  signed int __builtin_vec_vcmpae_p (vp, vp);
+    VCMPAEH_P VCMPAEH_VP_P
+  signed int __builtin_vec_vcmpae_p (vsi, vsi);
+    VCMPAEW_P VCMPAEW_VSI_P
+  signed int __builtin_vec_vcmpae_p (vui, vui);
+    VCMPAEW_P VCMPAEW_VUI_P
+  signed int __builtin_vec_vcmpae_p (vbi, vbi);
+    VCMPAEW_P VCMPAEW_VBI_P
+  signed int __builtin_vec_vcmpae_p (vsll, vsll);
+    VCMPAED_P VCMPAED_VSLL_P
+  signed int __builtin_vec_vcmpae_p (vull, vull);
+    VCMPAED_P VCMPAED_VULL_P
+  signed int __builtin_vec_vcmpae_p (vbll, vbll);
+    VCMPAED_P VCMPAED_VBLL_P
+  signed int __builtin_vec_vcmpae_p (vsq, vsq);
+    VCMPAET_P VCMPAET_VSQ_P
+  signed int __builtin_vec_vcmpae_p (vuq, vuq);
+    VCMPAET_P VCMPAET_VUQ_P
+  signed int __builtin_vec_vcmpae_p (vf, vf);
+    VCMPAEFP_P
+  signed int __builtin_vec_vcmpae_p (vd, vd);
+    VCMPAEDP_P
+; The following variants are deprecated.
+  signed int __builtin_vec_vcmpae_p (signed int, vbc, vuc);
+    VCMPAEB_P VCMPAEB_P_BU
+  signed int __builtin_vec_vcmpae_p (signed int, vuc, vbc);
+    VCMPAEB_P VCMPAEB_P_UB
+  signed int __builtin_vec_vcmpae_p (signed int, vbc, vsc);
+    VCMPAEB_P VCMPAEB_P_BS
+  signed int __builtin_vec_vcmpae_p (signed int, vsc, vbc);
+    VCMPAEB_P VCMPAEB_P_SB
+  signed int __builtin_vec_vcmpae_p (signed int, vbs, vus);
+    VCMPAEH_P VCMPAEH_P_BU
+  signed int __builtin_vec_vcmpae_p (signed int, vus, vbs);
+    VCMPAEH_P VCMPAEH_P_UB
+  signed int __builtin_vec_vcmpae_p (signed int, vbs, vss);
+    VCMPAEH_P VCMPAEH_P_BS
+  signed int __builtin_vec_vcmpae_p (signed int, vss, vbs);
+    VCMPAEH_P VCMPAEH_P_SB
+  signed int __builtin_vec_vcmpae_p (signed int, vbi, vui);
+    VCMPAEW_P VCMPAEW_P_BU
+  signed int __builtin_vec_vcmpae_p (signed int, vui, vbi);
+    VCMPAEW_P VCMPAEW_P_UB
+  signed int __builtin_vec_vcmpae_p (signed int, vbi, vsi);
+    VCMPAEW_P VCMPAEW_P_BS
+  signed int __builtin_vec_vcmpae_p (signed int, vsi, vbi);
+    VCMPAEW_P VCMPAEW_P_SB
+  signed int __builtin_vec_vcmpae_p (signed int, vbll, vull);
+    VCMPAED_P VCMPAED_P_BU
+  signed int __builtin_vec_vcmpae_p (signed int, vull, vbll);
+    VCMPAED_P VCMPAED_P_UB
+  signed int __builtin_vec_vcmpae_p (signed int, vbll, vsll);
+    VCMPAED_P VCMPAED_P_BS
+  signed int __builtin_vec_vcmpae_p (signed int, vsll, vbll);
+    VCMPAED_P VCMPAED_P_SB
+
+[VEC_CMPB, vec_cmpb, __builtin_vec_cmpb]
+  vsi __builtin_vec_cmpb (vf, vf);
+    VCMPBFP
+
+[VEC_CMPEQ, vec_cmpeq, __builtin_vec_cmpeq]
+; #### XVCMPEQSP{TARGET_VSX};VCMPEQFP
+  vbc __builtin_vec_cmpeq (vsc, vsc);
+    VCMPEQUB VCMPEQUB_VSC
+  vbc __builtin_vec_cmpeq (vuc, vuc);
+    VCMPEQUB VCMPEQUB_VUC
+  vbc __builtin_vec_cmpeq (vbc, vbc);
+    VCMPEQUB VCMPEQUB_VBC
+  vbs __builtin_vec_cmpeq (vss, vss);
+    VCMPEQUH VCMPEQUH_VSS
+  vbs __builtin_vec_cmpeq (vus, vus);
+    VCMPEQUH VCMPEQUH_VUS
+  vbs __builtin_vec_cmpeq (vbs, vbs);
+    VCMPEQUH VCMPEQUH_VBS
+  vbi __builtin_vec_cmpeq (vsi, vsi);
+    VCMPEQUW VCMPEQUW_VSI
+  vbi __builtin_vec_cmpeq (vui, vui);
+    VCMPEQUW VCMPEQUW_VUI
+  vbi __builtin_vec_cmpeq (vbi, vbi);
+    VCMPEQUW VCMPEQUW_VBI
+  vbll __builtin_vec_cmpeq (vsll, vsll);
+    VCMPEQUD VCMPEQUD_VSLL
+  vbll __builtin_vec_cmpeq (vull, vull);
+    VCMPEQUD VCMPEQUD_VULL
+  vbll __builtin_vec_cmpeq (vbll, vbll);
+    VCMPEQUD VCMPEQUD_VBLL
+  vbq __builtin_vec_cmpeq (vsq, vsq);
+    VCMPEQUT VCMPEQUT_VSQ
+  vbq __builtin_vec_cmpeq (vuq, vuq);
+    VCMPEQUT VCMPEQUT_VUQ
+  vbi __builtin_vec_cmpeq (vf, vf);
+    VCMPEQFP
+  vbll __builtin_vec_cmpeq (vd, vd);
+    XVCMPEQDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; #### XVCMPEQSP_P{TARGET_VSX};VCMPEQFP_P
+[VEC_CMPEQ_P, SKIP, __builtin_vec_vcmpeq_p]
+  signed int __builtin_vec_vcmpeq_p (signed int, vuc, vuc);
+    VCMPEQUB_P VCMPEQUB_PU
+  signed int __builtin_vec_vcmpeq_p (signed int, vsc, vsc);
+    VCMPEQUB_P VCMPEQUB_PS
+  signed int __builtin_vec_vcmpeq_p (signed int, vbc, vbc);
+    VCMPEQUB_P VCMPEQUB_PB
+  signed int __builtin_vec_vcmpeq_p (signed int, vus, vus);
+    VCMPEQUH_P VCMPEQUH_PU
+  signed int __builtin_vec_vcmpeq_p (signed int, vss, vss);
+    VCMPEQUH_P VCMPEQUH_PS
+  signed int __builtin_vec_vcmpeq_p (signed int, vbs, vbs);
+    VCMPEQUH_P VCMPEQUH_PB
+  signed int __builtin_vec_vcmpeq_p (signed int, vp, vp);
+    VCMPEQUH_P VCMPEQUH_PP
+  signed int __builtin_vec_vcmpeq_p (signed int, vui, vui);
+    VCMPEQUW_P VCMPEQUW_PU
+  signed int __builtin_vec_vcmpeq_p (signed int, vsi, vsi);
+    VCMPEQUW_P VCMPEQUW_PS
+  signed int __builtin_vec_vcmpeq_p (signed int, vbi, vbi);
+    VCMPEQUW_P VCMPEQUW_PB
+  signed int __builtin_vec_vcmpeq_p (signed int, vull, vull);
+    VCMPEQUD_P VCMPEQUD_PU
+  signed int __builtin_vec_vcmpeq_p (signed int, vsll, vsll);
+    VCMPEQUD_P VCMPEQUD_PS
+  signed int __builtin_vec_vcmpeq_p (signed int, vbll, vbll);
+    VCMPEQUD_P VCMPEQUD_PB
+  signed int __builtin_vec_vcmpeq_p (signed int, vsq, vsq);
+    VCMPEQUT_P VCMPEQUT_P_VSQ
+  signed int __builtin_vec_vcmpeq_p (signed int, vuq, vuq);
+    VCMPEQUT_P VCMPEQUT_P_VUQ
+  signed int __builtin_vec_vcmpeq_p (signed int, vf, vf);
+    VCMPEQFP_P
+  signed int __builtin_vec_vcmpeq_p (signed int, vd, vd);
+    XVCMPEQDP_P
+; The following variants are deprecated.
+  signed int __builtin_vec_vcmpeq_p (signed int, vbc, vuc);
+    VCMPEQUB_P VCMPEQUB_P_BU
+  signed int __builtin_vec_vcmpeq_p (signed int, vuc, vbc);
+    VCMPEQUB_P VCMPEQUB_P_UB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbc, vsc);
+    VCMPEQUB_P VCMPEQUB_P_BS
+  signed int __builtin_vec_vcmpeq_p (signed int, vsc, vbc);
+    VCMPEQUB_P VCMPEQUB_P_SB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbs, vus);
+    VCMPEQUH_P VCMPEQUH_P_BU
+  signed int __builtin_vec_vcmpeq_p (signed int, vus, vbs);
+    VCMPEQUH_P VCMPEQUH_P_UB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbs, vss);
+    VCMPEQUH_P VCMPEQUH_P_BS
+  signed int __builtin_vec_vcmpeq_p (signed int, vss, vbs);
+    VCMPEQUH_P VCMPEQUH_P_SB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbi, vui);
+    VCMPEQUW_P VCMPEQUW_P_BU
+  signed int __builtin_vec_vcmpeq_p (signed int, vui, vbi);
+    VCMPEQUW_P VCMPEQUW_P_UB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbi, vsi);
+    VCMPEQUW_P VCMPEQUW_P_BS
+  signed int __builtin_vec_vcmpeq_p (signed int, vsi, vbi);
+    VCMPEQUW_P VCMPEQUW_P_SB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbll, vull);
+    VCMPEQUD_P VCMPEQUD_P_BU
+  signed int __builtin_vec_vcmpeq_p (signed int, vull, vbll);
+    VCMPEQUD_P VCMPEQUD_P_UB
+  signed int __builtin_vec_vcmpeq_p (signed int, vbll, vsll);
+    VCMPEQUD_P VCMPEQUD_P_BS
+  signed int __builtin_vec_vcmpeq_p (signed int, vsll, vbll);
+    VCMPEQUD_P VCMPEQUD_P_SB
+
+[VEC_CMPEQB, SKIP, __builtin_byte_in_set]
+  signed int __builtin_byte_in_set (unsigned int, unsigned long long);
+    CMPEQB
+
+; #### XVCMPGESP{TARGET_VSX};VCMPGEFP
+[VEC_CMPGE, vec_cmpge, __builtin_vec_cmpge]
+  vbc __builtin_vec_cmpge (vsc, vsc);
+    CMPGE_16QI CMPGE_16QI_VSC
+  vbc __builtin_vec_cmpge (vuc, vuc);
+    CMPGE_U16QI CMPGE_16QI_VUC
+  vbs __builtin_vec_cmpge (vss, vss);
+    CMPGE_8HI CMPGE_8HI_VSS
+  vbs __builtin_vec_cmpge (vus, vus);
+    CMPGE_U8HI CMPGE_8HI_VUS
+  vbi __builtin_vec_cmpge (vsi, vsi);
+    CMPGE_4SI CMPGE_4SI_VSI
+  vbi __builtin_vec_cmpge (vui, vui);
+    CMPGE_U4SI CMPGE_4SI_VUI
+  vbll __builtin_vec_cmpge (vsll, vsll);
+    CMPGE_2DI CMPGE_2DI_VSLL
+  vbll __builtin_vec_cmpge (vull, vull);
+    CMPGE_U2DI CMPGE_2DI_VULL
+  vbq __builtin_vec_cmpge (vsq, vsq);
+    CMPGE_1TI
+  vbq __builtin_vec_cmpge (vuq, vuq);
+    CMPGE_U1TI
+  vbi __builtin_vec_cmpge (vf, vf);
+    VCMPGEFP
+  vbll __builtin_vec_cmpge (vd, vd);
+    XVCMPGEDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; See altivec_build_resolved_builtin for how we deal with VEC_CMPGE_P.
+; It's quite strange and horrible!
+; #### XVCMPGESP_P{TARGET_VSX};VCMPGEFP_P
+[VEC_CMPGE_P, SKIP, __builtin_vec_vcmpge_p]
+  signed int __builtin_vec_vcmpge_p (signed int, vuc, vuc);
+    VCMPGTUB_P VCMPGTUB_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vsc, vsc);
+    VCMPGTSB_P VCMPGTSB_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vus, vus);
+    VCMPGTUH_P VCMPGTUH_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vss, vss);
+    VCMPGTSH_P VCMPGTSH_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vui, vui);
+    VCMPGTUW_P VCMPGTUW_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vsi, vsi);
+    VCMPGTSW_P VCMPGTSW_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vull, vull);
+    VCMPGTUD_P VCMPGTUD_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vsll, vsll);
+    VCMPGTSD_P VCMPGTSD_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vuq, vuq);
+    VCMPGTUT_P VCMPGTUT_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vsq, vsq);
+    VCMPGTST_P VCMPGTST_PR
+  signed int __builtin_vec_vcmpge_p (signed int, vf, vf);
+    VCMPGEFP_P
+  signed int __builtin_vec_vcmpge_p (signed int, vd, vd);
+    XVCMPGEDP_P
+; The following variants are deprecated.
+  signed int __builtin_vec_vcmpge_p (signed int, vbc, vuc);
+    VCMPGTUB_P VCMPGTUB_PR_BU
+  signed int __builtin_vec_vcmpge_p (signed int, vuc, vbc);
+    VCMPGTUB_P VCMPGTUB_PR_UB
+  signed int __builtin_vec_vcmpge_p (signed int, vbc, vsc);
+    VCMPGTSB_P VCMPGTSB_PR_BS
+  signed int __builtin_vec_vcmpge_p (signed int, vsc, vbc);
+    VCMPGTSB_P VCMPGTSB_PR_SB
+  signed int __builtin_vec_vcmpge_p (signed int, vbs, vus);
+    VCMPGTUH_P VCMPGTUH_PR_BU
+  signed int __builtin_vec_vcmpge_p (signed int, vus, vbs);
+    VCMPGTUH_P VCMPGTUH_PR_UB
+  signed int __builtin_vec_vcmpge_p (signed int, vbs, vss);
+    VCMPGTSH_P VCMPGTSH_PR_BS
+  signed int __builtin_vec_vcmpge_p (signed int, vss, vbs);
+    VCMPGTSH_P VCMPGTSH_PR_SB
+  signed int __builtin_vec_vcmpge_p (signed int, vbi, vui);
+    VCMPGTUW_P VCMPGTUW_PR_BU
+  signed int __builtin_vec_vcmpge_p (signed int, vui, vbi);
+    VCMPGTUW_P VCMPGTUW_PR_UB
+  signed int __builtin_vec_vcmpge_p (signed int, vbi, vsi);
+    VCMPGTSW_P VCMPGTSW_PR_BS
+  signed int __builtin_vec_vcmpge_p (signed int, vsi, vbi);
+    VCMPGTSW_P VCMPGTSW_PR_SB
+  signed int __builtin_vec_vcmpge_p (signed int, vbll, vull);
+    VCMPGTUD_P VCMPGTUD_PR_BU
+  signed int __builtin_vec_vcmpge_p (signed int, vull, vbll);
+    VCMPGTUD_P VCMPGTUD_PR_UB
+  signed int __builtin_vec_vcmpge_p (signed int, vbll, vsll);
+    VCMPGTSD_P VCMPGTSD_PR_BS
+  signed int __builtin_vec_vcmpge_p (signed int, vsll, vbll);
+    VCMPGTSD_P VCMPGTSD_PR_SB
+
+; #### XVCMPGTSP{TARGET_VSX};VCMPGTFP
+[VEC_CMPGT, vec_cmpgt, __builtin_vec_cmpgt]
+  vbc __builtin_vec_cmpgt (vsc, vsc);
+    VCMPGTSB
+  vbc __builtin_vec_cmpgt (vuc, vuc);
+    VCMPGTUB
+  vbs __builtin_vec_cmpgt (vss, vss);
+    VCMPGTSH
+  vbs __builtin_vec_cmpgt (vus, vus);
+    VCMPGTUH
+  vbi __builtin_vec_cmpgt (vsi, vsi);
+    VCMPGTSW
+  vbi __builtin_vec_cmpgt (vui, vui);
+    VCMPGTUW
+  vbll __builtin_vec_cmpgt (vsll, vsll);
+    VCMPGTSD
+  vbll __builtin_vec_cmpgt (vull, vull);
+    VCMPGTUD
+  vbq __builtin_vec_cmpgt (vsq, vsq);
+    VCMPGTST
+  vbq __builtin_vec_cmpgt (vuq, vuq);
+    VCMPGTUT
+  vbi __builtin_vec_cmpgt (vf, vf);
+    VCMPGTFP
+  vbll __builtin_vec_cmpgt (vd, vd);
+    XVCMPGTDP
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+; #### XVCMPGTSP_P{TARGET_VSX};VCMPGTFP_P
+[VEC_CMPGT_P, SKIP, __builtin_vec_vcmpgt_p]
+  signed int __builtin_vec_vcmpgt_p (signed int, vuc, vuc);
+    VCMPGTUB_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vsc, vsc);
+    VCMPGTSB_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vus, vus);
+    VCMPGTUH_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vss, vss);
+    VCMPGTSH_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vui, vui);
+    VCMPGTUW_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vsi, vsi);
+    VCMPGTSW_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vull, vull);
+    VCMPGTUD_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vsll, vsll);
+    VCMPGTSD_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vuq, vuq);
+    VCMPGTUT_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vsq, vsq);
+    VCMPGTST_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vf, vf);
+    VCMPGTFP_P
+  signed int __builtin_vec_vcmpgt_p (signed int, vd, vd);
+    XVCMPGTDP_P
+; The following variants are deprecated.
+  signed int __builtin_vec_vcmpgt_p (signed int, vbc, vuc);
+    VCMPGTUB_P VCMPGTUB_P_BU
+  signed int __builtin_vec_vcmpgt_p (signed int, vuc, vbc);
+    VCMPGTUB_P VCMPGTUB_P_UB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbc, vsc);
+    VCMPGTSB_P VCMPGTSB_P_BS
+  signed int __builtin_vec_vcmpgt_p (signed int, vsc, vbc);
+    VCMPGTSB_P VCMPGTSB_P_SB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbs, vus);
+    VCMPGTUH_P VCMPGTUH_P_BU
+  signed int __builtin_vec_vcmpgt_p (signed int, vus, vbs);
+    VCMPGTUH_P VCMPGTUH_P_UB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbs, vss);
+    VCMPGTSH_P VCMPGTSH_P_BS
+  signed int __builtin_vec_vcmpgt_p (signed int, vss, vbs);
+    VCMPGTSH_P VCMPGTSH_P_SB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbi, vui);
+    VCMPGTUW_P VCMPGTUW_P_BU
+  signed int __builtin_vec_vcmpgt_p (signed int, vui, vbi);
+    VCMPGTUW_P VCMPGTUW_P_UB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbi, vsi);
+    VCMPGTSW_P VCMPGTSW_P_BS
+  signed int __builtin_vec_vcmpgt_p (signed int, vsi, vbi);
+    VCMPGTSW_P VCMPGTSW_P_SB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbll, vull);
+    VCMPGTUD_P VCMPGTUD_P_BU
+  signed int __builtin_vec_vcmpgt_p (signed int, vull, vbll);
+    VCMPGTUD_P VCMPGTUD_P_UB
+  signed int __builtin_vec_vcmpgt_p (signed int, vbll, vsll);
+    VCMPGTSD_P VCMPGTSD_P_BS
+  signed int __builtin_vec_vcmpgt_p (signed int, vsll, vbll);
+    VCMPGTSD_P VCMPGTSD_P_SB
+
+; Note that there is no entry for VEC_CMPLE. VEC_CMPLE is implemented
+; using VEC_CMPGE with reversed arguments in altivec.h.
+
+; Note that there is no entry for VEC_CMPLT. VEC_CMPLT is implemented
+; using VEC_CMPGT with reversed arguments in altivec.h.
+
+[VEC_CMPNE, vec_cmpne, __builtin_vec_cmpne]
+  vbc __builtin_vec_cmpne (vbc, vbc);
+    VCMPNEB VCMPNEB_VBC
+  vbc __builtin_vec_cmpne (vsc, vsc);
+    VCMPNEB VCMPNEB_VSC
+  vbc __builtin_vec_cmpne (vuc, vuc);
+    VCMPNEB VCMPNEB_VUC
+  vbs __builtin_vec_cmpne (vbs, vbs);
+    VCMPNEH VCMPNEH_VBS
+  vbs __builtin_vec_cmpne (vss, vss);
+    VCMPNEH VCMPNEH_VSS
+  vbs __builtin_vec_cmpne (vus, vus);
+    VCMPNEH VCMPNEH_VUS
+  vbi __builtin_vec_cmpne (vbi, vbi);
+    VCMPNEW VCMPNEW_VBI
+  vbi __builtin_vec_cmpne (vsi, vsi);
+    VCMPNEW VCMPNEW_VSI
+  vbi __builtin_vec_cmpne (vui, vui);
+    VCMPNEW VCMPNEW_VUI
+  vbq __builtin_vec_cmpne (vsq, vsq);
+    VCMPNET VCMPNET_VSQ
+  vbq __builtin_vec_cmpne (vuq, vuq);
+    VCMPNET VCMPNET_VUQ
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNE_P, SKIP, __builtin_vec_vcmpne_p]
+  signed int __builtin_vec_vcmpne_p (vsc, vsc);
+    VCMPNEB_P VCMPNEB_VSC_P
+  signed int __builtin_vec_vcmpne_p (vuc, vuc);
+    VCMPNEB_P VCMPNEB_VUC_P
+  signed int __builtin_vec_vcmpne_p (vbc, vbc);
+    VCMPNEB_P VCMPNEB_VBC_P
+  signed int __builtin_vec_vcmpne_p (vss, vss);
+    VCMPNEH_P VCMPNEH_VSS_P
+  signed int __builtin_vec_vcmpne_p (vus, vus);
+    VCMPNEH_P VCMPNEH_VUS_P
+  signed int __builtin_vec_vcmpne_p (vbs, vbs);
+    VCMPNEH_P VCMPNEH_VBS_P
+  signed int __builtin_vec_vcmpne_p (vp, vp);
+    VCMPNEH_P VCMPNEH_VP_P
+  signed int __builtin_vec_vcmpne_p (vsi, vsi);
+    VCMPNEW_P VCMPNEW_VSI_P
+  signed int __builtin_vec_vcmpne_p (vui, vui);
+    VCMPNEW_P VCMPNEW_VUI_P
+  signed int __builtin_vec_vcmpne_p (vbi, vbi);
+    VCMPNEW_P VCMPNEW_VBI_P
+  signed int __builtin_vec_vcmpne_p (vsll, vsll);
+    VCMPNED_P VCMPNED_VSLL_P
+  signed int __builtin_vec_vcmpne_p (vull, vull);
+    VCMPNED_P VCMPNED_VULL_P
+  signed int __builtin_vec_vcmpne_p (vbll, vbll);
+    VCMPNED_P VCMPNED_VBLL_P
+  signed int __builtin_vec_vcmpne_p (vsq, vsq);
+    VCMPNET_P VCMPNET_VSQ_P
+  signed int __builtin_vec_vcmpne_p (vuq, vuq);
+    VCMPNET_P VCMPNET_VUQ_P
+  signed int __builtin_vec_vcmpne_p (vf, vf);
+    VCMPNEFP_P
+  signed int __builtin_vec_vcmpne_p (vd, vd);
+    VCMPNEDP_P
+; The following variants are deprecated.
+  signed int __builtin_vec_vcmpne_p (signed int, vbc, vuc);
+    VCMPNEB_P VCMPNEB_P_BU
+  signed int __builtin_vec_vcmpne_p (signed int, vuc, vbc);
+    VCMPNEB_P VCMPNEB_P_UB
+  signed int __builtin_vec_vcmpne_p (signed int, vbc, vsc);
+    VCMPNEB_P VCMPNEB_P_BS
+  signed int __builtin_vec_vcmpne_p (signed int, vsc, vbc);
+    VCMPNEB_P VCMPNEB_P_SB
+  signed int __builtin_vec_vcmpne_p (signed int, vbs, vus);
+    VCMPNEH_P VCMPNEH_P_BU
+  signed int __builtin_vec_vcmpne_p (signed int, vus, vbs);
+    VCMPNEH_P VCMPNEH_P_UB
+  signed int __builtin_vec_vcmpne_p (signed int, vbs, vss);
+    VCMPNEH_P VCMPNEH_P_BS
+  signed int __builtin_vec_vcmpne_p (signed int, vss, vbs);
+    VCMPNEH_P VCMPNEH_P_SB
+  signed int __builtin_vec_vcmpne_p (signed int, vbi, vui);
+    VCMPNEW_P VCMPNEW_P_BU
+  signed int __builtin_vec_vcmpne_p (signed int, vui, vbi);
+    VCMPNEW_P VCMPNEW_P_UB
+  signed int __builtin_vec_vcmpne_p (signed int, vbi, vsi);
+    VCMPNEW_P VCMPNEW_P_BS
+  signed int __builtin_vec_vcmpne_p (signed int, vsi, vbi);
+    VCMPNEW_P VCMPNEW_P_SB
+  signed int __builtin_vec_vcmpne_p (signed int, vbll, vull);
+    VCMPNED_P VCMPNED_P_BU
+  signed int __builtin_vec_vcmpne_p (signed int, vull, vbll);
+    VCMPNED_P VCMPNED_P_UB
+  signed int __builtin_vec_vcmpne_p (signed int, vbll, vsll);
+    VCMPNED_P VCMPNED_P_BS
+  signed int __builtin_vec_vcmpne_p (signed int, vsll, vbll);
+    VCMPNED_P VCMPNED_P_SB
+
+[VEC_CMPNEZ, vec_cmpnez, __builtin_vec_vcmpnez, _ARCH_PWR9]
+  vbc __builtin_vec_vcmpnez (vsc, vsc);
+    CMPNEZB CMPNEZB_S
+  vbc __builtin_vec_vcmpnez (vuc, vuc);
+    CMPNEZB CMPNEZB_U
+  vbs __builtin_vec_vcmpnez (vss, vss);
+    CMPNEZH CMPNEZH_S
+  vbs __builtin_vec_vcmpnez (vus, vus);
+    CMPNEZH CMPNEZH_U
+  vbi __builtin_vec_vcmpnez (vsi, vsi);
+    CMPNEZW CMPNEZW_S
+  vbi __builtin_vec_vcmpnez (vui, vui);
+    CMPNEZW CMPNEZW_U
+
+; We skip generating a #define because of the C-versus-C++ complexity
+; in altivec.h. Look there for the template-y details.
+[VEC_CMPNEZ_P, SKIP, __builtin_vec_vcmpnez_p]
+  signed int __builtin_vec_vcmpnez_p (signed int, vsc, vsc);
+    VCMPNEZB_P VCMPNEZB_VSC_P
+  signed int __builtin_vec_vcmpnez_p (signed int, vuc, vuc);
+    VCMPNEZB_P VCMPNEZB_VUC_P
+  signed int __builtin_vec_vcmpnez_p (signed int, vss, vss);
+    VCMPNEZH_P VCMPNEZH_VSS_P
+  signed int __builtin_vec_vcmpnez_p (signed int, vus, vus);
+    VCMPNEZH_P VCMPNEZH_VUS_P
+  signed int __builtin_vec_vcmpnez_p (signed int, vsi, vsi);
+    VCMPNEZW_P VCMPNEZW_VSI_P
+  signed int __builtin_vec_vcmpnez_p (signed int, vui, vui);
+    VCMPNEZW_P VCMPNEZW_VUI_P
+
+[VEC_CMPRB, SKIP, __builtin_byte_in_range]
+  signed int __builtin_byte_in_range (unsigned int, unsigned int);
+    CMPRB
+
+[VEC_CMPRB2, SKIP, __builtin_byte_in_either_range]
+  signed int __builtin_byte_in_either_range (unsigned int, unsigned int);
+    CMPRB2
+
+[VEC_CNTLZ, vec_cntlz, __builtin_vec_vclz, _ARCH_PWR8]
+  vsc __builtin_vec_vclz (vsc);
+    VCLZB VCLZB_S
+  vuc __builtin_vec_vclz (vuc);
+    VCLZB VCLZB_U
+  vss __builtin_vec_vclz (vss);
+    VCLZH VCLZH_S
+  vus __builtin_vec_vclz (vus);
+    VCLZH VCLZH_U
+  vsi __builtin_vec_vclz (vsi);
+    VCLZW VCLZW_S
+  vui __builtin_vec_vclz (vui);
+    VCLZW VCLZW_U
+  vsll __builtin_vec_vclz (vsll);
+    VCLZD VCLZD_S
+  vull __builtin_vec_vclz (vull);
+    VCLZD VCLZD_U
+
+[VEC_CNTLZM, vec_cntlzm, __builtin_vec_vclzdm, _ARCH_PWR10]
+  vull __builtin_vec_vclzdm (vull, vull);
+    VCLZDM
+
+[VEC_CNTTZM, vec_cnttzm, __builtin_vec_vctzdm, _ARCH_PWR10]
+  vull __builtin_vec_vctzdm (vull, vull);
+    VCTZDM
+
+[VEC_CNTLZ_LSBB, vec_cntlz_lsbb, __builtin_vec_vclzlsbb, _ARCH_PWR9]
+  signed int __builtin_vec_vclzlsbb (vsc);
+    VCLZLSBB_V16QI VCLZLSBB_VSC
+  signed int __builtin_vec_vclzlsbb (vuc);
+    VCLZLSBB_V16QI VCLZLSBB_VUC
+  signed int __builtin_vec_vclzlsbb (vss);
+    VCLZLSBB_V8HI VCLZLSBB_VSS
+  signed int __builtin_vec_vclzlsbb (vus);
+    VCLZLSBB_V8HI VCLZLSBB_VUS
+  signed int __builtin_vec_vclzlsbb (vsi);
+    VCLZLSBB_V4SI VCLZLSBB_VSI
+  signed int __builtin_vec_vclzlsbb (vui);
+    VCLZLSBB_V4SI VCLZLSBB_VUI
+
+[VEC_CNTM, vec_cntm, __builtin_vec_cntm, _ARCH_PWR10]
+  unsigned long long __builtin_vec_cntm (vuc, const int);
+    VCNTMBB
+  unsigned long long __builtin_vec_cntm (vus, const int);
+    VCNTMBH
+  unsigned long long __builtin_vec_cntm (vui, const int);
+    VCNTMBW
+  unsigned long long __builtin_vec_cntm (vull, const int);
+    VCNTMBD
+
+[VEC_CNTTZ, vec_cnttz, __builtin_vec_vctz, _ARCH_PWR9]
+  vsc __builtin_vec_vctz (vsc);
+    VCTZB VCTZB_S
+  vuc __builtin_vec_vctz (vuc);
+    VCTZB VCTZB_U
+  vss __builtin_vec_vctz (vss);
+    VCTZH VCTZH_S
+  vus __builtin_vec_vctz (vus);
+    VCTZH VCTZH_U
+  vsi __builtin_vec_vctz (vsi);
+    VCTZW VCTZW_S
+  vui __builtin_vec_vctz (vui);
+    VCTZW VCTZW_U
+  vsll __builtin_vec_vctz (vsll);
+    VCTZD VCTZD_S
+  vull __builtin_vec_vctz (vull);
+    VCTZD VCTZD_U
+
+[VEC_CNTTZ_LSBB, vec_cnttz_lsbb, __builtin_vec_vctzlsbb, _ARCH_PWR9]
+  signed int __builtin_vec_vctzlsbb (vsc);
+    VCTZLSBB_V16QI VCTZLSBB_VSC
+  signed int __builtin_vec_vctzlsbb (vuc);
+    VCTZLSBB_V16QI VCTZLSBB_VUC
+  signed int __builtin_vec_vctzlsbb (vss);
+    VCTZLSBB_V8HI VCTZLSBB_VSS
+  signed int __builtin_vec_vctzlsbb (vus);
+    VCTZLSBB_V8HI VCTZLSBB_VUS
+  signed int __builtin_vec_vctzlsbb (vsi);
+    VCTZLSBB_V4SI VCTZLSBB_VSI
+  signed int __builtin_vec_vctzlsbb (vui);
+    VCTZLSBB_V4SI VCTZLSBB_VUI
+
+[VEC_CONVERT_4F32_8I16, SKIP, __builtin_vec_convert_4f32_8i16]
+  vus __builtin_vec_convert_4f32_8i16 (vf, vf);
+    CONVERT_4F32_8I16
+
+[VEC_CONVERT_4F32_8F16, vec_pack_to_short_fp32, __builtin_vec_convert_4f32_8f16, _ARCH_PWR9]
+  vus __builtin_vec_convert_4f32_8f16 (vf, vf);
+    CONVERT_4F32_8F16
+
+[VEC_COPYSIGN, vec_cpsgn, __builtin_vec_copysign]
+  vf __builtin_vec_copysign (vf, vf);
+    CPSGNSP
+  vd __builtin_vec_copysign (vd, vd);
+    CPSGNDP
+
+[VEC_CTF, vec_ctf, __builtin_vec_ctf]
+  vf __builtin_vec_ctf (vsi, const int);
+    VCFSX
+  vf __builtin_vec_ctf (vui, const int);
+    VCFUX
+  vd __builtin_vec_ctf (vsll, const int);
+    XVCVSXDDP_SCALE
+  vd __builtin_vec_ctf (vull, const int);
+    XVCVUXDDP_SCALE
+
+[VEC_CTS, vec_cts, __builtin_vec_cts]
+  vsi __builtin_vec_cts (vf, const int);
+    VCTSXS
+  vsll __builtin_vec_cts (vd, const int);
+    XVCVDPSXDS_SCALE
+
+[VEC_CTU, vec_ctu, __builtin_vec_ctu]
+  vui __builtin_vec_ctu (vf, const int);
+    VCTUXS
+  vull __builtin_vec_ctu (vd, const int);
+    XVCVDPUXDS_SCALE
+
+[VEC_DIV, vec_div, __builtin_vec_div, __VSX__]
+  vsi __builtin_vec_div (vsi, vsi);
+    VDIVSW
+  vui __builtin_vec_div (vui, vui);
+    VDIVUW
+  vsll __builtin_vec_div (vsll, vsll);
+    DIV_V2DI
+  vull __builtin_vec_div (vull, vull);
+    UDIV_V2DI
+  vsq __builtin_vec_div (vsq, vsq);
+    DIV_V1TI
+  vuq __builtin_vec_div (vuq, vuq);
+    UDIV_V1TI
+  vf __builtin_vec_div (vf, vf);
+    XVDIVSP
+  vd __builtin_vec_div (vd, vd);
+    XVDIVDP
+
+[VEC_DIVE, vec_dive, __builtin_vec_dive, _ARCH_PWR10]
+  vsi __builtin_vec_dive (vsi, vsi);
+    VDIVESW
+  vui __builtin_vec_dive (vui, vui);
+    VDIVEUW
+  vsll __builtin_vec_dive (vsll, vsll);
+    VDIVESD
+  vull __builtin_vec_dive (vull, vull);
+    VDIVEUD
+  vsq __builtin_vec_dive (vsq, vsq);
+    DIVES_V1TI
+  vuq __builtin_vec_dive (vuq, vuq);
+    DIVEU_V1TI
+
+[VEC_DOUBLE, vec_double, __builtin_vec_double]
+  vd __builtin_vec_double (vsll);
+    XVCVSXDDP
+  vd __builtin_vec_double (vull);
+    XVCVUXDDP
+
+[VEC_DOUBLEE, vec_doublee, __builtin_vec_doublee]
+  vd __builtin_vec_doublee (vsi);
+    DOUBLEE_V4SI
+  vd __builtin_vec_doublee (vui);
+    UNS_DOUBLEE_V4SI
+  vd __builtin_vec_doublee (vf);
+    DOUBLEE_V4SF
+
+[VEC_DOUBLEH, vec_doubleh, __builtin_vec_doubleh]
+  vd __builtin_vec_doubleh (vsi);
+    DOUBLEH_V4SI
+  vd __builtin_vec_doubleh (vui);
+    UNS_DOUBLEH_V4SI
+  vd __builtin_vec_doubleh (vf);
+    DOUBLEH_V4SF
+
+[VEC_DOUBLEL, vec_doublel, __builtin_vec_doublel]
+  vd __builtin_vec_doublel (vsi);
+    DOUBLEL_V4SI
+  vd __builtin_vec_doublel (vui);
+    UNS_DOUBLEL_V4SI
+  vd __builtin_vec_doublel (vf);
+    DOUBLEL_V4SF
+
+[VEC_DOUBLEO, vec_doubleo, __builtin_vec_doubleo]
+  vd __builtin_vec_doubleo (vsi);
+    DOUBLEO_V4SI
+  vd __builtin_vec_doubleo (vui);
+    UNS_DOUBLEO_V4SI
+  vd __builtin_vec_doubleo (vf);
+    DOUBLEO_V4SF
+
+[VEC_DST, vec_dst, __builtin_vec_dst]
+  void __builtin_vec_dst (unsigned char *, const int, const int);
+    DST DST_UC
+  void __builtin_vec_dst (signed char *, const int, const int);
+    DST DST_SC
+  void __builtin_vec_dst (unsigned short *, const int, const int);
+    DST DST_US
+  void __builtin_vec_dst (signed short *, const int, const int);
+    DST DST_SS
+  void __builtin_vec_dst (unsigned int *, const int, const int);
+    DST DST_UI
+  void __builtin_vec_dst (signed int *, const int, const int);
+    DST DST_SI
+  void __builtin_vec_dst (unsigned long *, const int, const int);
+    DST DST_UL
+  void __builtin_vec_dst (signed long *, const int, const int);
+    DST DST_SL
+  void __builtin_vec_dst (unsigned long long *, const int, const int);
+    DST DST_ULL
+  void __builtin_vec_dst (signed long long *, const int, const int);
+    DST DST_SLL
+  void __builtin_vec_dst (float *, const int, const int);
+    DST DST_F
+  void __builtin_vec_dst (vuc *, const int, const int);
+    DST DST_VUC
+  void __builtin_vec_dst (vsc *, const int, const int);
+    DST DST_VSC
+  void __builtin_vec_dst (vbc *, const int, const int);
+    DST DST_VBC
+  void __builtin_vec_dst (vus *, const int, const int);
+    DST DST_VUS
+  void __builtin_vec_dst (vss *, const int, const int);
+    DST DST_VSS
+  void __builtin_vec_dst (vbs *, const int, const int);
+    DST DST_VBS
+  void __builtin_vec_dst (vp *, const int, const int);
+    DST DST_VP
+  void __builtin_vec_dst (vui *, const int, const int);
+    DST DST_VUI
+  void __builtin_vec_dst (vsi *, const int, const int);
+    DST DST_VSI
+  void __builtin_vec_dst (vbi *, const int, const int);
+    DST DST_VBI
+  void __builtin_vec_dst (vf *, const int, const int);
+    DST DST_VF
+
+[VEC_DSTST, vec_dstst, __builtin_vec_dstst]
+  void __builtin_vec_dstst (unsigned char *, const int, const int);
+    DSTST DSTST_UC
+  void __builtin_vec_dstst (signed char *, const int, const int);
+    DSTST DSTST_SC
+  void __builtin_vec_dstst (unsigned short *, const int, const int);
+    DSTST DSTST_US
+  void __builtin_vec_dstst (signed short *, const int, const int);
+    DSTST DSTST_SS
+  void __builtin_vec_dstst (unsigned int *, const int, const int);
+    DSTST DSTST_UI
+  void __builtin_vec_dstst (signed int *, const int, const int);
+    DSTST DSTST_SI
+  void __builtin_vec_dstst (unsigned long *, const int, const int);
+    DSTST DSTST_UL
+  void __builtin_vec_dstst (signed long *, const int, const int);
+    DSTST DSTST_SL
+  void __builtin_vec_dstst (unsigned long long *, const int, const int);
+    DSTST DSTST_ULL
+  void __builtin_vec_dstst (signed long long *, const int, const int);
+    DSTST DSTST_SLL
+  void __builtin_vec_dstst (float *, const int, const int);
+    DSTST DSTST_F
+  void __builtin_vec_dstst (vuc *, const int, const int);
+    DSTST DSTST_VUC
+  void __builtin_vec_dstst (vsc *, const int, const int);
+    DSTST DSTST_VSC
+  void __builtin_vec_dstst (vbc *, const int, const int);
+    DSTST DSTST_VBC
+  void __builtin_vec_dstst (vus *, const int, const int);
+    DSTST DSTST_VUS
+  void __builtin_vec_dstst (vss *, const int, const int);
+    DSTST DSTST_VSS
+  void __builtin_vec_dstst (vbs *, const int, const int);
+    DSTST DSTST_VBS
+  void __builtin_vec_dstst (vp *, const int, const int);
+    DSTST DSTST_VP
+  void __builtin_vec_dstst (vui *, const int, const int);
+    DSTST DSTST_VUI
+  void __builtin_vec_dstst (vsi *, const int, const int);
+    DSTST DSTST_VSI
+  void __builtin_vec_dstst (vbi *, const int, const int);
+    DSTST DSTST_VBI
+  void __builtin_vec_dstst (vf *, const int, const int);
+    DSTST DSTST_VF
+
+[VEC_DSTSTT, vec_dststt, __builtin_vec_dststt]
+  void __builtin_vec_dststt (unsigned char *, const int, const int);
+    DSTSTT DSTSTT_UC
+  void __builtin_vec_dststt (signed char *, const int, const int);
+    DSTSTT DSTSTT_SC
+  void __builtin_vec_dststt (unsigned short *, const int, const int);
+    DSTSTT DSTSTT_US
+  void __builtin_vec_dststt (signed short *, const int, const int);
+    DSTSTT DSTSTT_SS
+  void __builtin_vec_dststt (unsigned int *, const int, const int);
+    DSTSTT DSTSTT_UI
+  void __builtin_vec_dststt (signed int *, const int, const int);
+    DSTSTT DSTSTT_SI
+  void __builtin_vec_dststt (unsigned long *, const int, const int);
+    DSTSTT DSTSTT_UL
+  void __builtin_vec_dststt (signed long *, const int, const int);
+    DSTSTT DSTSTT_SL
+  void __builtin_vec_dststt (unsigned long long *, const int, const int);
+    DSTSTT DSTSTT_ULL
+  void __builtin_vec_dststt (signed long long *, const int, const int);
+    DSTSTT DSTSTT_SLL
+  void __builtin_vec_dststt (float *, const int, const int);
+    DSTSTT DSTSTT_F
+  void __builtin_vec_dststt (vuc *, const int, const int);
+    DSTSTT DSTSTT_VUC
+  void __builtin_vec_dststt (vsc *, const int, const int);
+    DSTSTT DSTSTT_VSC
+  void __builtin_vec_dststt (vbc *, const int, const int);
+    DSTSTT DSTSTT_VBC
+  void __builtin_vec_dststt (vus *, const int, const int);
+    DSTSTT DSTSTT_VUS
+  void __builtin_vec_dststt (vss *, const int, const int);
+    DSTSTT DSTSTT_VSS
+  void __builtin_vec_dststt (vbs *, const int, const int);
+    DSTSTT DSTSTT_VBS
+  void __builtin_vec_dststt (vp *, const int, const int);
+    DSTSTT DSTSTT_VP
+  void __builtin_vec_dststt (vui *, const int, const int);
+    DSTSTT DSTSTT_VUI
+  void __builtin_vec_dststt (vsi *, const int, const int);
+    DSTSTT DSTSTT_VSI
+  void __builtin_vec_dststt (vbi *, const int, const int);
+    DSTSTT DSTSTT_VBI
+  void __builtin_vec_dststt (vf *, const int, const int);
+    DSTSTT DSTSTT_VF
+
+[VEC_DSTT, vec_dstt, __builtin_vec_dstt]
+  void __builtin_vec_dstt (unsigned char *, const int, const int);
+    DSTT DSTT_UC
+  void __builtin_vec_dstt (signed char *, const int, const int);
+    DSTT DSTT_SC
+  void __builtin_vec_dstt (unsigned short *, const int, const int);
+    DSTT DSTT_US
+  void __builtin_vec_dstt (signed short *, const int, const int);
+    DSTT DSTT_SS
+  void __builtin_vec_dstt (unsigned int *, const int, const int);
+    DSTT DSTT_UI
+  void __builtin_vec_dstt (signed int *, const int, const int);
+    DSTT DSTT_SI
+  void __builtin_vec_dstt (unsigned long *, const int, const int);
+    DSTT DSTT_UL
+  void __builtin_vec_dstt (signed long *, const int, const int);
+    DSTT DSTT_SL
+  void __builtin_vec_dstt (unsigned long long *, const int, const int);
+    DSTT DSTT_ULL
+  void __builtin_vec_dstt (signed long long *, const int, const int);
+    DSTT DSTT_SLL
+  void __builtin_vec_dstt (float *, const int, const int);
+    DSTT DSTT_F
+  void __builtin_vec_dstt (vuc *, const int, const int);
+    DSTT DSTT_VUC
+  void __builtin_vec_dstt (vsc *, const int, const int);
+    DSTT DSTT_VSC
+  void __builtin_vec_dstt (vbc *, const int, const int);
+    DSTT DSTT_VBC
+  void __builtin_vec_dstt (vus *, const int, const int);
+    DSTT DSTT_VUS
+  void __builtin_vec_dstt (vss *, const int, const int);
+    DSTT DSTT_VSS
+  void __builtin_vec_dstt (vbs *, const int, const int);
+    DSTT DSTT_VBS
+  void __builtin_vec_dstt (vp *, const int, const int);
+    DSTT DSTT_VP
+  void __builtin_vec_dstt (vui *, const int, const int);
+    DSTT DSTT_VUI
+  void __builtin_vec_dstt (vsi *, const int, const int);
+    DSTT DSTT_VSI
+  void __builtin_vec_dstt (vbi *, const int, const int);
+    DSTT DSTT_VBI
+  void __builtin_vec_dstt (vf *, const int, const int);
+    DSTT DSTT_VF
+
+[VEC_EQV, vec_eqv, __builtin_vec_eqv, _ARCH_PWR8]
+  vsc __builtin_vec_eqv (vsc, vsc);
+    EQV_V16QI
+  vuc __builtin_vec_eqv (vuc, vuc);
+    EQV_V16QI_UNS EQV_V16QI_VUC
+  vbc __builtin_vec_eqv (vbc, vbc);
+    EQV_V16QI_UNS EQV_V16QI_VBC
+  vss __builtin_vec_eqv (vss, vss);
+    EQV_V8HI
+  vus __builtin_vec_eqv (vus, vus);
+    EQV_V8HI_UNS EQV_V8HI_VUS
+  vbs __builtin_vec_eqv (vbs, vbs);
+    EQV_V8HI_UNS EQV_V8HI_VBS
+  vsi __builtin_vec_eqv (vsi, vsi);
+    EQV_V4SI
+  vui __builtin_vec_eqv (vui, vui);
+    EQV_V4SI_UNS EQV_V4SI_VUI
+  vbi __builtin_vec_eqv (vbi, vbi);
+    EQV_V4SI_UNS EQV_V4SI_VBI
+  vsll __builtin_vec_eqv (vsll, vsll);
+    EQV_V2DI
+  vull __builtin_vec_eqv (vull, vull);
+    EQV_V2DI_UNS EQV_V2DI_VULL
+  vbll __builtin_vec_eqv (vbll, vbll);
+    EQV_V2DI_UNS EQV_V2DI_VBLL
+  vf __builtin_vec_eqv (vf, vf);
+    EQV_V4SF
+  vd __builtin_vec_eqv (vd, vd);
+    EQV_V2DF
+; The following variants are deprecated.
+  vsc __builtin_vec_eqv (vbc, vsc);
+    EQV_V16QI EQV_VBC_VSC
+  vsc __builtin_vec_eqv (vsc, vbc);
+    EQV_V16QI EQV_VSC_VBC
+  vuc __builtin_vec_eqv (vbc, vuc);
+    EQV_V16QI_UNS EQV_VBC_VUC
+  vuc __builtin_vec_eqv (vuc, vbc);
+    EQV_V16QI_UNS EQV_VUC_VBC
+  vss __builtin_vec_eqv (vbs, vss);
+    EQV_V8HI EQV_VBS_VSS
+  vss __builtin_vec_eqv (vss, vbs);
+    EQV_V8HI EQV_VSS_VBS
+  vus __builtin_vec_eqv (vbs, vus);
+    EQV_V8HI_UNS EQV_VBS_VUS
+  vus __builtin_vec_eqv (vus, vbs);
+    EQV_V8HI_UNS EQV_VUS_VBS
+  vsi __builtin_vec_eqv (vbi, vsi);
+    EQV_V4SI EQV_VBI_VSI
+  vsi __builtin_vec_eqv (vsi, vbi);
+    EQV_V4SI EQV_VSI_VBI
+  vui __builtin_vec_eqv (vbi, vui);
+    EQV_V4SI_UNS EQV_VBI_VUI
+  vui __builtin_vec_eqv (vui, vbi);
+    EQV_V4SI_UNS EQV_VUI_VBI
+  vsll __builtin_vec_eqv (vbll, vsll);
+    EQV_V2DI EQV_VBLL_VSLL
+  vsll __builtin_vec_eqv (vsll, vbll);
+    EQV_V2DI EQV_VSLL_VBLL
+  vull __builtin_vec_eqv (vbll, vull);
+    EQV_V2DI_UNS EQV_VBLL_VULL
+  vull __builtin_vec_eqv (vull, vbll);
+    EQV_V2DI_UNS EQV_VULL_VBLL
+
+[VEC_EXPANDM, vec_expandm, __builtin_vec_vexpandm, _ARCH_PWR10]
+  vuc __builtin_vec_vexpandm (vuc);
+    VEXPANDMB
+  vus __builtin_vec_vexpandm (vus);
+    VEXPANDMH
+  vui __builtin_vec_vexpandm (vui);
+    VEXPANDMW
+  vull __builtin_vec_vexpandm (vull);
+    VEXPANDMD
+  vuq __builtin_vec_vexpandm (vuq);
+    VEXPANDMQ
+
+[VEC_EXPTE, vec_expte, __builtin_vec_expte]
+  vf __builtin_vec_expte (vf);
+    VEXPTEFP
+
+; There are no actual builtins for vec_extract. There is special handling for
+; this in altivec_resolve_overloaded_builtin in rs6000-c.c, where the call
+; is replaced by "pointer tricks." The single overload here causes
+; __builtin_vec_extract to be registered with the front end so this can
+; happen.
+[VEC_EXTRACT, vec_extract, __builtin_vec_extract]
+  vsi __builtin_vec_extract (vsi, signed int);
+    VSPLTW EXTRACT_FAKERY
+
+[VEC_EXTRACT_FP_FROM_SHORTH, vec_extract_fp32_from_shorth, __builtin_vec_vextract_fp_from_shorth, _ARCH_PWR9]
+  vf __builtin_vec_vextract_fp_from_shorth (vus);
+    VEXTRACT_FP_FROM_SHORTH
+
+[VEC_EXTRACT_FP_FROM_SHORTL, vec_extract_fp32_from_shortl, __builtin_vec_vextract_fp_from_shortl, _ARCH_PWR9]
+  vf __builtin_vec_vextract_fp_from_shortl (vus);
+    VEXTRACT_FP_FROM_SHORTL
+
+[VEC_EXTRACTH, vec_extracth, __builtin_vec_extracth, _ARCH_PWR10]
+  vull __builtin_vec_extracth (vuc, vuc, unsigned char);
+    VEXTRACTBR
+  vull __builtin_vec_extracth (vus, vus, unsigned char);
+    VEXTRACTHR
+  vull __builtin_vec_extracth (vui, vui, unsigned char);
+    VEXTRACTWR
+  vull __builtin_vec_extracth (vull, vull, unsigned char);
+    VEXTRACTDR
+
+[VEC_EXTRACTL, vec_extractl, __builtin_vec_extractl, _ARCH_PWR10]
+  vull __builtin_vec_extractl (vuc, vuc, unsigned char);
+    VEXTRACTBL
+  vull __builtin_vec_extractl (vus, vus, unsigned char);
+    VEXTRACTHL
+  vull __builtin_vec_extractl (vui, vui, unsigned char);
+    VEXTRACTWL
+  vull __builtin_vec_extractl (vull, vull, unsigned char);
+    VEXTRACTDL
+
+[VEC_EXTRACTM, vec_extractm, __builtin_vec_vextractm, _ARCH_PWR10]
+  signed int __builtin_vec_vextractm (vuc);
+    VEXTRACTMB
+  signed int __builtin_vec_vextractm (vus);
+    VEXTRACTMH
+  signed int __builtin_vec_vextractm (vui);
+    VEXTRACTMW
+  signed int __builtin_vec_vextractm (vull);
+    VEXTRACTMD
+  signed int __builtin_vec_vextractm (vuq);
+    VEXTRACTMQ
+
+[VEC_EXTRACT4B, vec_extract4b, __builtin_vec_extract4b, _ARCH_PWR9]
+  vull __builtin_vec_extract4b (vuc, const int);
+    EXTRACT4B
+
+[VEC_EXTULX, vec_xlx, __builtin_vec_vextulx, _ARCH_PWR9]
+  signed char __builtin_vec_vextulx (unsigned int, vsc);
+    VEXTUBLX VEXTUBLX_S
+  unsigned char __builtin_vec_vextulx (unsigned int, vuc);
+    VEXTUBLX VEXTUBLX_U
+  signed short __builtin_vec_vextulx (unsigned int, vss);
+    VEXTUHLX VEXTUHLX_S
+  unsigned short __builtin_vec_vextulx (unsigned int, vus);
+    VEXTUHLX VEXTUHLX_U
+  signed int __builtin_vec_vextulx (unsigned int, vsi);
+    VEXTUWLX VEXTUWLX_S
+  unsigned int __builtin_vec_vextulx (unsigned int, vui);
+    VEXTUWLX VEXTUWLX_U
+  float __builtin_vec_vextulx (unsigned int, vf);
+    VEXTUWLX VEXTUWLX_F
+
+[VEC_EXTURX, vec_xrx, __builtin_vec_vexturx, _ARCH_PWR9]
+  signed char __builtin_vec_vexturx (unsigned int, vsc);
+    VEXTUBRX VEXTUBRX_S
+  unsigned char __builtin_vec_vexturx (unsigned int, vuc);
+    VEXTUBRX VEXTUBRX_U
+  signed short __builtin_vec_vexturx (unsigned int, vss);
+    VEXTUHRX VEXTUHRX_S
+  unsigned short __builtin_vec_vexturx (unsigned int, vus);
+    VEXTUHRX VEXTUHRX_U
+  signed int __builtin_vec_vexturx (unsigned int, vsi);
+    VEXTUWRX VEXTUWRX_S
+  unsigned int __builtin_vec_vexturx (unsigned int, vui);
+    VEXTUWRX VEXTUWRX_U
+  float __builtin_vec_vexturx (unsigned int, vf);
+    VEXTUWRX VEXTUWRX_F
+
+[VEC_FIRSTMATCHINDEX, vec_first_match_index, __builtin_vec_first_match_index, _ARCH_PWR9]
+  unsigned int __builtin_vec_first_match_index (vsc, vsc);
+    VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VSC
+  unsigned int __builtin_vec_first_match_index (vuc, vuc);
+    VFIRSTMATCHINDEX_V16QI FIRSTMATCHINDEX_VUC
+  unsigned int __builtin_vec_first_match_index (vss, vss);
+    VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VSS
+  unsigned int __builtin_vec_first_match_index (vus, vus);
+    VFIRSTMATCHINDEX_V8HI FIRSTMATCHINDEX_VUS
+  unsigned int __builtin_vec_first_match_index (vsi, vsi);
+    VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VSI
+  unsigned int __builtin_vec_first_match_index (vui, vui);
+    VFIRSTMATCHINDEX_V4SI FIRSTMATCHINDEX_VUI
+
+[VEC_FIRSTMATCHOREOSINDEX, vec_first_match_or_eos_index, __builtin_vec_first_match_or_eos_index, _ARCH_PWR9]
+  unsigned int __builtin_vec_first_match_or_eos_index (vsc, vsc);
+    VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VSC
+  unsigned int __builtin_vec_first_match_or_eos_index (vuc, vuc);
+    VFIRSTMATCHOREOSINDEX_V16QI FIRSTMATCHOREOSINDEX_VUC
+  unsigned int __builtin_vec_first_match_or_eos_index (vss, vss);
+    VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VSS
+  unsigned int __builtin_vec_first_match_or_eos_index (vus, vus);
+    VFIRSTMATCHOREOSINDEX_V8HI FIRSTMATCHOREOSINDEX_VUS
+  unsigned int __builtin_vec_first_match_or_eos_index (vsi, vsi);
+    VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VSI
+  unsigned int __builtin_vec_first_match_or_eos_index (vui, vui);
+    VFIRSTMATCHOREOSINDEX_V4SI FIRSTMATCHOREOSINDEX_VUI
+
+[VEC_FIRSTMISMATCHINDEX, vec_first_mismatch_index, __builtin_vec_first_mismatch_index, _ARCH_PWR9]
+  unsigned int __builtin_vec_first_mismatch_index (vsc, vsc);
+    VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VSC
+  unsigned int __builtin_vec_first_mismatch_index (vuc, vuc);
+    VFIRSTMISMATCHINDEX_V16QI FIRSTMISMATCHINDEX_VUC
+  unsigned int __builtin_vec_first_mismatch_index (vss, vss);
+    VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VSS
+  unsigned int __builtin_vec_first_mismatch_index (vus, vus);
+    VFIRSTMISMATCHINDEX_V8HI FIRSTMISMATCHINDEX_VUS
+  unsigned int __builtin_vec_first_mismatch_index (vsi, vsi);
+    VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VSI
+  unsigned int __builtin_vec_first_mismatch_index (vui, vui);
+    VFIRSTMISMATCHINDEX_V4SI FIRSTMISMATCHINDEX_VUI
+
+[VEC_FIRSTMISMATCHOREOSINDEX, vec_first_mismatch_or_eos_index, __builtin_vec_first_mismatch_or_eos_index, _ARCH_PWR9]
+  unsigned int __builtin_vec_first_mismatch_or_eos_index (vsc, vsc);
+    VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VSC
+  unsigned int __builtin_vec_first_mismatch_or_eos_index (vuc, vuc);
+    VFIRSTMISMATCHOREOSINDEX_V16QI FIRSTMISMATCHOREOSINDEX_VUC
+  unsigned int __builtin_vec_first_mismatch_or_eos_index (vss, vss);
+    VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VSS
+  unsigned int __builtin_vec_first_mismatch_or_eos_index (vus, vus);
+    VFIRSTMISMATCHOREOSINDEX_V8HI FIRSTMISMATCHOREOSINDEX_VUS
+  unsigned int __builtin_vec_first_mismatch_or_eos_index (vsi, vsi);
+    VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VSI
+  unsigned int __builtin_vec_first_mismatch_or_eos_index (vui, vui);
+    VFIRSTMISMATCHOREOSINDEX_V4SI FIRSTMISMATCHOREOSINDEX_VUI
+
+[VEC_FLOAT, vec_float, __builtin_vec_float]
+  vf __builtin_vec_float (vsi);
+    XVCVSXWSP
+  vf __builtin_vec_float (vui);
+    XVCVUXWSP
+
+[VEC_FLOAT2, vec_float2, __builtin_vec_float2]
+  vf __builtin_vec_float2 (vsll, vsll);
+    FLOAT2_V2DI
+  vf __builtin_vec_float2 (vull, vull);
+    UNS_FLOAT2_V2DI
+  vf __builtin_vec_float2 (vd, vd);
+    FLOAT2_V2DF
+
+[VEC_FLOATE, vec_floate, __builtin_vec_floate]
+  vf __builtin_vec_floate (vsll);
+    FLOATE_V2DI
+  vf __builtin_vec_floate (vull);
+    UNS_FLOATE_V2DI
+  vf __builtin_vec_floate (vd);
+    FLOATE_V2DF
+
+[VEC_FLOATO, vec_floato, __builtin_vec_floato]
+  vf __builtin_vec_floato (vsll);
+    FLOATO_V2DI
+  vf __builtin_vec_floato (vull);
+    UNS_FLOATO_V2DI
+  vf __builtin_vec_floato (vd);
+    FLOATO_V2DF
+
+; #### XVRSPIM{TARGET_VSX}; VRFIM
+[VEC_FLOOR, vec_floor, __builtin_vec_floor]
+  vf __builtin_vec_floor (vf);
+    VRFIM
+  vd __builtin_vec_floor (vd);
+    XVRDPIM
+
+[VEC_GB, vec_gb, __builtin_vec_vgbbd, _ARCH_PWR8]
+  vsc __builtin_vec_vgbbd (vsc);
+    VGBBD VGBBD_S
+  vuc __builtin_vec_vgbbd (vuc);
+    VGBBD VGBBD_U
+
+[VEC_GENBM, vec_genbm, __builtin_vec_mtvsrbm, _ARCH_PWR10]
+  vuc __builtin_vec_mtvsrbm (unsigned long long);
+    MTVSRBM
+
+[VEC_GENHM, vec_genhm, __builtin_vec_mtvsrhm, _ARCH_PWR10]
+  vus __builtin_vec_mtvsrhm (unsigned long long);
+    MTVSRHM
+
+[VEC_GENWM, vec_genwm, __builtin_vec_mtvsrwm, _ARCH_PWR10]
+  vui __builtin_vec_mtvsrwm (unsigned long long);
+    MTVSRWM
+
+[VEC_GENDM, vec_gendm, __builtin_vec_mtvsrdm, _ARCH_PWR10]
+  vull __builtin_vec_mtvsrdm (unsigned long long);
+    MTVSRDM
+
+[VEC_GENQM, vec_genqm, __builtin_vec_mtvsrqm, _ARCH_PWR10]
+  vuq __builtin_vec_mtvsrqm (unsigned long long);
+    MTVSRQM
+
+[VEC_GENPCVM, vec_genpcvm, __builtin_vec_xxgenpcvm, _ARCH_PWR10]
+  vuc __builtin_vec_xxgenpcvm (vuc, const int);
+    XXGENPCVM_V16QI
+  vus __builtin_vec_xxgenpcvm (vus, const int);
+    XXGENPCVM_V8HI
+  vui __builtin_vec_xxgenpcvm (vui, const int);
+    XXGENPCVM_V4SI
+  vull __builtin_vec_xxgenpcvm (vull, const int);
+    XXGENPCVM_V2DI
+
+[VEC_GNB, vec_gnb, __builtin_vec_gnb, _ARCH_PWR10]
+  unsigned long long __builtin_vec_gnb (vuq, const int);
+    VGNB
+
+; There are no actual builtins for vec_insert. There is special handling for
+; this in altivec_resolve_overloaded_builtin in rs6000-c.c, where the call
+; is replaced by "pointer tricks." The single overload here causes
+; __builtin_vec_insert to be registered with the front end so this can happen.
+[VEC_INSERT, vec_insert, __builtin_vec_insert] + vsi __builtin_vec_insert (vsi, vsi, signed int); + XXPERMDI_4SI INSERT_FAKERY + +[VEC_INSERTH, vec_inserth, __builtin_vec_inserth, _ARCH_PWR10] + vuc __builtin_vec_inserth (unsigned char, vuc, unsigned int); + VINSERTGPRBR + vuc __builtin_vec_inserth (vuc, vuc, unsigned int); + VINSERTVPRBR + vus __builtin_vec_inserth (unsigned short, vus, unsigned int); + VINSERTGPRHR + vus __builtin_vec_inserth (vus, vus, unsigned int); + VINSERTVPRHR + vui __builtin_vec_inserth (unsigned int, vui, unsigned int); + VINSERTGPRWR + vui __builtin_vec_inserth (vui, vui, unsigned int); + VINSERTVPRWR + vull __builtin_vec_inserth (unsigned long long, vull, unsigned int); + VINSERTGPRDR + +[VEC_INSERTL, vec_insertl, __builtin_vec_insertl, _ARCH_PWR10] + vuc __builtin_vec_insertl (unsigned char, vuc, unsigned int); + VINSERTGPRBL + vuc __builtin_vec_insertl (vuc, vuc, unsigned int); + VINSERTVPRBL + vus __builtin_vec_insertl (unsigned short, vus, unsigned int); + VINSERTGPRHL + vus __builtin_vec_insertl (vus, vus, unsigned int); + VINSERTVPRHL + vui __builtin_vec_insertl (unsigned int, vui, unsigned int); + VINSERTGPRWL + vui __builtin_vec_insertl (vui, vui, unsigned int); + VINSERTVPRWL + vull __builtin_vec_insertl (unsigned long long, vull, unsigned int); + VINSERTGPRDL + +[VEC_INSERT4B, vec_insert4b, __builtin_vec_insert4b, _ARCH_PWR9] + vuc __builtin_vec_insert4b (vsi, vuc, const int); + INSERT4B INSERT4B_S + vuc __builtin_vec_insert4b (vui, vuc, const int); + INSERT4B INSERT4B_U + +[VEC_LD, vec_ld, __builtin_vec_ld] + vsc __builtin_vec_ld (signed long, const vsc *); + LVX_V16QI LVX_V16QI_VSC + vsc __builtin_vec_ld (signed long, const signed char *); + LVX_V16QI LVX_V16QI_SC + vuc __builtin_vec_ld (signed long, const vuc *); + LVX_V16QI LVX_V16QI_VUC + vuc __builtin_vec_ld (signed long, const unsigned char *); + LVX_V16QI LVX_V16QI_UC + vbc __builtin_vec_ld (signed long, const vbc *); + LVX_V16QI LVX_V16QI_VBC + vss __builtin_vec_ld (signed long, const vss *); + LVX_V8HI LVX_V8HI_VSS + vss __builtin_vec_ld (signed long, const signed short *); + LVX_V8HI LVX_V8HI_SS + vus __builtin_vec_ld (signed long, const vus *); + LVX_V8HI LVX_V8HI_VUS + vus __builtin_vec_ld (signed long, const unsigned short *); + LVX_V8HI LVX_V8HI_US + vbs __builtin_vec_ld (signed long, const vbs *); + LVX_V8HI LVX_V8HI_VBS + vp __builtin_vec_ld (signed long, const vp *); + LVX_V8HI LVX_V8HI_VP + vsi __builtin_vec_ld (signed long, const vsi *); + LVX_V4SI LVX_V4SI_VSI + vsi __builtin_vec_ld (signed long, const signed int *); + LVX_V4SI LVX_V4SI_SI + vui __builtin_vec_ld (signed long, const vui *); + LVX_V4SI LVX_V4SI_VUI + vui __builtin_vec_ld (signed long, const unsigned int *); + LVX_V4SI LVX_V4SI_UI + vbi __builtin_vec_ld (signed long, const vbi *); + LVX_V4SI LVX_V4SI_VBI + vsll __builtin_vec_ld (signed long, const vsll *); + LVX_V2DI LVX_V2DI_VSLL + vsll __builtin_vec_ld (signed long, const signed long long *); + LVX_V2DI LVX_V2DI_SLL + vull __builtin_vec_ld (signed long, const vull *); + LVX_V2DI LVX_V2DI_VULL + vull __builtin_vec_ld (signed long, const unsigned long long *); + LVX_V2DI LVX_V2DI_ULL + vbll __builtin_vec_ld (signed long, const vbll *); + LVX_V2DI LVX_V2DI_VBLL + vsq __builtin_vec_ld (signed long, const vsq *); + LVX_V1TI LVX_V1TI_VSQ + vuq __builtin_vec_ld (signed long, const vuq *); + LVX_V1TI LVX_V1TI_VUQ + vsq __builtin_vec_ld (signed long, const __int128 *); + LVX_V1TI LVX_V1TI_TI + vuq __builtin_vec_ld (signed long, const unsigned __int128 *); + LVX_V1TI 
LVX_V1TI_UTI + vf __builtin_vec_ld (signed long, const vf *); + LVX_V4SF LVX_V4SF_VF + vf __builtin_vec_ld (signed long, const float *); + LVX_V4SF LVX_V4SF_F + vd __builtin_vec_ld (signed long, const vd *); + LVX_V2DF LVX_V2DF_VD + vd __builtin_vec_ld (signed long, const double *); + LVX_V2DF LVX_V2DF_D +; The following variants are deprecated. + vsi __builtin_vec_ld (signed long, const long *); + LVX_V4SI LVX_V4SI_SL + vui __builtin_vec_ld (signed long, const unsigned long *); + LVX_V4SI LVX_V4SI_UL + +[VEC_LDE, vec_lde, __builtin_vec_lde] + vsc __builtin_vec_lde (signed long, const signed char *); + LVEBX LVEBX_SC + vuc __builtin_vec_lde (signed long, const unsigned char *); + LVEBX LVEBX_UC + vss __builtin_vec_lde (signed long, const signed short *); + LVEHX LVEHX_SS + vus __builtin_vec_lde (signed long, const unsigned short *); + LVEHX LVEHX_US + vsi __builtin_vec_lde (signed long, const signed int *); + LVEWX LVEWX_SI + vui __builtin_vec_lde (signed long, const unsigned int *); + LVEWX LVEWX_UI + vf __builtin_vec_lde (signed long, const float *); + LVEWX LVEWX_F +; The following variants are deprecated. + vsi __builtin_vec_lde (signed long, const long *); + LVEWX LVEWX_SL + vui __builtin_vec_lde (signed long, const unsigned long *); + LVEWX LVEWX_UL + +[VEC_LDL, vec_ldl, __builtin_vec_ldl] + vsc __builtin_vec_ldl (signed long, const vsc *); + LVXL_V16QI LVXL_V16QI_VSC + vsc __builtin_vec_ldl (signed long, const signed char *); + LVXL_V16QI LVXL_V16QI_SC + vuc __builtin_vec_ldl (signed long, const vuc *); + LVXL_V16QI LVXL_V16QI_VUC + vuc __builtin_vec_ldl (signed long, const unsigned char *); + LVXL_V16QI LVXL_V16QI_UC + vbc __builtin_vec_ldl (signed long, const vbc *); + LVXL_V16QI LVXL_V16QI_VBC + vss __builtin_vec_ldl (signed long, const vss *); + LVXL_V8HI LVXL_V8HI_VSS + vss __builtin_vec_ldl (signed long, const signed short *); + LVXL_V8HI LVXL_V8HI_SS + vus __builtin_vec_ldl (signed long, const vus *); + LVXL_V8HI LVXL_V8HI_VUS + vus __builtin_vec_ldl (signed long, const unsigned short *); + LVXL_V8HI LVXL_V8HI_US + vbs __builtin_vec_ldl (signed long, const vbs *); + LVXL_V8HI LVXL_V8HI_VBS + vp __builtin_vec_ldl (signed long, const vp *); + LVXL_V8HI LVXL_V8HI_VP + vsi __builtin_vec_ldl (signed long, const vsi *); + LVXL_V4SI LVXL_V4SI_VSI + vsi __builtin_vec_ldl (signed long, const signed int *); + LVXL_V4SI LVXL_V4SI_SI + vui __builtin_vec_ldl (signed long, const vui *); + LVXL_V4SI LVXL_V4SI_VUI + vui __builtin_vec_ldl (signed long, const unsigned int *); + LVXL_V4SI LVXL_V4SI_UI + vbi __builtin_vec_ldl (signed long, const vbi *); + LVXL_V4SI LVXL_V4SI_VBI + vsll __builtin_vec_ldl (signed long, const vsll *); + LVXL_V2DI LVXL_V2DI_VSLL + vsll __builtin_vec_ldl (signed long, const signed long long *); + LVXL_V2DI LVXL_V2DI_SLL + vull __builtin_vec_ldl (signed long, const vull *); + LVXL_V2DI LVXL_V2DI_VULL + vull __builtin_vec_ldl (signed long, const unsigned long long *); + LVXL_V2DI LVXL_V2DI_ULL + vbll __builtin_vec_ldl (signed long, const vbll *); + LVXL_V2DI LVXL_V2DI_VBLL + vf __builtin_vec_ldl (signed long, const vf *); + LVXL_V4SF LVXL_V4SF_VF + vf __builtin_vec_ldl (signed long, const float *); + LVXL_V4SF LVXL_V4SF_F + vd __builtin_vec_ldl (signed long, const vd *); + LVXL_V2DF LVXL_V2DF_VD + vd __builtin_vec_ldl (signed long, const double *); + LVXL_V2DF LVXL_V2DF_D + +[VEC_LOGE, vec_loge, __builtin_vec_loge] + vf __builtin_vec_loge (vf); + VLOGEFP + +[VEC_LVLX, vec_lvlx, __builtin_vec_lvlx, __PPU__] + vbc __builtin_vec_lvlx (signed long, const vbc *); + LVLX 
LVLX_VBC + vsc __builtin_vec_lvlx (signed long, const vsc *); + LVLX LVLX_VSC + vsc __builtin_vec_lvlx (signed long, const signed char *); + LVLX LVLX_SC + vuc __builtin_vec_lvlx (signed long, const vuc *); + LVLX LVLX_VUC + vuc __builtin_vec_lvlx (signed long, const unsigned char *); + LVLX LVLX_UC + vbs __builtin_vec_lvlx (signed long, const vbs *); + LVLX LVLX_VBS + vss __builtin_vec_lvlx (signed long, const vss *); + LVLX LVLX_VSS + vss __builtin_vec_lvlx (signed long, const signed short *); + LVLX LVLX_SS + vus __builtin_vec_lvlx (signed long, const vus *); + LVLX LVLX_VUS + vus __builtin_vec_lvlx (signed long, const unsigned short *); + LVLX LVLX_US + vp __builtin_vec_lvlx (signed long, const vp *); + LVLX LVLX_VP + vbi __builtin_vec_lvlx (signed long, const vbi *); + LVLX LVLX_VBI + vsi __builtin_vec_lvlx (signed long, const vsi *); + LVLX LVLX_VSI + vsi __builtin_vec_lvlx (signed long, const signed int *); + LVLX LVLX_SI + vui __builtin_vec_lvlx (signed long, const vui *); + LVLX LVLX_VUI + vui __builtin_vec_lvlx (signed long, const unsigned int *); + LVLX LVLX_UI + vf __builtin_vec_lvlx (signed long, const vf *); + LVLX LVLX_VF + vf __builtin_vec_lvlx (signed long, const float *); + LVLX LVLX_F + +[VEC_LVLXL, vec_lvlxl, __builtin_vec_lvlxl, __PPU__] + vbc __builtin_vec_lvlxl (signed long, const vbc *); + LVLXL LVLXL_VBC + vsc __builtin_vec_lvlxl (signed long, const vsc *); + LVLXL LVLXL_VSC + vsc __builtin_vec_lvlxl (signed long, const signed char *); + LVLXL LVLXL_SC + vuc __builtin_vec_lvlxl (signed long, const vuc *); + LVLXL LVLXL_VUC + vuc __builtin_vec_lvlxl (signed long, const unsigned char *); + LVLXL LVLXL_UC + vbs __builtin_vec_lvlxl (signed long, const vbs *); + LVLXL LVLXL_VBS + vss __builtin_vec_lvlxl (signed long, const vss *); + LVLXL LVLXL_VSS + vss __builtin_vec_lvlxl (signed long, const signed short *); + LVLXL LVLXL_SS + vus __builtin_vec_lvlxl (signed long, const vus *); + LVLXL LVLXL_VUS + vus __builtin_vec_lvlxl (signed long, const unsigned short *); + LVLXL LVLXL_US + vp __builtin_vec_lvlxl (signed long, const vp *); + LVLXL LVLXL_VP + vbi __builtin_vec_lvlxl (signed long, const vbi *); + LVLXL LVLXL_VBI + vsi __builtin_vec_lvlxl (signed long, const vsi *); + LVLXL LVLXL_VSI + vsi __builtin_vec_lvlxl (signed long, const signed int *); + LVLXL LVLXL_SI + vui __builtin_vec_lvlxl (signed long, const vui *); + LVLXL LVLXL_VUI + vui __builtin_vec_lvlxl (signed long, const unsigned int *); + LVLXL LVLXL_UI + vf __builtin_vec_lvlxl (signed long, const vf *); + LVLXL LVLXL_VF + vf __builtin_vec_lvlxl (signed long, const float *); + LVLXL LVLXL_F + +[VEC_LVRX, vec_lvrx, __builtin_vec_lvrx, __PPU__] + vbc __builtin_vec_lvrx (signed long, const vbc *); + LVRX LVRX_VBC + vsc __builtin_vec_lvrx (signed long, const vsc *); + LVRX LVRX_VSC + vsc __builtin_vec_lvrx (signed long, const signed char *); + LVRX LVRX_SC + vuc __builtin_vec_lvrx (signed long, const vuc *); + LVRX LVRX_VUC + vuc __builtin_vec_lvrx (signed long, const unsigned char *); + LVRX LVRX_UC + vbs __builtin_vec_lvrx (signed long, const vbs *); + LVRX LVRX_VBS + vss __builtin_vec_lvrx (signed long, const vss *); + LVRX LVRX_VSS + vss __builtin_vec_lvrx (signed long, const signed short *); + LVRX LVRX_SS + vus __builtin_vec_lvrx (signed long, const vus *); + LVRX LVRX_VUS + vus __builtin_vec_lvrx (signed long, const unsigned short *); + LVRX LVRX_US + vp __builtin_vec_lvrx (signed long, const vp *); + LVRX LVRX_VP + vbi __builtin_vec_lvrx (signed long, const vbi *); + LVRX LVRX_VBI + vsi __builtin_vec_lvrx 
(signed long, const vsi *); + LVRX LVRX_VSI + vsi __builtin_vec_lvrx (signed long, const signed int *); + LVRX LVRX_SI + vui __builtin_vec_lvrx (signed long, const vui *); + LVRX LVRX_VUI + vui __builtin_vec_lvrx (signed long, const unsigned int *); + LVRX LVRX_UI + vf __builtin_vec_lvrx (signed long, const vf *); + LVRX LVRX_VF + vf __builtin_vec_lvrx (signed long, const float *); + LVRX LVRX_F + +[VEC_LVRXL, vec_lvrxl, __builtin_vec_lvrxl, __PPU__] + vbc __builtin_vec_lvrxl (signed long, const vbc *); + LVRXL LVRXL_VBC + vsc __builtin_vec_lvrxl (signed long, const vsc *); + LVRXL LVRXL_VSC + vsc __builtin_vec_lvrxl (signed long, const signed char *); + LVRXL LVRXL_SC + vuc __builtin_vec_lvrxl (signed long, const vuc *); + LVRXL LVRXL_VUC + vuc __builtin_vec_lvrxl (signed long, const unsigned char *); + LVRXL LVRXL_UC + vbs __builtin_vec_lvrxl (signed long, const vbs *); + LVRXL LVRXL_VBS + vss __builtin_vec_lvrxl (signed long, const vss *); + LVRXL LVRXL_VSS + vss __builtin_vec_lvrxl (signed long, const signed short *); + LVRXL LVRXL_SS + vus __builtin_vec_lvrxl (signed long, const vus *); + LVRXL LVRXL_VUS + vus __builtin_vec_lvrxl (signed long, const unsigned short *); + LVRXL LVRXL_US + vp __builtin_vec_lvrxl (signed long, const vp *); + LVRXL LVRXL_VP + vbi __builtin_vec_lvrxl (signed long, const vbi *); + LVRXL LVRXL_VBI + vsi __builtin_vec_lvrxl (signed long, const vsi *); + LVRXL LVRXL_VSI + vsi __builtin_vec_lvrxl (signed long, const signed int *); + LVRXL LVRXL_SI + vui __builtin_vec_lvrxl (signed long, const vui *); + LVRXL LVRXL_VUI + vui __builtin_vec_lvrxl (signed long, const unsigned int *); + LVRXL LVRXL_UI + vf __builtin_vec_lvrxl (signed long, const vf *); + LVRXL LVRXL_VF + vf __builtin_vec_lvrxl (signed long, const float *); + LVRXL LVRXL_F + +[VEC_LVSL, vec_lvsl, __builtin_vec_lvsl] + vuc __builtin_vec_lvsl (signed long, const unsigned char *); + LVSL LVSL_UC + vuc __builtin_vec_lvsl (signed long, const signed char *); + LVSL LVSL_SC + vuc __builtin_vec_lvsl (signed long, const char *); + LVSL LVSL_STR + vuc __builtin_vec_lvsl (signed long, const unsigned short *); + LVSL LVSL_US + vuc __builtin_vec_lvsl (signed long, const signed short *); + LVSL LVSL_SS + vuc __builtin_vec_lvsl (signed long, const unsigned int *); + LVSL LVSL_UI + vuc __builtin_vec_lvsl (signed long, const signed int *); + LVSL LVSL_SI + vuc __builtin_vec_lvsl (signed long, const unsigned long *); + LVSL LVSL_UL + vuc __builtin_vec_lvsl (signed long, const signed long *); + LVSL LVSL_SL + vuc __builtin_vec_lvsl (signed long, const unsigned long long *); + LVSL LVSL_ULL + vuc __builtin_vec_lvsl (signed long, const signed long long *); + LVSL LVSL_SLL + vuc __builtin_vec_lvsl (signed long, const float *); + LVSL LVSL_F + vuc __builtin_vec_lvsl (signed long, const double *); + LVSL LVSL_D + +[VEC_LVSR, vec_lvsr, __builtin_vec_lvsr] + vuc __builtin_vec_lvsr (signed long, const unsigned char *); + LVSR LVSR_UC + vuc __builtin_vec_lvsr (signed long, const signed char *); + LVSR LVSR_SC + vuc __builtin_vec_lvsr (signed long, const char *); + LVSR LVSR_STR + vuc __builtin_vec_lvsr (signed long, const unsigned short *); + LVSR LVSR_US + vuc __builtin_vec_lvsr (signed long, const signed short *); + LVSR LVSR_SS + vuc __builtin_vec_lvsr (signed long, const unsigned int *); + LVSR LVSR_UI + vuc __builtin_vec_lvsr (signed long, const signed int *); + LVSR LVSR_SI + vuc __builtin_vec_lvsr (signed long, const unsigned long *); + LVSR LVSR_UL + vuc __builtin_vec_lvsr (signed long, const signed long *); + LVSR 
LVSR_SL + vuc __builtin_vec_lvsr (signed long, const unsigned long long *); + LVSR LVSR_ULL + vuc __builtin_vec_lvsr (signed long, const signed long long *); + LVSR LVSR_SLL + vuc __builtin_vec_lvsr (signed long, const float *); + LVSR LVSR_F + vuc __builtin_vec_lvsr (signed long, const double *); + LVSR LVSR_D + +[VEC_LXVL, vec_xl_len, __builtin_vec_lxvl, _ARCH_PPC64_PWR9] + vsc __builtin_vec_lxvl (const signed char *, unsigned int); + LXVL LXVL_VSC + vuc __builtin_vec_lxvl (const unsigned char *, unsigned int); + LXVL LXVL_VUC + vss __builtin_vec_lxvl (const signed short *, unsigned int); + LXVL LXVL_VSS + vus __builtin_vec_lxvl (const unsigned short *, unsigned int); + LXVL LXVL_VUS + vsi __builtin_vec_lxvl (const signed int *, unsigned int); + LXVL LXVL_VSI + vui __builtin_vec_lxvl (const unsigned int *, unsigned int); + LXVL LXVL_VUI + vsll __builtin_vec_lxvl (const signed long long *, unsigned int); + LXVL LXVL_VSLL + vull __builtin_vec_lxvl (const unsigned long long *, unsigned int); + LXVL LXVL_VULL + vsq __builtin_vec_lxvl (const signed __int128 *, unsigned int); + LXVL LXVL_VSQ + vuq __builtin_vec_lxvl (const unsigned __int128 *, unsigned int); + LXVL LXVL_VUQ + vf __builtin_vec_lxvl (const float *, unsigned int); + LXVL LXVL_VF + vd __builtin_vec_lxvl (const double *, unsigned int); + LXVL LXVL_VD + +; #### XVMADDSP(TARGET_VSX);VMADDFP +[VEC_MADD, vec_madd, __builtin_vec_madd] + vss __builtin_vec_madd (vss, vss, vss); + VMLADDUHM VMLADDUHM_VSS + vss __builtin_vec_madd (vss, vus, vus); + VMLADDUHM VMLADDUHM_VSSVUS + vss __builtin_vec_madd (vus, vss, vss); + VMLADDUHM VMLADDUHM_VUSVSS + vus __builtin_vec_madd (vus, vus, vus); + VMLADDUHM VMLADDUHM_VUS + vf __builtin_vec_madd (vf, vf, vf); + VMADDFP + vd __builtin_vec_madd (vd, vd, vd); + XVMADDDP + +[VEC_MADDS, vec_madds, __builtin_vec_madds] + vss __builtin_vec_madds (vss, vss, vss); + VMHADDSHS + +; #### XVMAXSP{TARGET_VSX};VMAXFP +[VEC_MAX, vec_max, __builtin_vec_max] + vsc __builtin_vec_max (vsc, vsc); + VMAXSB + vuc __builtin_vec_max (vuc, vuc); + VMAXUB + vss __builtin_vec_max (vss, vss); + VMAXSH + vus __builtin_vec_max (vus, vus); + VMAXUH + vsi __builtin_vec_max (vsi, vsi); + VMAXSW + vui __builtin_vec_max (vui, vui); + VMAXUW + vsll __builtin_vec_max (vsll, vsll); + VMAXSD + vull __builtin_vec_max (vull, vull); + VMAXUD + vf __builtin_vec_max (vf, vf); + VMAXFP + vd __builtin_vec_max (vd, vd); + XVMAXDP +; The following variants are deprecated. 
+ vsc __builtin_vec_max (vsc, vbc); + VMAXSB VMAXSB_SB + vsc __builtin_vec_max (vbc, vsc); + VMAXSB VMAXSB_BS + vuc __builtin_vec_max (vuc, vbc); + VMAXUB VMAXUB_UB + vuc __builtin_vec_max (vbc, vuc); + VMAXUB VMAXUB_BU + vss __builtin_vec_max (vss, vbs); + VMAXSH VMAXSH_SB + vss __builtin_vec_max (vbs, vss); + VMAXSH VMAXSH_BS + vus __builtin_vec_max (vus, vbs); + VMAXUH VMAXUH_UB + vus __builtin_vec_max (vbs, vus); + VMAXUH VMAXUH_BU + vsi __builtin_vec_max (vsi, vbi); + VMAXSW VMAXSW_SB + vsi __builtin_vec_max (vbi, vsi); + VMAXSW VMAXSW_BS + vui __builtin_vec_max (vui, vbi); + VMAXUW VMAXUW_UB + vui __builtin_vec_max (vbi, vui); + VMAXUW VMAXUW_BU + vsll __builtin_vec_max (vsll, vbll); + VMAXSD VMAXSD_SB + vsll __builtin_vec_max (vbll, vsll); + VMAXSD VMAXSD_BS + vull __builtin_vec_max (vull, vbll); + VMAXUD VMAXUD_UB + vull __builtin_vec_max (vbll, vull); + VMAXUD VMAXUD_BU + +[VEC_MERGEE, vec_mergee, __builtin_vec_vmrgew, _ARCH_PWR8] + vsi __builtin_vec_vmrgew (vsi, vsi); + VMRGEW_V4SI VMRGEW_VSI + vui __builtin_vec_vmrgew (vui, vui); + VMRGEW_V4SI VMRGEW_VUI + vbi __builtin_vec_vmrgew (vbi, vbi); + VMRGEW_V4SI VMRGEW_VBI + vsll __builtin_vec_vmrgew (vsll, vsll); + VMRGEW_V2DI VMRGEW_VSLL + vull __builtin_vec_vmrgew (vull, vull); + VMRGEW_V2DI VMRGEW_VULL + vbll __builtin_vec_vmrgew (vbll, vbll); + VMRGEW_V2DI VMRGEW_VBLL + vf __builtin_vec_vmrgew (vf, vf); + VMRGEW_V4SF + vd __builtin_vec_vmrgew (vd, vd); + VMRGEW_V2DF + +[VEC_MERGEH, vec_mergeh, __builtin_vec_mergeh] + vbc __builtin_vec_mergeh (vbc, vbc); + VMRGHB VMRGHB_VBC + vsc __builtin_vec_mergeh (vsc, vsc); + VMRGHB VMRGHB_VSC + vuc __builtin_vec_mergeh (vuc, vuc); + VMRGHB VMRGHB_VUC + vbs __builtin_vec_mergeh (vbs, vbs); + VMRGHH VMRGHH_VBS + vss __builtin_vec_mergeh (vss, vss); + VMRGHH VMRGHH_VSS + vus __builtin_vec_mergeh (vus, vus); + VMRGHH VMRGHH_VUS + vp __builtin_vec_mergeh (vp, vp); + VMRGHH VMRGHH_VP + vbi __builtin_vec_mergeh (vbi, vbi); + VMRGHW VMRGHW_VBI + vsi __builtin_vec_mergeh (vsi, vsi); + VMRGHW VMRGHW_VSI + vui __builtin_vec_mergeh (vui, vui); + VMRGHW VMRGHW_VUI + vbll __builtin_vec_mergeh (vbll, vbll); + VEC_MERGEH_V2DI VEC_MERGEH_VBLL + vsll __builtin_vec_mergeh (vsll, vsll); + VEC_MERGEH_V2DI VEC_MERGEH_VSLL + vull __builtin_vec_mergeh (vull, vull); + VEC_MERGEH_V2DI VEC_MERGEH_VULL + vf __builtin_vec_mergeh (vf, vf); + VMRGHW VMRGHW_VF + vd __builtin_vec_mergeh (vd, vd); + VEC_MERGEH_V2DF +; The following variants are deprecated. 
+ vsll __builtin_vec_mergeh (vsll, vbll); + VEC_MERGEH_V2DI VEC_MERGEH_VSLL_VBLL + vsll __builtin_vec_mergeh (vbll, vsll); + VEC_MERGEH_V2DI VEC_MERGEH_VBLL_VSLL + vull __builtin_vec_mergeh (vull, vbll); + VEC_MERGEH_V2DI VEC_MERGEH_VULL_VBLL + vull __builtin_vec_mergeh (vbll, vull); + VEC_MERGEH_V2DI VEC_MERGEH_VBLL_VULL + +[VEC_MERGEL, vec_mergel, __builtin_vec_mergel] + vbc __builtin_vec_mergel (vbc, vbc); + VMRGLB VMRGLB_VBC + vsc __builtin_vec_mergel (vsc, vsc); + VMRGLB VMRGLB_VSC + vuc __builtin_vec_mergel (vuc, vuc); + VMRGLB VMRGLB_VUC + vbs __builtin_vec_mergel (vbs, vbs); + VMRGLH VMRGLH_VBS + vss __builtin_vec_mergel (vss, vss); + VMRGLH VMRGLH_VSS + vus __builtin_vec_mergel (vus, vus); + VMRGLH VMRGLH_VUS + vp __builtin_vec_mergel (vp, vp); + VMRGLH VMRGLH_VP + vbi __builtin_vec_mergel (vbi, vbi); + VMRGLW VMRGLW_VBI + vsi __builtin_vec_mergel (vsi, vsi); + VMRGLW VMRGLW_VSI + vui __builtin_vec_mergel (vui, vui); + VMRGLW VMRGLW_VUI + vbll __builtin_vec_mergel (vbll, vbll); + VEC_MERGEL_V2DI VEC_MERGEL_VBLL + vsll __builtin_vec_mergel (vsll, vsll); + VEC_MERGEL_V2DI VEC_MERGEL_VSLL + vull __builtin_vec_mergel (vull, vull); + VEC_MERGEL_V2DI VEC_MERGEL_VULL + vf __builtin_vec_mergel (vf, vf); + VMRGLW VMRGLW_VF + vd __builtin_vec_mergel (vd, vd); + VEC_MERGEL_V2DF +; The following variants are deprecated. + vsll __builtin_vec_mergel (vsll, vbll); + VEC_MERGEL_V2DI VEC_MERGEL_VSLL_VBLL + vsll __builtin_vec_mergel (vbll, vsll); + VEC_MERGEL_V2DI VEC_MERGEL_VBLL_VSLL + vull __builtin_vec_mergel (vull, vbll); + VEC_MERGEL_V2DI VEC_MERGEL_VULL_VBLL + vull __builtin_vec_mergel (vbll, vull); + VEC_MERGEL_V2DI VEC_MERGEL_VBLL_VULL + +[VEC_MERGEO, vec_mergeo, __builtin_vec_vmrgow, _ARCH_PWR8] + vsi __builtin_vec_vmrgow (vsi, vsi); + VMRGOW_V4SI VMRGOW_VSI + vui __builtin_vec_vmrgow (vui, vui); + VMRGOW_V4SI VMRGOW_VUI + vbi __builtin_vec_vmrgow (vbi, vbi); + VMRGOW_V4SI VMRGOW_VBI + vsll __builtin_vec_vmrgow (vsll, vsll); + VMRGOW_V2DI VMRGOW_VSLL + vull __builtin_vec_vmrgow (vull, vull); + VMRGOW_V2DI VMRGOW_VULL + vbll __builtin_vec_vmrgow (vbll, vbll); + VMRGOW_V2DI VMRGOW_VBLL + vf __builtin_vec_vmrgow (vf, vf); + VMRGOW_V4SF + vd __builtin_vec_vmrgow (vd, vd); + VMRGOW_V2DF + +[VEC_MFVSCR, vec_mfvscr, __builtin_vec_mfvscr] + vus __builtin_vec_mfvscr (); + MFVSCR + +; #### XVMINSP{TARGET_VSX};VMINFP +[VEC_MIN, vec_min, __builtin_vec_min] + vsc __builtin_vec_min (vsc, vsc); + VMINSB + vuc __builtin_vec_min (vuc, vuc); + VMINUB + vss __builtin_vec_min (vss, vss); + VMINSH + vus __builtin_vec_min (vus, vus); + VMINUH + vsi __builtin_vec_min (vsi, vsi); + VMINSW + vui __builtin_vec_min (vui, vui); + VMINUW + vsll __builtin_vec_min (vsll, vsll); + VMINSD + vull __builtin_vec_min (vull, vull); + VMINUD + vf __builtin_vec_min (vf, vf); + VMINFP + vd __builtin_vec_min (vd, vd); + XVMINDP +; The following variants are deprecated. 
+ vsc __builtin_vec_min (vsc, vbc); + VMINSB VMINSB_SB + vsc __builtin_vec_min (vbc, vsc); + VMINSB VMINSB_BS + vuc __builtin_vec_min (vuc, vbc); + VMINUB VMINUB_UB + vuc __builtin_vec_min (vbc, vuc); + VMINUB VMINUB_BU + vss __builtin_vec_min (vss, vbs); + VMINSH VMINSH_SB + vss __builtin_vec_min (vbs, vss); + VMINSH VMINSH_BS + vus __builtin_vec_min (vus, vbs); + VMINUH VMINUH_UB + vus __builtin_vec_min (vbs, vus); + VMINUH VMINUH_BU + vsi __builtin_vec_min (vsi, vbi); + VMINSW VMINSW_SB + vsi __builtin_vec_min (vbi, vsi); + VMINSW VMINSW_BS + vui __builtin_vec_min (vui, vbi); + VMINUW VMINUW_UB + vui __builtin_vec_min (vbi, vui); + VMINUW VMINUW_BU + vsll __builtin_vec_min (vsll, vbll); + VMINSD VMINSD_SB + vsll __builtin_vec_min (vbll, vsll); + VMINSD VMINSD_BS + vull __builtin_vec_min (vull, vbll); + VMINUD VMINUD_UB + vull __builtin_vec_min (vbll, vull); + VMINUD VMINUD_BU + +[VEC_MLADD, vec_mladd, __builtin_vec_mladd] + vss __builtin_vec_mladd (vss, vss, vss); + VMLADDUHM VMLADDUHM_VSS2 + vss __builtin_vec_mladd (vss, vus, vus); + VMLADDUHM VMLADDUHM_VSSVUS2 + vss __builtin_vec_mladd (vus, vss, vss); + VMLADDUHM VMLADDUHM_VUSVSS2 + vus __builtin_vec_mladd (vus, vus, vus); + VMLADDUHM VMLADDUHM_VUS2 + +[VEC_MOD, vec_mod, __builtin_vec_mod, _ARCH_PWR10] + vsi __builtin_vec_mod (vsi, vsi); + VMODSW + vui __builtin_vec_mod (vui, vui); + VMODUW + vsll __builtin_vec_mod (vsll, vsll); + VMODSD + vull __builtin_vec_mod (vull, vull); + VMODUD + vsq __builtin_vec_mod (vsq, vsq); + MODS_V1TI + vuq __builtin_vec_mod (vuq, vuq); + MODU_V1TI + +[VEC_MRADDS, vec_mradds, __builtin_vec_mradds] + vss __builtin_vec_mradds (vss, vss, vss); + VMHRADDSHS + +[VEC_MSUB, vec_msub, __builtin_vec_msub, __VSX__] + vf __builtin_vec_msub (vf, vf, vf); + XVMSUBSP + vd __builtin_vec_msub (vd, vd, vd); + XVMSUBDP + +[VEC_MSUM, vec_msum, __builtin_vec_msum] + vui __builtin_vec_msum (vuc, vuc, vui); + VMSUMUBM + vsi __builtin_vec_msum (vsc, vuc, vsi); + VMSUMMBM + vui __builtin_vec_msum (vus, vus, vui); + VMSUMUHM + vsi __builtin_vec_msum (vss, vss, vsi); + VMSUMSHM + vsq __builtin_vec_msum (vsll, vsll, vsq); + VMSUMUDM VMSUMUDM_S + vuq __builtin_vec_msum (vull, vull, vuq); + VMSUMUDM VMSUMUDM_U + +[VEC_MSUMS, vec_msums, __builtin_vec_msums] + vui __builtin_vec_msums (vus, vus, vui); + VMSUMUHS + vsi __builtin_vec_msums (vss, vss, vsi); + VMSUMSHS + +[VEC_MTVSCR, vec_mtvscr, __builtin_vec_mtvscr] + void __builtin_vec_mtvscr (vbc); + MTVSCR MTVSCR_VBC + void __builtin_vec_mtvscr (vsc); + MTVSCR MTVSCR_VSC + void __builtin_vec_mtvscr (vuc); + MTVSCR MTVSCR_VUC + void __builtin_vec_mtvscr (vbs); + MTVSCR MTVSCR_VBS + void __builtin_vec_mtvscr (vss); + MTVSCR MTVSCR_VSS + void __builtin_vec_mtvscr (vus); + MTVSCR MTVSCR_VUS + void __builtin_vec_mtvscr (vp); + MTVSCR MTVSCR_VP + void __builtin_vec_mtvscr (vbi); + MTVSCR MTVSCR_VBI + void __builtin_vec_mtvscr (vsi); + MTVSCR MTVSCR_VSI + void __builtin_vec_mtvscr (vui); + MTVSCR MTVSCR_VUI + +; Note that the entries for VEC_MUL are currently ignored. See rs6000-c.c: +; altivec_resolve_overloaded_builtin, where there is special-case code for +; VEC_MUL. TODO: Is this really necessary? Investigate. Seven missing +; prototypes here...no corresponding builtins. Also added "vmulld" in P10 +; which could be used instead of MUL_V2DI, conditionally? 
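+; A sketch of that special case (assuming the resolver simply folds the
+; call to an ordinary element-wise multiply, which would also account for
+; the seven missing integer prototypes noted above):
+;
+;   /* Hypothetical C equivalent of vec_mul (a, b) for vector int.  */
+;   vector signed int mul_v4si (vector signed int a, vector signed int b)
+;   {
+;     return a * b;  /* GCC's vector extension multiplies elementwise.  */
+;   }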
+[VEC_MUL, vec_mul, __builtin_vec_mul] + vsll __builtin_vec_mul (vsll, vsll); + MUL_V2DI + vf __builtin_vec_mul (vf, vf); + XVMULSP + vd __builtin_vec_mul (vd, vd); + XVMULDP + +[VEC_MULE, vec_mule, __builtin_vec_mule] + vss __builtin_vec_mule (vsc, vsc); + VMULESB + vus __builtin_vec_mule (vuc, vuc); + VMULEUB + vsi __builtin_vec_mule (vss, vss); + VMULESH + vui __builtin_vec_mule (vus, vus); + VMULEUH + vsll __builtin_vec_mule (vsi, vsi); + VMULESW + vull __builtin_vec_mule (vui, vui); + VMULEUW + vsq __builtin_vec_mule (vsll, vsll); + VMULESD + vuq __builtin_vec_mule (vull, vull); + VMULEUD + +[VEC_MULH, vec_mulh, __builtin_vec_mulh, _ARCH_PWR10] + vsi __builtin_vec_mulh (vsi, vsi); + VMULHSW + vui __builtin_vec_mulh (vui, vui); + VMULHUW + vsll __builtin_vec_mulh (vsll, vsll); + VMULHSD + vull __builtin_vec_mulh (vull, vull); + VMULHUD + +[VEC_MULO, vec_mulo, __builtin_vec_mulo] + vss __builtin_vec_mulo (vsc, vsc); + VMULOSB + vus __builtin_vec_mulo (vuc, vuc); + VMULOUB + vsi __builtin_vec_mulo (vss, vss); + VMULOSH + vui __builtin_vec_mulo (vus, vus); + VMULOUH + vsll __builtin_vec_mulo (vsi, vsi); + VMULOSW + vull __builtin_vec_mulo (vui, vui); + VMULOUW + vsq __builtin_vec_mulo (vsll, vsll); + VMULOSD + vuq __builtin_vec_mulo (vull, vull); + VMULOUD + +[VEC_NABS, vec_nabs, __builtin_vec_nabs] + vsc __builtin_vec_nabs (vsc); + NABS_V16QI + vss __builtin_vec_nabs (vss); + NABS_V8HI + vsi __builtin_vec_nabs (vsi); + NABS_V4SI + vsll __builtin_vec_nabs (vsll); + NABS_V2DI + vf __builtin_vec_nabs (vf); + NABS_V4SF + vd __builtin_vec_nabs (vd); + NABS_V2DF + +[VEC_NAND, vec_nand, __builtin_vec_nand, _ARCH_PWR8] + vsc __builtin_vec_nand (vsc, vsc); + NAND_V16QI + vuc __builtin_vec_nand (vuc, vuc); + NAND_V16QI_UNS NAND_VUC + vbc __builtin_vec_nand (vbc, vbc); + NAND_V16QI_UNS NAND_VBC + vss __builtin_vec_nand (vss, vss); + NAND_V8HI + vus __builtin_vec_nand (vus, vus); + NAND_V8HI_UNS NAND_VUS + vbs __builtin_vec_nand (vbs, vbs); + NAND_V8HI_UNS NAND_VBS + vsi __builtin_vec_nand (vsi, vsi); + NAND_V4SI + vui __builtin_vec_nand (vui, vui); + NAND_V4SI_UNS NAND_VUI + vbi __builtin_vec_nand (vbi, vbi); + NAND_V4SI_UNS NAND_VBI + vsll __builtin_vec_nand (vsll, vsll); + NAND_V2DI + vull __builtin_vec_nand (vull, vull); + NAND_V2DI_UNS NAND_VULL + vbll __builtin_vec_nand (vbll, vbll); + NAND_V2DI_UNS NAND_VBLL + vf __builtin_vec_nand (vf, vf); + NAND_V4SF + vd __builtin_vec_nand (vd, vd); + NAND_V2DF +; The following variants are deprecated. 
+ vsc __builtin_vec_nand (vbc, vsc); + NAND_V16QI NAND_VBC_VSC + vsc __builtin_vec_nand (vsc, vbc); + NAND_V16QI NAND_VSC_VBC + vuc __builtin_vec_nand (vbc, vuc); + NAND_V16QI_UNS NAND_VBC_VUC + vuc __builtin_vec_nand (vuc, vbc); + NAND_V16QI_UNS NAND_VUC_VBC + vss __builtin_vec_nand (vbs, vss); + NAND_V8HI NAND_VBS_VSS + vss __builtin_vec_nand (vss, vbs); + NAND_V8HI NAND_VSS_VBS + vus __builtin_vec_nand (vbs, vus); + NAND_V8HI_UNS NAND_VBS_VUS + vus __builtin_vec_nand (vus, vbs); + NAND_V8HI_UNS NAND_VUS_VBS + vsi __builtin_vec_nand (vbi, vsi); + NAND_V4SI NAND_VBI_VSI + vsi __builtin_vec_nand (vsi, vbi); + NAND_V4SI NAND_VSI_VBI + vui __builtin_vec_nand (vbi, vui); + NAND_V4SI_UNS NAND_VBI_VUI + vui __builtin_vec_nand (vui, vbi); + NAND_V4SI_UNS NAND_VUI_VBI + vsll __builtin_vec_nand (vbll, vsll); + NAND_V2DI NAND_VBLL_VSLL + vsll __builtin_vec_nand (vsll, vbll); + NAND_V2DI NAND_VSLL_VBLL + vull __builtin_vec_nand (vbll, vull); + NAND_V2DI_UNS NAND_VBLL_VULL + vull __builtin_vec_nand (vull, vbll); + NAND_V2DI_UNS NAND_VULL_VBLL + +[VEC_NCIPHER_BE, vec_ncipher_be, __builtin_vec_vncipher_be, _ARCH_PWR8] + vuc __builtin_vec_vncipher_be (vuc, vuc); + VNCIPHER_BE + +[VEC_NCIPHERLAST_BE, vec_ncipherlast_be, __builtin_vec_vncipherlast_be, _ARCH_PWR8] + vuc __builtin_vec_vncipherlast_be (vuc, vuc); + VNCIPHERLAST_BE + +[VEC_NEARBYINT, vec_nearbyint, __builtin_vec_nearbyint, __VSX__] + vf __builtin_vec_nearbyint (vf); + XVRSPI XVRSPI_NBI + vd __builtin_vec_nearbyint (vd); + XVRDPI XVRDPI_NBI + +[VEC_NEG, vec_neg, __builtin_vec_neg] + vsc __builtin_vec_neg (vsc); + NEG_V16QI + vss __builtin_vec_neg (vss); + NEG_V8HI + vsi __builtin_vec_neg (vsi); + NEG_V4SI + vsll __builtin_vec_neg (vsll); + NEG_V2DI + vf __builtin_vec_neg (vf); + NEG_V4SF + vd __builtin_vec_neg (vd); + NEG_V2DF + +[VEC_NMADD, vec_nmadd, __builtin_vec_nmadd, __VSX__] + vf __builtin_vec_nmadd (vf, vf, vf); + XVNMADDSP + vd __builtin_vec_nmadd (vd, vd, vd); + XVNMADDDP + +; #### XVNMSUBDP{TARGET_VSX};VNMSUBFP +[VEC_NMSUB, vec_nmsub, __builtin_vec_nmsub] + vf __builtin_vec_nmsub (vf, vf, vf); + VNMSUBFP + vd __builtin_vec_nmsub (vd, vd, vd); + XVNMSUBDP + +[VEC_NOR, vec_nor, __builtin_vec_nor] + vsc __builtin_vec_nor (vsc, vsc); + VNOR_V16QI + vuc __builtin_vec_nor (vuc, vuc); + VNOR_V16QI_UNS VNOR_V16QI_U + vbc __builtin_vec_nor (vbc, vbc); + VNOR_V16QI_UNS VNOR_V16QI_B + vss __builtin_vec_nor (vss, vss); + VNOR_V8HI + vus __builtin_vec_nor (vus, vus); + VNOR_V8HI_UNS VNOR_V8HI_U + vbs __builtin_vec_nor (vbs, vbs); + VNOR_V8HI_UNS VNOR_V8HI_B + vsi __builtin_vec_nor (vsi, vsi); + VNOR_V4SI + vui __builtin_vec_nor (vui, vui); + VNOR_V4SI_UNS VNOR_V4SI_U + vbi __builtin_vec_nor (vbi, vbi); + VNOR_V4SI_UNS VNOR_V4SI_B + vsll __builtin_vec_nor (vsll, vsll); + VNOR_V2DI + vull __builtin_vec_nor (vull, vull); + VNOR_V2DI_UNS VNOR_V2DI_U + vbll __builtin_vec_nor (vbll, vbll); + VNOR_V2DI_UNS VNOR_V2DI_B + vsq __builtin_vec_nor (vsq, vsq); + VNOR_V1TI VNOR_V1TI_S + vuq __builtin_vec_nor (vuq, vuq); + VNOR_V1TI_UNS VNOR_V1TI_U + vf __builtin_vec_nor (vf, vf); + VNOR_V4SF + vd __builtin_vec_nor (vd, vd); + VNOR_V2DF +; The following variants are deprecated. 
+ vsll __builtin_vec_nor (vsll, vbll); + VNOR_V2DI VNOR_VSLL_VBLL + vsll __builtin_vec_nor (vbll, vsll); + VNOR_V2DI VNOR_VBLL_VSLL + vull __builtin_vec_nor (vull, vbll); + VNOR_V2DI_UNS VNOR_VULL_VBLL + vull __builtin_vec_nor (vbll, vull); + VNOR_V2DI_UNS VNOR_VBLL_VULL + vsq __builtin_vec_nor (vsq, vbq); + VNOR_V1TI VNOR_VSQ_VBQ + vsq __builtin_vec_nor (vbq, vsq); + VNOR_V1TI VNOR_VBQ_VSQ + vuq __builtin_vec_nor (vuq, vbq); + VNOR_V1TI_UNS VNOR_VUQ_VBQ + vuq __builtin_vec_nor (vbq, vuq); + VNOR_V1TI_UNS VNOR_VBQ_VUQ + +[VEC_OR, vec_or, __builtin_vec_or] + vsc __builtin_vec_or (vsc, vsc); + VOR_V16QI + vuc __builtin_vec_or (vuc, vuc); + VOR_V16QI_UNS VOR_V16QI_U + vbc __builtin_vec_or (vbc, vbc); + VOR_V16QI_UNS VOR_V16QI_B + vss __builtin_vec_or (vss, vss); + VOR_V8HI + vus __builtin_vec_or (vus, vus); + VOR_V8HI_UNS VOR_V8HI_U + vbs __builtin_vec_or (vbs, vbs); + VOR_V8HI_UNS VOR_V8HI_B + vsi __builtin_vec_or (vsi, vsi); + VOR_V4SI + vui __builtin_vec_or (vui, vui); + VOR_V4SI_UNS VOR_V4SI_U + vbi __builtin_vec_or (vbi, vbi); + VOR_V4SI_UNS VOR_V4SI_B + vsll __builtin_vec_or (vsll, vsll); + VOR_V2DI + vull __builtin_vec_or (vull, vull); + VOR_V2DI_UNS VOR_V2DI_U + vbll __builtin_vec_or (vbll, vbll); + VOR_V2DI_UNS VOR_V2DI_B + vf __builtin_vec_or (vf, vf); + VOR_V4SF + vd __builtin_vec_or (vd, vd); + VOR_V2DF +; The following variants are deprecated. + vsc __builtin_vec_or (vsc, vbc); + VOR_V16QI VOR_VSC_VBC + vsc __builtin_vec_or (vbc, vsc); + VOR_V16QI VOR_VBC_VSC + vuc __builtin_vec_or (vuc, vbc); + VOR_V16QI_UNS VOR_V16QI_UB + vuc __builtin_vec_or (vbc, vuc); + VOR_V16QI_UNS VOR_V16QI_BU + vss __builtin_vec_or (vss, vbs); + VOR_V8HI VOR_VSS_VBS + vss __builtin_vec_or (vbs, vss); + VOR_V8HI VOR_VBS_VSS + vus __builtin_vec_or (vus, vbs); + VOR_V8HI_UNS VOR_V8HI_UB + vus __builtin_vec_or (vbs, vus); + VOR_V8HI_UNS VOR_V8HI_BU + vsi __builtin_vec_or (vsi, vbi); + VOR_V4SI VOR_VSI_VBI + vsi __builtin_vec_or (vbi, vsi); + VOR_V4SI VOR_VBI_VSI + vui __builtin_vec_or (vui, vbi); + VOR_V4SI_UNS VOR_V4SI_UB + vui __builtin_vec_or (vbi, vui); + VOR_V4SI_UNS VOR_V4SI_BU + vsll __builtin_vec_or (vsll, vbll); + VOR_V2DI VOR_VSLL_VBLL + vsll __builtin_vec_or (vbll, vsll); + VOR_V2DI VOR_VBLL_VSLL + vull __builtin_vec_or (vull, vbll); + VOR_V2DI_UNS VOR_V2DI_UB + vull __builtin_vec_or (vbll, vull); + VOR_V2DI_UNS VOR_V2DI_BU + vf __builtin_vec_or (vf, vbi); + VOR_V4SF VOR_VF_VBI + vf __builtin_vec_or (vbi, vf); + VOR_V4SF VOR_VBI_VF + vd __builtin_vec_or (vd, vbll); + VOR_V2DF VOR_VD_VBLL + vd __builtin_vec_or (vbll, vd); + VOR_V2DF VOR_VBLL_VD + +[VEC_ORC, vec_orc, __builtin_vec_orc, _ARCH_PWR8] + vsc __builtin_vec_orc (vsc, vsc); + ORC_V16QI + vuc __builtin_vec_orc (vuc, vuc); + ORC_V16QI_UNS ORC_VUC + vbc __builtin_vec_orc (vbc, vbc); + ORC_V16QI_UNS ORC_VBC + vss __builtin_vec_orc (vss, vss); + ORC_V8HI + vus __builtin_vec_orc (vus, vus); + ORC_V8HI_UNS ORC_VUS + vbs __builtin_vec_orc (vbs, vbs); + ORC_V8HI_UNS ORC_VBS + vsi __builtin_vec_orc (vsi, vsi); + ORC_V4SI + vui __builtin_vec_orc (vui, vui); + ORC_V4SI_UNS ORC_VUI + vbi __builtin_vec_orc (vbi, vbi); + ORC_V4SI_UNS ORC_VBI + vsll __builtin_vec_orc (vsll, vsll); + ORC_V2DI + vull __builtin_vec_orc (vull, vull); + ORC_V2DI_UNS ORC_VULL + vbll __builtin_vec_orc (vbll, vbll); + ORC_V2DI_UNS ORC_VBLL + vf __builtin_vec_orc (vf, vf); + ORC_V4SF + vd __builtin_vec_orc (vd, vd); + ORC_V2DF +; The following variants are deprecated. 
+ vsc __builtin_vec_orc (vbc, vsc); + ORC_V16QI ORC_VBC_VSC + vsc __builtin_vec_orc (vsc, vbc); + ORC_V16QI ORC_VSC_VBC + vuc __builtin_vec_orc (vbc, vuc); + ORC_V16QI_UNS ORC_VBC_VUC + vuc __builtin_vec_orc (vuc, vbc); + ORC_V16QI_UNS ORC_VUC_VBC + vss __builtin_vec_orc (vbs, vss); + ORC_V8HI ORC_VBS_VSS + vss __builtin_vec_orc (vss, vbs); + ORC_V8HI ORC_VSS_VBS + vus __builtin_vec_orc (vbs, vus); + ORC_V8HI_UNS ORC_VBS_VUS + vus __builtin_vec_orc (vus, vbs); + ORC_V8HI_UNS ORC_VUS_VBS + vsi __builtin_vec_orc (vbi, vsi); + ORC_V4SI ORC_VBI_VSI + vsi __builtin_vec_orc (vsi, vbi); + ORC_V4SI ORC_VSI_VBI + vui __builtin_vec_orc (vbi, vui); + ORC_V4SI_UNS ORC_VBI_VUI + vui __builtin_vec_orc (vui, vbi); + ORC_V4SI_UNS ORC_VUI_VBI + vsll __builtin_vec_orc (vbll, vsll); + ORC_V2DI ORC_VBLL_VSLL + vsll __builtin_vec_orc (vsll, vbll); + ORC_V2DI ORC_VSLL_VBLL + vull __builtin_vec_orc (vbll, vull); + ORC_V2DI_UNS ORC_VBLL_VULL + vull __builtin_vec_orc (vull, vbll); + ORC_V2DI_UNS ORC_VULL_VBLL + +[VEC_PACK, vec_pack, __builtin_vec_pack] + vsc __builtin_vec_pack (vss, vss); + VPKUHUM VPKUHUM_VSS + vuc __builtin_vec_pack (vus, vus); + VPKUHUM VPKUHUM_VUS + vbc __builtin_vec_pack (vbs, vbs); + VPKUHUM VPKUHUM_VBS + vss __builtin_vec_pack (vsi, vsi); + VPKUWUM VPKUWUM_VSI + vus __builtin_vec_pack (vui, vui); + VPKUWUM VPKUWUM_VUI + vbs __builtin_vec_pack (vbi, vbi); + VPKUWUM VPKUWUM_VBI + vsi __builtin_vec_pack (vsll, vsll); + VPKUDUM VPKUDUM_VSLL + vui __builtin_vec_pack (vull, vull); + VPKUDUM VPKUDUM_VULL + vbi __builtin_vec_pack (vbll, vbll); + VPKUDUM VPKUDUM_VBLL + vf __builtin_vec_pack (vd, vd); + FLOAT2_V2DF FLOAT2_V2DF_PACK + +[VEC_PACKPX, vec_packpx, __builtin_vec_packpx] + vp __builtin_vec_packpx (vui, vui); + VPKPX + +[VEC_PACKS, vec_packs, __builtin_vec_packs] + vuc __builtin_vec_packs (vus, vus); + VPKUHUS VPKUHUS_S + vsc __builtin_vec_packs (vss, vss); + VPKSHSS + vus __builtin_vec_packs (vui, vui); + VPKUWUS VPKUWUS_S + vss __builtin_vec_packs (vsi, vsi); + VPKSWSS + vui __builtin_vec_packs (vull, vull); + VPKUDUS VPKUDUS_S + vsi __builtin_vec_packs (vsll, vsll); + VPKSDSS + +[VEC_PACKSU, vec_packsu, __builtin_vec_packsu] + vuc __builtin_vec_packsu (vus, vus); + VPKUHUS VPKUHUS_U + vuc __builtin_vec_packsu (vss, vss); + VPKSHUS + vus __builtin_vec_packsu (vui, vui); + VPKUWUS VPKUWUS_U + vus __builtin_vec_packsu (vsi, vsi); + VPKSWUS + vui __builtin_vec_packsu (vull, vull); + VPKUDUS VPKUDUS_U + vui __builtin_vec_packsu (vsll, vsll); + VPKSDUS + +[VEC_PDEP, vec_pdep, __builtin_vec_vpdepd, _ARCH_PWR10] + vull __builtin_vec_vpdepd (vull, vull); + VPDEPD + +[VEC_PERM, vec_perm, __builtin_vec_perm] + vsc __builtin_vec_perm (vsc, vsc, vuc); + VPERM_16QI + vuc __builtin_vec_perm (vuc, vuc, vuc); + VPERM_16QI_UNS VPERM_16QI_VUC + vbc __builtin_vec_perm (vbc, vbc, vuc); + VPERM_16QI_UNS VPERM_16QI_VBC + vss __builtin_vec_perm (vss, vss, vuc); + VPERM_8HI + vus __builtin_vec_perm (vus, vus, vuc); + VPERM_8HI_UNS VPERM_8HI_VUS + vbs __builtin_vec_perm (vbs, vbs, vuc); + VPERM_8HI_UNS VPERM_8HI_VBS + vp __builtin_vec_perm (vp, vp, vuc); + VPERM_8HI_UNS VPERM_8HI_VP + vsi __builtin_vec_perm (vsi, vsi, vuc); + VPERM_4SI + vui __builtin_vec_perm (vui, vui, vuc); + VPERM_4SI_UNS VPERM_4SI_VUI + vbi __builtin_vec_perm (vbi, vbi, vuc); + VPERM_4SI_UNS VPERM_4SI_VBI + vsll __builtin_vec_perm (vsll, vsll, vuc); + VPERM_2DI + vull __builtin_vec_perm (vull, vull, vuc); + VPERM_2DI_UNS VPERM_2DI_VULL + vbll __builtin_vec_perm (vbll, vbll, vuc); + VPERM_2DI_UNS VPERM_2DI_VBLL + vf __builtin_vec_perm (vf, vf, 
vuc); + VPERM_4SF + vd __builtin_vec_perm (vd, vd, vuc); + VPERM_2DF + vsq __builtin_vec_perm (vsq, vsq, vuc); + VPERM_1TI + vuq __builtin_vec_perm (vuq, vuq, vuc); + VPERM_1TI_UNS +; The following variants are deprecated. + vsc __builtin_vec_perm (vsc, vuc, vuc); + VPERM_16QI VPERM_VSC_VUC_VUC + vbc __builtin_vec_perm (vbc, vbc, vbc); + VPERM_16QI VPERM_VBC_VBC_VBC + +[VEC_PERMX, vec_permx, __builtin_vec_xxpermx, _ARCH_PWR10] + vsc __builtin_vec_xxpermx (vsc, vsc, vuc, const int); + XXPERMX_UV2DI XXPERMX_VSC + vuc __builtin_vec_xxpermx (vuc, vuc, vuc, const int); + XXPERMX_UV2DI XXPERMX_VUC + vss __builtin_vec_xxpermx (vss, vss, vuc, const int); + XXPERMX_UV2DI XXPERMX_VSS + vus __builtin_vec_xxpermx (vus, vus, vuc, const int); + XXPERMX_UV2DI XXPERMX_VUS + vsi __builtin_vec_xxpermx (vsi, vsi, vuc, const int); + XXPERMX_UV2DI XXPERMX_VSI + vui __builtin_vec_xxpermx (vui, vui, vuc, const int); + XXPERMX_UV2DI XXPERMX_VUI + vsll __builtin_vec_xxpermx (vsll, vsll, vuc, const int); + XXPERMX_UV2DI XXPERMX_VSLL + vull __builtin_vec_xxpermx (vull, vull, vuc, const int); + XXPERMX_UV2DI XXPERMX_VULL + vf __builtin_vec_xxpermx (vf, vf, vuc, const int); + XXPERMX_UV2DI XXPERMX_VF + vd __builtin_vec_xxpermx (vd, vd, vuc, const int); + XXPERMX_UV2DI XXPERMX_VD + +[VEC_PERMXOR, vec_permxor, __builtin_vec_vpermxor] + vsc __builtin_vec_vpermxor (vsc, vsc, vsc); + VPERMXOR VPERMXOR_VSC + vuc __builtin_vec_vpermxor (vuc, vuc, vuc); + VPERMXOR VPERMXOR_VUC + vbc __builtin_vec_vpermxor (vbc, vbc, vbc); + VPERMXOR VPERMXOR_VBC + +[VEC_PEXT, vec_pext, __builtin_vec_vpextd, _ARCH_PWR10] + vull __builtin_vec_vpextd (vull, vull); + VPEXTD + +[VEC_PMSUM, vec_pmsum_be, __builtin_vec_vpmsum] + vus __builtin_vec_vpmsum (vuc, vuc); + VPMSUMB VPMSUMB_V + vui __builtin_vec_vpmsum (vus, vus); + VPMSUMH VPMSUMH_V + vull __builtin_vec_vpmsum (vui, vui); + VPMSUMW VPMSUMW_V + vuq __builtin_vec_vpmsum (vull, vull); + VPMSUMD VPMSUMD_V + +[VEC_POPCNT, vec_popcnt, __builtin_vec_vpopcntu, _ARCH_PWR8] + vuc __builtin_vec_vpopcntu (vsc); + VPOPCNTB + vuc __builtin_vec_vpopcntu (vuc); + VPOPCNTUB + vus __builtin_vec_vpopcntu (vss); + VPOPCNTH + vus __builtin_vec_vpopcntu (vus); + VPOPCNTUH + vui __builtin_vec_vpopcntu (vsi); + VPOPCNTW + vui __builtin_vec_vpopcntu (vui); + VPOPCNTUW + vull __builtin_vec_vpopcntu (vsll); + VPOPCNTD + vull __builtin_vec_vpopcntu (vull); + VPOPCNTUD + +[VEC_PARITY_LSBB, vec_parity_lsbb, __builtin_vec_vparity_lsbb, _ARCH_PWR9] + vui __builtin_vec_vparity_lsbb (vsi); + VPRTYBW VPRTYBW_S + vui __builtin_vec_vparity_lsbb (vui); + VPRTYBW VPRTYBW_U + vull __builtin_vec_vparity_lsbb (vsll); + VPRTYBD VPRTYBD_S + vull __builtin_vec_vparity_lsbb (vull); + VPRTYBD VPRTYBD_U + vuq __builtin_vec_vparity_lsbb (vsq); + VPRTYBQ VPRTYBQ_S + vuq __builtin_vec_vparity_lsbb (vuq); + VPRTYBQ VPRTYBQ_U + +; There are no actual builtins for vec_promote. There is special handling for +; this in altivec_resolve_overloaded_builtin in rs6000-c.c, where the call +; is replaced by a constructor. The single overload here causes +; __builtin_vec_promote to be registered with the front end so that can happen. +[VEC_PROMOTE, vec_promote, __builtin_vec_promote] + vsi __builtin_vec_promote (vsi); + ABS_V4SI PROMOTE_FAKERY + +; Opportunity for improvement: We can use XVRESP instead of VREFP for +; TARGET_VSX. We would need conditional dispatch to allow two possibilities. +; Some syntax like "XVRESP{TARGET_VSX};VREFP". +; TODO. 
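+; To illustrate the vec_promote handling mentioned above (hypothetical C,
+; not the actual constructor node the resolver emits): vec_promote (x, i)
+; yields a vector whose element i is x and whose remaining elements are
+; unspecified, roughly:
+;
+;   vector signed int tmp;              /* other elements unspecified */
+;   ((signed int *) &tmp)[i & 3] = x;
+;   /* tmp is the result of vec_promote.  */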
#### +[VEC_RE, vec_re, __builtin_vec_re] + vf __builtin_vec_re (vf); + VREFP + vd __builtin_vec_re (vd); + XVREDP + +[VEC_RECIP, vec_recipdiv, __builtin_vec_recipdiv] + vf __builtin_vec_recipdiv (vf, vf); + RECIP_V4SF + vd __builtin_vec_recipdiv (vd, vd); + RECIP_V2DF + +[VEC_REPLACE_ELT, vec_replace_elt, __builtin_vec_replace_elt, _ARCH_PWR10] + vui __builtin_vec_replace_elt (vui, unsigned int, const int); + VREPLACE_ELT_UV4SI + vsi __builtin_vec_replace_elt (vsi, signed int, const int); + VREPLACE_ELT_V4SI + vull __builtin_vec_replace_elt (vull, unsigned long long, const int); + VREPLACE_ELT_UV2DI + vsll __builtin_vec_replace_elt (vsll, signed long long, const int); + VREPLACE_ELT_V2DI + vf __builtin_vec_replace_elt (vf, float, const int); + VREPLACE_ELT_V4SF + vd __builtin_vec_replace_elt (vd, double, const int); + VREPLACE_ELT_V2DF + +[VEC_REPLACE_UN, vec_replace_unaligned, __builtin_vec_replace_un, _ARCH_PWR10] + vui __builtin_vec_replace_un (vui, unsigned int, const int); + VREPLACE_UN_UV4SI + vsi __builtin_vec_replace_un (vsi, signed int, const int); + VREPLACE_UN_V4SI + vull __builtin_vec_replace_un (vull, unsigned long long, const int); + VREPLACE_UN_UV2DI + vsll __builtin_vec_replace_un (vsll, signed long long, const int); + VREPLACE_UN_V2DI + vf __builtin_vec_replace_un (vf, float, const int); + VREPLACE_UN_V4SF + vd __builtin_vec_replace_un (vd, double, const int); + VREPLACE_UN_V2DF + +[VEC_REVB, vec_revb, __builtin_vec_revb, _ARCH_PWR8] + vss __builtin_vec_revb (vss); + REVB_V8HI REVB_VSS + vus __builtin_vec_revb (vus); + REVB_V8HI REVB_VUS + vsi __builtin_vec_revb (vsi); + REVB_V4SI REVB_VSI + vui __builtin_vec_revb (vui); + REVB_V4SI REVB_VUI + vsll __builtin_vec_revb (vsll); + REVB_V2DI REVB_VSLL + vull __builtin_vec_revb (vull); + REVB_V2DI REVB_VULL + vsq __builtin_vec_revb (vsq); + REVB_V1TI REVB_VSQ + vuq __builtin_vec_revb (vuq); + REVB_V1TI REVB_VUQ + vf __builtin_vec_revb (vf); + REVB_V4SF + vd __builtin_vec_revb (vd); + REVB_V2DF +; The following variants are deprecated. 
+ vsc __builtin_vec_revb (vsc); + REVB_V16QI REVB_VSC + vuc __builtin_vec_revb (vuc); + REVB_V16QI REVB_VUC + vbc __builtin_vec_revb (vbc); + REVB_V16QI REVB_VBC + vbs __builtin_vec_revb (vbs); + REVB_V8HI REVB_VBS + vbi __builtin_vec_revb (vbi); + REVB_V4SI REVB_VBI + vbll __builtin_vec_revb (vbll); + REVB_V2DI REVB_VBLL + +[VEC_REVE, vec_reve, __builtin_vec_vreve] + vsc __builtin_vec_vreve (vsc); + VREVE_V16QI VREVE_VSC + vuc __builtin_vec_vreve (vuc); + VREVE_V16QI VREVE_VUC + vbc __builtin_vec_vreve (vbc); + VREVE_V16QI VREVE_VBC + vss __builtin_vec_vreve (vss); + VREVE_V8HI VREVE_VSS + vus __builtin_vec_vreve (vus); + VREVE_V8HI VREVE_VUS + vbs __builtin_vec_vreve (vbs); + VREVE_V8HI VREVE_VBS + vsi __builtin_vec_vreve (vsi); + VREVE_V4SI VREVE_VSI + vui __builtin_vec_vreve (vui); + VREVE_V4SI VREVE_VUI + vbi __builtin_vec_vreve (vbi); + VREVE_V4SI VREVE_VBI + vsll __builtin_vec_vreve (vsll); + VREVE_V2DI VREVE_VSLL + vull __builtin_vec_vreve (vull); + VREVE_V2DI VREVE_VULL + vbll __builtin_vec_vreve (vbll); + VREVE_V2DI VREVE_VBLL + vf __builtin_vec_vreve (vf); + VREVE_V4SF + vd __builtin_vec_vreve (vd); + VREVE_V2DF + +[VEC_RINT, vec_rint, __builtin_vec_rint, __VSX__] + vf __builtin_vec_rint (vf); + XVRSPIC + vd __builtin_vec_rint (vd); + XVRDPIC + +[VEC_RL, vec_rl, __builtin_vec_rl] + vsc __builtin_vec_rl (vsc, vuc); + VRLB VRLB_VSC + vuc __builtin_vec_rl (vuc, vuc); + VRLB VRLB_VUC + vss __builtin_vec_rl (vss, vus); + VRLH VRLH_VSS + vus __builtin_vec_rl (vus, vus); + VRLH VRLH_VUS + vsi __builtin_vec_rl (vsi, vui); + VRLW VRLW_VSI + vui __builtin_vec_rl (vui, vui); + VRLW VRLW_VUI + vsll __builtin_vec_rl (vsll, vull); + VRLD VRLD_VSLL + vull __builtin_vec_rl (vull, vull); + VRLD VRLD_VULL + vsq __builtin_vec_rl (vsq, vuq); + VRLQ VRLQ_VSQ + vuq __builtin_vec_rl (vuq, vuq); + VRLQ VRLQ_VUQ + +[VEC_RLMI, vec_rlmi, __builtin_vec_rlmi, _ARCH_PWR9] + vui __builtin_vec_rlmi (vui, vui, vui); + VRLWMI + vull __builtin_vec_rlmi (vull, vull, vull); + VRLDMI + vsq __builtin_vec_rlmi (vsq, vsq, vuq); + VRLQMI VRLQMI_VSQ + vuq __builtin_vec_rlmi (vuq, vuq, vuq); + VRLQMI VRLQMI_VUQ + +[VEC_RLNM, vec_vrlnm, __builtin_vec_rlnm, _ARCH_PWR9] + vui __builtin_vec_rlnm (vui, vui); + VRLWNM + vull __builtin_vec_rlnm (vull, vull); + VRLDNM + vsq __builtin_vec_rlnm (vsq, vuq); + VRLQNM VRLQNM_VSQ + vuq __builtin_vec_rlnm (vuq, vuq); + VRLQNM VRLQNM_VUQ + +; #### XVRSPI{TARGET_VSX};VRFIN +[VEC_ROUND, vec_round, __builtin_vec_round] + vf __builtin_vec_round (vf); + VRFIN + vd __builtin_vec_round (vd); + XVRDPI + +[VEC_RSQRT, vec_rsqrt, __builtin_vec_rsqrt] + vf __builtin_vec_rsqrt (vf); + RSQRT_4SF + vd __builtin_vec_rsqrt (vd); + RSQRT_2DF + +; #### XVRSQRTESP{TARGET_VSX};VRSQRTEFP +[VEC_RSQRTE, vec_rsqrte, __builtin_vec_rsqrte] + vf __builtin_vec_rsqrte (vf); + VRSQRTEFP + vd __builtin_vec_rsqrte (vd); + XVRSQRTEDP + +[VEC_SBOX_BE, vec_sbox_be, __builtin_vec_sbox_be, _ARCH_PWR8] + vuc __builtin_vec_sbox_be (vuc); + VSBOX_BE + +[VEC_SEL, vec_sel, __builtin_vec_sel] + vsc __builtin_vec_sel (vsc, vsc, vbc); + VSEL_16QI VSEL_16QI_B + vsc __builtin_vec_sel (vsc, vsc, vuc); + VSEL_16QI VSEL_16QI_U + vuc __builtin_vec_sel (vuc, vuc, vbc); + VSEL_16QI_UNS VSEL_16QI_UB + vuc __builtin_vec_sel (vuc, vuc, vuc); + VSEL_16QI_UNS VSEL_16QI_UU + vbc __builtin_vec_sel (vbc, vbc, vbc); + VSEL_16QI_UNS VSEL_16QI_BB + vbc __builtin_vec_sel (vbc, vbc, vuc); + VSEL_16QI_UNS VSEL_16QI_BU + vss __builtin_vec_sel (vss, vss, vbs); + VSEL_8HI VSEL_8HI_B + vss __builtin_vec_sel (vss, vss, vus); + VSEL_8HI VSEL_8HI_U + vus 
__builtin_vec_sel (vus, vus, vbs); + VSEL_8HI_UNS VSEL_8HI_UB + vus __builtin_vec_sel (vus, vus, vus); + VSEL_8HI_UNS VSEL_8HI_UU + vbs __builtin_vec_sel (vbs, vbs, vbs); + VSEL_8HI_UNS VSEL_8HI_BB + vbs __builtin_vec_sel (vbs, vbs, vus); + VSEL_8HI_UNS VSEL_8HI_BU + vsi __builtin_vec_sel (vsi, vsi, vbi); + VSEL_4SI VSEL_4SI_B + vsi __builtin_vec_sel (vsi, vsi, vui); + VSEL_4SI VSEL_4SI_U + vui __builtin_vec_sel (vui, vui, vbi); + VSEL_4SI_UNS VSEL_4SI_UB + vui __builtin_vec_sel (vui, vui, vui); + VSEL_4SI_UNS VSEL_4SI_UU + vbi __builtin_vec_sel (vbi, vbi, vbi); + VSEL_4SI_UNS VSEL_4SI_BB + vbi __builtin_vec_sel (vbi, vbi, vui); + VSEL_4SI_UNS VSEL_4SI_BU + vsll __builtin_vec_sel (vsll, vsll, vbll); + VSEL_2DI_B VSEL_2DI_B + vsll __builtin_vec_sel (vsll, vsll, vull); + VSEL_2DI_B VSEL_2DI_U + vull __builtin_vec_sel (vull, vull, vbll); + VSEL_2DI_UNS VSEL_2DI_UB + vull __builtin_vec_sel (vull, vull, vull); + VSEL_2DI_UNS VSEL_2DI_UU + vbll __builtin_vec_sel (vbll, vbll, vbll); + VSEL_2DI_UNS VSEL_2DI_BB + vbll __builtin_vec_sel (vbll, vbll, vull); + VSEL_2DI_UNS VSEL_2DI_BU + vf __builtin_vec_sel (vf, vf, vbi); + VSEL_4SF VSEL_4SF_B + vf __builtin_vec_sel (vf, vf, vui); + VSEL_4SF VSEL_4SF_U + vd __builtin_vec_sel (vd, vd, vbll); + VSEL_2DF VSEL_2DF_B + vd __builtin_vec_sel (vd, vd, vull); + VSEL_2DF VSEL_2DF_U +; The following variants are deprecated. + vsll __builtin_vec_sel (vsll, vsll, vsll); + VSEL_2DI_B VSEL_2DI_S + vull __builtin_vec_sel (vull, vull, vsll); + VSEL_2DI_UNS VSEL_2DI_US + vf __builtin_vec_sel (vf, vf, vf); + VSEL_4SF VSEL_4SF_F + vf __builtin_vec_sel (vf, vf, vsi); + VSEL_4SF VSEL_4SF_S + vd __builtin_vec_sel (vd, vd, vsll); + VSEL_2DF VSEL_2DF_S + vd __builtin_vec_sel (vd, vd, vd); + VSEL_2DF VSEL_2DF_D + +[VEC_SHASIGMA_BE, vec_shasigma_be, __builtin_crypto_vshasigma] + vui __builtin_crypto_vshasigma (vui, const int, const int); + VSHASIGMAW + vull __builtin_crypto_vshasigma (vull, const int, const int); + VSHASIGMAD + +[VEC_SIGNED, vec_signed, __builtin_vec_vsigned] + vsi __builtin_vec_vsigned (vf); + VEC_VSIGNED_V4SF + vsll __builtin_vec_vsigned (vd); + VEC_VSIGNED_V2DF + +[VEC_SIGNED2, vec_signed2, __builtin_vec_vsigned2] + vsi __builtin_vec_vsigned2 (vd, vd); + VEC_VSIGNED2_V2DF + +[VEC_SIGNEDE, vec_signede, __builtin_vec_vsignede] + vsi __builtin_vec_vsignede (vd); + VEC_VSIGNEDE_V2DF + +[VEC_SIGNEDO, vec_signedo, __builtin_vec_vsignedo] + vsi __builtin_vec_vsignedo (vd); + VEC_VSIGNEDO_V2DF + +[VEC_SIGNEXTI, vec_signexti, __builtin_vec_signexti, _ARCH_PWR9] + vsi __builtin_vec_signexti (vsc); + VSIGNEXTSB2W + vsi __builtin_vec_signexti (vss); + VSIGNEXTSH2W + +[VEC_SIGNEXTLL, vec_signextll, __builtin_vec_signextll, _ARCH_PWR9] + vsll __builtin_vec_signextll (vsc); + VSIGNEXTSB2D + vsll __builtin_vec_signextll (vss); + VSIGNEXTSH2D + vsll __builtin_vec_signextll (vsi); + VSIGNEXTSW2D + +[VEC_SIGNEXTQ, vec_signextq, __builtin_vec_signextq, _ARCH_PWR10] + vsq __builtin_vec_signextq (vsll); + VSIGNEXTSD2Q + +[VEC_SL, vec_sl, __builtin_vec_sl] + vsc __builtin_vec_sl (vsc, vuc); + VSLB VSLB_VSC + vuc __builtin_vec_sl (vuc, vuc); + VSLB VSLB_VUC + vss __builtin_vec_sl (vss, vus); + VSLH VSLH_VSS + vus __builtin_vec_sl (vus, vus); + VSLH VSLH_VUS + vsi __builtin_vec_sl (vsi, vui); + VSLW VSLW_VSI + vui __builtin_vec_sl (vui, vui); + VSLW VSLW_VUI + vsll __builtin_vec_sl (vsll, vull); + VSLD VSLD_VSLL + vull __builtin_vec_sl (vull, vull); + VSLD VSLD_VULL + vsq __builtin_vec_sl (vsq, vuq); + VSLQ VSLQ_VSQ + vuq __builtin_vec_sl (vuq, vuq); + VSLQ VSLQ_VUQ + +[VEC_SLD, 
vec_sld, __builtin_vec_sld] + vsc __builtin_vec_sld (vsc, vsc, const int); + VSLDOI_16QI VSLDOI_VSC + vbc __builtin_vec_sld (vbc, vbc, const int); + VSLDOI_16QI VSLDOI_VBC + vuc __builtin_vec_sld (vuc, vuc, const int); + VSLDOI_16QI VSLDOI_VUC + vss __builtin_vec_sld (vss, vss, const int); + VSLDOI_8HI VSLDOI_VSS + vbs __builtin_vec_sld (vbs, vbs, const int); + VSLDOI_8HI VSLDOI_VBS + vus __builtin_vec_sld (vus, vus, const int); + VSLDOI_8HI VSLDOI_VUS + vp __builtin_vec_sld (vp, vp, const int); + VSLDOI_8HI VSLDOI_VP + vsi __builtin_vec_sld (vsi, vsi, const int); + VSLDOI_4SI VSLDOI_VSI + vbi __builtin_vec_sld (vbi, vbi, const int); + VSLDOI_4SI VSLDOI_VBI + vui __builtin_vec_sld (vui, vui, const int); + VSLDOI_4SI VSLDOI_VUI + vsll __builtin_vec_sld (vsll, vsll, const int); + VSLDOI_2DI VSLDOI_VSLL + vbll __builtin_vec_sld (vbll, vbll, const int); + VSLDOI_2DI VSLDOI_VBLL + vull __builtin_vec_sld (vull, vull, const int); + VSLDOI_2DI VSLDOI_VULL + vf __builtin_vec_sld (vf, vf, const int); + VSLDOI_4SF + vd __builtin_vec_sld (vd, vd, const int); + VSLDOI_2DF + +[VEC_SLDB, vec_sldb, __builtin_vec_sldb, _ARCH_PWR10] + vsc __builtin_vec_sldb (vsc, vsc, const int); + VSLDB_V16QI VSLDB_VSC + vuc __builtin_vec_sldb (vuc, vuc, const int); + VSLDB_V16QI VSLDB_VUC + vss __builtin_vec_sldb (vss, vss, const int); + VSLDB_V8HI VSLDB_VSS + vus __builtin_vec_sldb (vus, vus, const int); + VSLDB_V8HI VSLDB_VUS + vsi __builtin_vec_sldb (vsi, vsi, const int); + VSLDB_V4SI VSLDB_VSI + vui __builtin_vec_sldb (vui, vui, const int); + VSLDB_V4SI VSLDB_VUI + vsll __builtin_vec_sldb (vsll, vsll, const int); + VSLDB_V2DI VSLDB_VSLL + vull __builtin_vec_sldb (vull, vull, const int); + VSLDB_V2DI VSLDB_VULL + +[VEC_SLDW, vec_sldw, __builtin_vec_sldw] + vsc __builtin_vec_sldw (vsc, vsc, const int); + XXSLDWI_16QI XXSLDWI_VSC + vuc __builtin_vec_sldw (vuc, vuc, const int); + XXSLDWI_16QI XXSLDWI_VUC + vss __builtin_vec_sldw (vss, vss, const int); + XXSLDWI_8HI XXSLDWI_VSS + vus __builtin_vec_sldw (vus, vus, const int); + XXSLDWI_8HI XXSLDWI_VUS + vsi __builtin_vec_sldw (vsi, vsi, const int); + XXSLDWI_4SI XXSLDWI_VSI + vui __builtin_vec_sldw (vui, vui, const int); + XXSLDWI_4SI XXSLDWI_VUI + vsll __builtin_vec_sldw (vsll, vsll, const int); + XXSLDWI_2DI XXSLDWI_VSLL + vull __builtin_vec_sldw (vull, vull, const int); + XXSLDWI_2DI XXSLDWI_VULL + +[VEC_SLL, vec_sll, __builtin_vec_sll] + vsc __builtin_vec_sll (vsc, vuc); + VSL VSL_VSC + vuc __builtin_vec_sll (vuc, vuc); + VSL VSL_VUC + vss __builtin_vec_sll (vss, vuc); + VSL VSL_VSS + vus __builtin_vec_sll (vus, vuc); + VSL VSL_VUS + vp __builtin_vec_sll (vp, vuc); + VSL VSL_VP + vsi __builtin_vec_sll (vsi, vuc); + VSL VSL_VSI + vui __builtin_vec_sll (vui, vuc); + VSL VSL_VUI + vsll __builtin_vec_sll (vsll, vuc); + VSL VSL_VSLL + vull __builtin_vec_sll (vull, vuc); + VSL VSL_VULL +; The following variants are deprecated. 
+ vsc __builtin_vec_sll (vsc, vus); + VSL VSL_VSC_VUS + vsc __builtin_vec_sll (vsc, vui); + VSL VSL_VSC_VUI + vuc __builtin_vec_sll (vuc, vus); + VSL VSL_VUC_VUS + vuc __builtin_vec_sll (vuc, vui); + VSL VSL_VUC_VUI + vbc __builtin_vec_sll (vbc, vuc); + VSL VSL_VBC_VUC + vbc __builtin_vec_sll (vbc, vus); + VSL VSL_VBC_VUS + vbc __builtin_vec_sll (vbc, vui); + VSL VSL_VBC_VUI + vss __builtin_vec_sll (vss, vus); + VSL VSL_VSS_VUS + vss __builtin_vec_sll (vss, vui); + VSL VSL_VSS_VUI + vus __builtin_vec_sll (vus, vus); + VSL VSL_VUS_VUS + vus __builtin_vec_sll (vus, vui); + VSL VSL_VUS_VUI + vbs __builtin_vec_sll (vbs, vuc); + VSL VSL_VBS_VUC + vbs __builtin_vec_sll (vbs, vus); + VSL VSL_VBS_VUS + vbs __builtin_vec_sll (vbs, vui); + VSL VSL_VBS_VUI + vp __builtin_vec_sll (vp, vus); + VSL VSL_VP_VUS + vp __builtin_vec_sll (vp, vui); + VSL VSL_VP_VUI + vsi __builtin_vec_sll (vsi, vus); + VSL VSL_VSI_VUS + vsi __builtin_vec_sll (vsi, vui); + VSL VSL_VSI_VUI + vui __builtin_vec_sll (vui, vus); + VSL VSL_VUI_VUS + vui __builtin_vec_sll (vui, vui); + VSL VSL_VUI_VUI + vbi __builtin_vec_sll (vbi, vuc); + VSL VSL_VBI_VUC + vbi __builtin_vec_sll (vbi, vus); + VSL VSL_VBI_VUS + vbi __builtin_vec_sll (vbi, vui); + VSL VSL_VBI_VUI + vbll __builtin_vec_sll (vbll, vuc); + VSL VSL_VBLL_VUC + vbll __builtin_vec_sll (vbll, vus); + VSL VSL_VBLL_VUS + vbll __builtin_vec_sll (vbll, vull); + VSL VSL_VBLL_VULL + +[VEC_SLO, vec_slo, __builtin_vec_slo] + vsc __builtin_vec_slo (vsc, vsc); + VSLO VSLO_VSCS + vsc __builtin_vec_slo (vsc, vuc); + VSLO VSLO_VSCU + vuc __builtin_vec_slo (vuc, vsc); + VSLO VSLO_VUCS + vuc __builtin_vec_slo (vuc, vuc); + VSLO VSLO_VUCU + vss __builtin_vec_slo (vss, vsc); + VSLO VSLO_VSSS + vss __builtin_vec_slo (vss, vuc); + VSLO VSLO_VSSU + vus __builtin_vec_slo (vus, vsc); + VSLO VSLO_VUSS + vus __builtin_vec_slo (vus, vuc); + VSLO VSLO_VUSU + vp __builtin_vec_slo (vp, vsc); + VSLO VSLO_VPS + vp __builtin_vec_slo (vp, vuc); + VSLO VSLO_VPU + vsi __builtin_vec_slo (vsi, vsc); + VSLO VSLO_VSIS + vsi __builtin_vec_slo (vsi, vuc); + VSLO VSLO_VSIU + vui __builtin_vec_slo (vui, vsc); + VSLO VSLO_VUIS + vui __builtin_vec_slo (vui, vuc); + VSLO VSLO_VUIU + vsll __builtin_vec_slo (vsll, vsc); + VSLO VSLO_VSLLS + vsll __builtin_vec_slo (vsll, vuc); + VSLO VSLO_VSLLU + vull __builtin_vec_slo (vull, vsc); + VSLO VSLO_VULLS + vull __builtin_vec_slo (vull, vuc); + VSLO VSLO_VULLU + vf __builtin_vec_slo (vf, vsc); + VSLO VSLO_VFS + vf __builtin_vec_slo (vf, vuc); + VSLO VSLO_VFU + +[VEC_SLV, vec_slv, __builtin_vec_vslv, _ARCH_PWR9] + vuc __builtin_vec_vslv (vuc, vuc); + VSLV + +[VEC_SPLAT, vec_splat, __builtin_vec_splat] + vsc __builtin_vec_splat (vsc, signed int); + VSPLTB VSPLTB_VSC + vuc __builtin_vec_splat (vuc, signed int); + VSPLTB VSPLTB_VUC + vbc __builtin_vec_splat (vbc, signed int); + VSPLTB VSPLTB_VBC + vss __builtin_vec_splat (vss, signed int); + VSPLTH VSPLTH_VSS + vus __builtin_vec_splat (vus, signed int); + VSPLTH VSPLTH_VUS + vbs __builtin_vec_splat (vbs, signed int); + VSPLTH VSPLTH_VBS + vp __builtin_vec_splat (vp, signed int); + VSPLTH VSPLTH_VP + vf __builtin_vec_splat (vf, signed int); + VSPLTW VSPLTW_VF + vsi __builtin_vec_splat (vsi, signed int); + VSPLTW VSPLTW_VSI + vui __builtin_vec_splat (vui, signed int); + VSPLTW VSPLTW_VUI + vbi __builtin_vec_splat (vbi, signed int); + VSPLTW VSPLTW_VBI + vd __builtin_vec_splat (vd, signed int); + XXSPLTD_V2DF + vsll __builtin_vec_splat (vsll, signed int); + XXSPLTD_V2DI XXSPLTD_VSLL + vull __builtin_vec_splat (vull, signed int); + 
XXSPLTD_V2DI XXSPLTD_VULL + vbll __builtin_vec_splat (vbll, signed int); + XXSPLTD_V2DI XXSPLTD_VBLL + +[VEC_SPLAT_S8, vec_splat_s8, __builtin_vec_splat_s8] + vsc __builtin_vec_splat_s8 (signed int); + VSPLTISB + +[VEC_SPLAT_S16, vec_splat_s16, __builtin_vec_splat_s16] + vss __builtin_vec_splat_s16 (signed int); + VSPLTISH + +[VEC_SPLAT_S32, vec_splat_s32, __builtin_vec_splat_s32] + vsi __builtin_vec_splat_s32 (signed int); + VSPLTISW + +; There are no entries for vec_splat_u{8,16,32}. These are handled +; in altivec.h with a #define and a cast. + +[VEC_SPLATI, vec_splati, __builtin_vec_xxspltiw, _ARCH_PWR10] + vsi __builtin_vec_xxspltiw (signed int); + VXXSPLTIW_V4SI + vf __builtin_vec_xxspltiw (float); + VXXSPLTIW_V4SF + +[VEC_SPLATID, vec_splatid, __builtin_vec_xxspltid, _ARCH_PWR10] + vd __builtin_vec_xxspltid (float); + VXXSPLTIDP + +[VEC_SPLATI_INS, vec_splati_ins, __builtin_vec_xxsplti32dx, _ARCH_PWR10] + vsi __builtin_vec_xxsplti32dx (vsi, const int, signed int); + VXXSPLTI32DX_V4SI VXXSPLTI32DX_VSI + vui __builtin_vec_xxsplti32dx (vui, const int, unsigned int); + VXXSPLTI32DX_V4SI VXXSPLTI32DX_VUI + vf __builtin_vec_xxsplti32dx (vf, const int, float); + VXXSPLTI32DX_V4SF + +; There are no actual builtins for vec_splats. There is special handling for +; this in altivec_resolve_overloaded_builtin in rs6000-c.c, where the call +; is replaced by a constructor. The single overload here causes +; __builtin_vec_splats to be registered with the front end so that can happen. +[VEC_SPLATS, vec_splats, __builtin_vec_splats] + vsi __builtin_vec_splats (vsi); + ABS_V4SI SPLATS_FAKERY + +[VEC_SQRT, vec_sqrt, __builtin_vec_sqrt, __VSX__] + vf __builtin_vec_sqrt (vf); + XVSQRTSP + vd __builtin_vec_sqrt (vd); + XVSQRTDP + +[VEC_SR, vec_sr, __builtin_vec_sr] + vsc __builtin_vec_sr (vsc, vuc); + VSRB VSRB_VSC + vuc __builtin_vec_sr (vuc, vuc); + VSRB VSRB_VUC + vss __builtin_vec_sr (vss, vus); + VSRH VSRH_VSS + vus __builtin_vec_sr (vus, vus); + VSRH VSRH_VUS + vsi __builtin_vec_sr (vsi, vui); + VSRW VSRW_VSI + vui __builtin_vec_sr (vui, vui); + VSRW VSRW_VUI + vsll __builtin_vec_sr (vsll, vull); + VSRD VSRD_VSLL + vull __builtin_vec_sr (vull, vull); + VSRD VSRD_VULL + vsq __builtin_vec_sr (vsq, vuq); + VSRQ VSRQ_VSQ + vuq __builtin_vec_sr (vuq, vuq); + VSRQ VSRQ_VUQ + +[VEC_SRA, vec_sra, __builtin_vec_sra] + vsc __builtin_vec_sra (vsc, vuc); + VSRAB VSRAB_VSC + vuc __builtin_vec_sra (vuc, vuc); + VSRAB VSRAB_VUC + vss __builtin_vec_sra (vss, vus); + VSRAH VSRAH_VSS + vus __builtin_vec_sra (vus, vus); + VSRAH VSRAH_VUS + vsi __builtin_vec_sra (vsi, vui); + VSRAW VSRAW_VSI + vui __builtin_vec_sra (vui, vui); + VSRAW VSRAW_VUI + vsll __builtin_vec_sra (vsll, vull); + VSRAD VSRAD_VSLL + vull __builtin_vec_sra (vull, vull); + VSRAD VSRAD_VULL + vsq __builtin_vec_sra (vsq, vuq); + VSRAQ VSRAQ_VSQ + vuq __builtin_vec_sra (vuq, vuq); + VSRAQ VSRAQ_VUQ + +[VEC_SRDB, vec_srdb, __builtin_vec_srdb, _ARCH_PWR10] + vsc __builtin_vec_srdb (vsc, vsc, const int); + VSRDB_V16QI VSRDB_VSC + vuc __builtin_vec_srdb (vuc, vuc, const int); + VSRDB_V16QI VSRDB_VUC + vss __builtin_vec_srdb (vss, vss, const int); + VSRDB_V8HI VSRDB_VSS + vus __builtin_vec_srdb (vus, vus, const int); + VSRDB_V8HI VSRDB_VUS + vsi __builtin_vec_srdb (vsi, vsi, const int); + VSRDB_V4SI VSRDB_VSI + vui __builtin_vec_srdb (vui, vui, const int); + VSRDB_V4SI VSRDB_VUI + vsll __builtin_vec_srdb (vsll, vsll, const int); + VSRDB_V2DI VSRDB_VSLL + vull __builtin_vec_srdb (vull, vull, const int); + VSRDB_V2DI VSRDB_VULL + +[VEC_SRL, vec_srl, 
__builtin_vec_srl] + vsc __builtin_vec_srl (vsc, vuc); + VSR VSR_VSC + vuc __builtin_vec_srl (vuc, vuc); + VSR VSR_VUC + vss __builtin_vec_srl (vss, vuc); + VSR VSR_VSS + vus __builtin_vec_srl (vus, vuc); + VSR VSR_VUS + vp __builtin_vec_srl (vp, vuc); + VSR VSR_VP + vsi __builtin_vec_srl (vsi, vuc); + VSR VSR_VSI + vui __builtin_vec_srl (vui, vuc); + VSR VSR_VUI + vsll __builtin_vec_srl (vsll, vuc); + VSR VSR_VSLL + vull __builtin_vec_srl (vull, vuc); + VSR VSR_VULL +; The following variants are deprecated. + vsc __builtin_vec_srl (vsc, vus); + VSR VSR_VSC_VUS + vsc __builtin_vec_srl (vsc, vui); + VSR VSR_VSC_VUI + vuc __builtin_vec_srl (vuc, vus); + VSR VSR_VUC_VUS + vuc __builtin_vec_srl (vuc, vui); + VSR VSR_VUC_VUI + vbc __builtin_vec_srl (vbc, vuc); + VSR VSR_VBC_VUC + vbc __builtin_vec_srl (vbc, vus); + VSR VSR_VBC_VUS + vbc __builtin_vec_srl (vbc, vui); + VSR VSR_VBC_VUI + vss __builtin_vec_srl (vss, vus); + VSR VSR_VSS_VUS + vss __builtin_vec_srl (vss, vui); + VSR VSR_VSS_VUI + vus __builtin_vec_srl (vus, vus); + VSR VSR_VUS_VUS + vus __builtin_vec_srl (vus, vui); + VSR VSR_VUS_VUI + vbs __builtin_vec_srl (vbs, vuc); + VSR VSR_VBS_VUC + vbs __builtin_vec_srl (vbs, vus); + VSR VSR_VBS_VUS + vbs __builtin_vec_srl (vbs, vui); + VSR VSR_VBS_VUI + vp __builtin_vec_srl (vp, vus); + VSR VSR_VP_VUS + vp __builtin_vec_srl (vp, vui); + VSR VSR_VP_VUI + vsi __builtin_vec_srl (vsi, vus); + VSR VSR_VSI_VUS + vsi __builtin_vec_srl (vsi, vui); + VSR VSR_VSI_VUI + vui __builtin_vec_srl (vui, vus); + VSR VSR_VUI_VUS + vui __builtin_vec_srl (vui, vui); + VSR VSR_VUI_VUI + vbi __builtin_vec_srl (vbi, vuc); + VSR VSR_VBI_VUC + vbi __builtin_vec_srl (vbi, vus); + VSR VSR_VBI_VUS + vbi __builtin_vec_srl (vbi, vui); + VSR VSR_VBI_VUI + +[VEC_SRO, vec_sro, __builtin_vec_sro] + vsc __builtin_vec_sro (vsc, vsc); + VSRO VSRO_VSCS + vsc __builtin_vec_sro (vsc, vuc); + VSRO VSRO_VSCU + vuc __builtin_vec_sro (vuc, vsc); + VSRO VSRO_VUCS + vuc __builtin_vec_sro (vuc, vuc); + VSRO VSRO_VUCU + vss __builtin_vec_sro (vss, vsc); + VSRO VSRO_VSSS + vss __builtin_vec_sro (vss, vuc); + VSRO VSRO_VSSU + vus __builtin_vec_sro (vus, vsc); + VSRO VSRO_VUSS + vus __builtin_vec_sro (vus, vuc); + VSRO VSRO_VUSU + vp __builtin_vec_sro (vp, vsc); + VSRO VSRO_VPS + vp __builtin_vec_sro (vp, vuc); + VSRO VSRO_VPU + vsi __builtin_vec_sro (vsi, vsc); + VSRO VSRO_VSIS + vsi __builtin_vec_sro (vsi, vuc); + VSRO VSRO_VSIU + vui __builtin_vec_sro (vui, vsc); + VSRO VSRO_VUIS + vui __builtin_vec_sro (vui, vuc); + VSRO VSRO_VUIU + vsll __builtin_vec_sro (vsll, vsc); + VSRO VSRO_VSLLS + vsll __builtin_vec_sro (vsll, vuc); + VSRO VSRO_VSLLU + vull __builtin_vec_sro (vull, vsc); + VSRO VSRO_VULLS + vull __builtin_vec_sro (vull, vuc); + VSRO VSRO_VULLU + vf __builtin_vec_sro (vf, vsc); + VSRO VSRO_VFS + vf __builtin_vec_sro (vf, vuc); + VSRO VSRO_VFU + +[VEC_SRV, vec_srv, __builtin_vec_vsrv, _ARCH_PWR9] + vuc __builtin_vec_vsrv (vuc, vuc); + VSRV + +[VEC_ST, vec_st, __builtin_vec_st] + void __builtin_vec_st (vsc, signed long long, vsc *); + STVX_V16QI STVX_VSC + void __builtin_vec_st (vsc, signed long long, signed char *); + STVX_V16QI STVX_SC + void __builtin_vec_st (vuc, signed long long, vuc *); + STVX_V16QI STVX_VUC + void __builtin_vec_st (vuc, signed long long, unsigned char *); + STVX_V16QI STVX_UC + void __builtin_vec_st (vbc, signed long long, vbc *); + STVX_V16QI STVX_VBC + void __builtin_vec_st (vbc, signed long long, signed char *); + STVX_V16QI STVX_SC_B + void __builtin_vec_st (vbc, signed long long, unsigned char *); + 
STVX_V16QI STVX_UC_B + void __builtin_vec_st (vss, signed long long, vss *); + STVX_V8HI STVX_VSS + void __builtin_vec_st (vss, signed long long, signed short *); + STVX_V8HI STVX_SS + void __builtin_vec_st (vus, signed long long, vus *); + STVX_V8HI STVX_VUS + void __builtin_vec_st (vus, signed long long, unsigned short *); + STVX_V8HI STVX_US + void __builtin_vec_st (vbs, signed long long, vbs *); + STVX_V8HI STVX_VBS + void __builtin_vec_st (vbs, signed long long, signed short *); + STVX_V8HI STVX_SS_B + void __builtin_vec_st (vbs, signed long long, unsigned short *); + STVX_V8HI STVX_US_B + void __builtin_vec_st (vp, signed long long, vp *); + STVX_V8HI STVX_P + void __builtin_vec_st (vsi, signed long long, vsi *); + STVX_V4SI STVX_VSI + void __builtin_vec_st (vsi, signed long long, signed int *); + STVX_V4SI STVX_SI + void __builtin_vec_st (vui, signed long long, vui *); + STVX_V4SI STVX_VUI + void __builtin_vec_st (vui, signed long long, unsigned int *); + STVX_V4SI STVX_UI + void __builtin_vec_st (vbi, signed long long, vbi *); + STVX_V4SI STVX_VBI + void __builtin_vec_st (vbi, signed long long, signed int *); + STVX_V4SI STVX_SI_B + void __builtin_vec_st (vbi, signed long long, unsigned int *); + STVX_V4SI STVX_UI_B + void __builtin_vec_st (vsll, signed long long, vsll *); + STVX_V2DI STVX_VSLL + void __builtin_vec_st (vsll, signed long long, signed long long *); + STVX_V2DI STVX_SLL + void __builtin_vec_st (vull, signed long long, vull *); + STVX_V2DI STVX_VULL + void __builtin_vec_st (vull, signed long long, unsigned long long *); + STVX_V2DI STVX_ULL + void __builtin_vec_st (vbll, signed long long, vbll *); + STVX_V2DI STVX_VBLL + void __builtin_vec_st (vf, signed long long, vf *); + STVX_V4SF STVX_VF + void __builtin_vec_st (vf, signed long long, float *); + STVX_V4SF STVX_F + void __builtin_vec_st (vd, signed long long, vd *); + STVX_V2DF STVX_VD + void __builtin_vec_st (vd, signed long long, double *); + STVX_V2DF STVX_D +; The following variants are deprecated. 
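+; Editorial usage sketch (not part of the original interface tables;
+; assumes <altivec.h> and a VMX-enabled target): vec_st stores all 16
+; bytes of a vector at the effective address formed from its offset
+; and pointer arguments, with the low-order four bits ignored, e.g.
+;
+;   float buf[4] __attribute__ ((aligned (16)));
+;   vec_st (vec_splats (1.0f), 0, buf);   /* maps to STVX */
+;
+; The variants below store bool vectors through plain integer
+; pointers.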
+ void __builtin_vec_st (vbll, signed long long, signed long long *); + STVX_V2DI STVX_SLL_B + void __builtin_vec_st (vbll, signed long long, unsigned long long *); + STVX_V2DI STVX_ULL_B + +[VEC_STE, vec_ste, __builtin_vec_ste] + void __builtin_vec_ste (vsc, signed long long, signed char *); + STVEBX STVEBX_S + void __builtin_vec_ste (vuc, signed long long, unsigned char *); + STVEBX STVEBX_U + void __builtin_vec_ste (vbc, signed long long, signed char *); + STVEBX STVEBX_BS + void __builtin_vec_ste (vbc, signed long long, unsigned char *); + STVEBX STVEBX_BU + void __builtin_vec_ste (vss, signed long long, signed short *); + STVEHX STVEHX_S + void __builtin_vec_ste (vus, signed long long, unsigned short *); + STVEHX STVEHX_U + void __builtin_vec_ste (vbs, signed long long, signed short *); + STVEHX STVEHX_BS + void __builtin_vec_ste (vbs, signed long long, unsigned short *); + STVEHX STVEHX_BU + void __builtin_vec_ste (vp, signed long long, signed short *); + STVEHX STVEHX_PS + void __builtin_vec_ste (vp, signed long long, unsigned short *); + STVEHX STVEHX_PU + void __builtin_vec_ste (vsi, signed long long, signed int *); + STVEWX STVEWX_S + void __builtin_vec_ste (vui, signed long long, unsigned int *); + STVEWX STVEWX_U + void __builtin_vec_ste (vbi, signed long long, signed int *); + STVEWX STVEWX_BS + void __builtin_vec_ste (vbi, signed long long, unsigned int *); + STVEWX STVEWX_BU + void __builtin_vec_ste (vf, signed long long, float *); + STVEWX STVEWX_F + +; There are no builtins for VEC_STEP; this is handled directly +; with a constant replacement in altivec_resolve_overloaded_builtin. +; The single overload registers __builtin_vec_step with the front end +; so this can happen. +[VEC_STEP, vec_step, __builtin_vec_step] + signed int __builtin_vec_step (vsi); + VCLZLSBB_V4SI STEP_FAKERY + +[VEC_STL, vec_stl, __builtin_vec_stl] + void __builtin_vec_stl (vsc, signed long long, vsc *); + STVXL_V16QI STVXL_VSC + void __builtin_vec_stl (vsc, signed long long, signed char *); + STVXL_V16QI STVXL_SC + void __builtin_vec_stl (vuc, signed long long, vuc *); + STVXL_V16QI STVXL_VUC + void __builtin_vec_stl (vuc, signed long long, unsigned char *); + STVXL_V16QI STVXL_UC + void __builtin_vec_stl (vbc, signed long long, vbc *); + STVXL_V16QI STVXL_VBC + void __builtin_vec_stl (vbc, signed long long, signed char *); + STVXL_V16QI STVXL_SC_B + void __builtin_vec_stl (vbc, signed long long, unsigned char *); + STVXL_V16QI STVXL_UC_B + void __builtin_vec_stl (vss, signed long long, vss *); + STVXL_V8HI STVXL_VSS + void __builtin_vec_stl (vss, signed long long, signed short *); + STVXL_V8HI STVXL_SS + void __builtin_vec_stl (vus, signed long long, vus *); + STVXL_V8HI STVXL_VUS + void __builtin_vec_stl (vus, signed long long, unsigned short *); + STVXL_V8HI STVXL_US + void __builtin_vec_stl (vbs, signed long long, vbs *); + STVXL_V8HI STVXL_VBS + void __builtin_vec_stl (vbs, signed long long, signed short *); + STVXL_V8HI STVXL_SS_B + void __builtin_vec_stl (vbs, signed long long, unsigned short *); + STVXL_V8HI STVXL_US_B + void __builtin_vec_stl (vp, signed long long, vp *); + STVXL_V8HI STVXL_P + void __builtin_vec_stl (vsi, signed long long, vsi *); + STVXL_V4SI STVXL_VSI + void __builtin_vec_stl (vsi, signed long long, signed int *); + STVXL_V4SI STVXL_SI + void __builtin_vec_stl (vui, signed long long, vui *); + STVXL_V4SI STVXL_VUI + void __builtin_vec_stl (vui, signed long long, unsigned int *); + STVXL_V4SI STVXL_UI + void __builtin_vec_stl (vbi, signed long long, vbi *); + STVXL_V4SI
STVXL_VBI + void __builtin_vec_stl (vbi, signed long long, signed int *); + STVXL_V4SI STVXL_SI_B + void __builtin_vec_stl (vbi, signed long long, unsigned int *); + STVXL_V4SI STVXL_UI_B + void __builtin_vec_stl (vsll, signed long long, vsll *); + STVXL_V2DI STVXL_VSLL + void __builtin_vec_stl (vsll, signed long long, signed long long *); + STVXL_V2DI STVXL_SLL + void __builtin_vec_stl (vull, signed long long, vull *); + STVXL_V2DI STVXL_VULL + void __builtin_vec_stl (vull, signed long long, unsigned long long *); + STVXL_V2DI STVXL_ULL + void __builtin_vec_stl (vbll, signed long long, vbll *); + STVXL_V2DI STVXL_VBLL + void __builtin_vec_stl (vbll, signed long long, signed long long *); + STVXL_V2DI STVXL_SLL_B + void __builtin_vec_stl (vbll, signed long long, unsigned long long *); + STVXL_V2DI STVXL_ULL_B + void __builtin_vec_stl (vf, signed long long, vf *); + STVXL_V4SF STVXL_VF + void __builtin_vec_stl (vf, signed long long, float *); + STVXL_V4SF STVXL_F + void __builtin_vec_stl (vd, signed long long, vd *); + STVXL_V2DF STVXL_VD + void __builtin_vec_stl (vd, signed long long, double *); + STVXL_V2DF STVXL_D + +[VEC_STRIL, vec_stril, __builtin_vec_stril, _ARCH_PWR10] + vuc __builtin_vec_stril (vuc); + VSTRIBL VSTRIBL_U + vsc __builtin_vec_stril (vsc); + VSTRIBL VSTRIBL_S + vus __builtin_vec_stril (vus); + VSTRIHL VSTRIHL_U + vss __builtin_vec_stril (vss); + VSTRIHL VSTRIHL_S + +[VEC_STRIL_P, vec_stril_p, __builtin_vec_stril_p, _ARCH_PWR10] + signed int __builtin_vec_stril_p (vuc); + VSTRIBL_P VSTRIBL_PU + signed int __builtin_vec_stril_p (vsc); + VSTRIBL_P VSTRIBL_PS + signed int __builtin_vec_stril_p (vus); + VSTRIHL_P VSTRIHL_PU + signed int __builtin_vec_stril_p (vss); + VSTRIHL_P VSTRIHL_PS + +[VEC_STRIR, vec_strir, __builtin_vec_strir, _ARCH_PWR10] + vuc __builtin_vec_strir (vuc); + VSTRIBR VSTRIBR_U + vsc __builtin_vec_strir (vsc); + VSTRIBR VSTRIBR_S + vus __builtin_vec_strir (vus); + VSTRIHR VSTRIHR_U + vss __builtin_vec_strir (vss); + VSTRIHR VSTRIHR_S + +[VEC_STRIR_P, vec_strir_p, __builtin_vec_strir_p, _ARCH_PWR10] + signed int __builtin_vec_strir_p (vuc); + VSTRIBR_P VSTRIBR_PU + signed int __builtin_vec_strir_p (vsc); + VSTRIBR_P VSTRIBR_PS + signed int __builtin_vec_strir_p (vus); + VSTRIHR_P VSTRIHR_PU + signed int __builtin_vec_strir_p (vss); + VSTRIHR_P VSTRIHR_PS + +[VEC_STVLX, vec_stvlx, __builtin_vec_stvlx, __PPU__] + void __builtin_vec_stvlx (vbc, signed long long, vbc *); + STVLX STVLX_VBC + void __builtin_vec_stvlx (vsc, signed long long, vsc *); + STVLX STVLX_VSC + void __builtin_vec_stvlx (vsc, signed long long, signed char *); + STVLX STVLX_SC + void __builtin_vec_stvlx (vuc, signed long long, vuc *); + STVLX STVLX_VUC + void __builtin_vec_stvlx (vuc, signed long long, unsigned char *); + STVLX STVLX_UC + void __builtin_vec_stvlx (vbs, signed long long, vbs *); + STVLX STVLX_VBS + void __builtin_vec_stvlx (vss, signed long long, vss *); + STVLX STVLX_VSS + void __builtin_vec_stvlx (vss, signed long long, signed short *); + STVLX STVLX_SS + void __builtin_vec_stvlx (vus, signed long long, vus *); + STVLX STVLX_VUS + void __builtin_vec_stvlx (vus, signed long long, unsigned short *); + STVLX STVLX_US + void __builtin_vec_stvlx (vp, signed long long, vp *); + STVLX STVLX_VP + void __builtin_vec_stvlx (vbi, signed long long, vbi *); + STVLX STVLX_VBI + void __builtin_vec_stvlx (vsi, signed long long, vsi *); + STVLX STVLX_VSI + void __builtin_vec_stvlx (vsi, signed long long, signed int *); + STVLX STVLX_SI + void __builtin_vec_stvlx (vui, signed long long, 
vui *); + STVLX STVLX_VUI + void __builtin_vec_stvlx (vui, signed long long, unsigned int *); + STVLX STVLX_UI + void __builtin_vec_stvlx (vf, signed long long, vf *); + STVLX STVLX_VF + void __builtin_vec_stvlx (vf, signed long long, float *); + STVLX STVLX_F + +[VEC_STVLXL, vec_stvlxl, __builtin_vec_stvlxl, __PPU__] + void __builtin_vec_stvlxl (vbc, signed long long, vbc *); + STVLXL STVLXL_VBC + void __builtin_vec_stvlxl (vsc, signed long long, vsc *); + STVLXL STVLXL_VSC + void __builtin_vec_stvlxl (vsc, signed long long, signed char *); + STVLXL STVLXL_SC + void __builtin_vec_stvlxl (vuc, signed long long, vuc *); + STVLXL STVLXL_VUC + void __builtin_vec_stvlxl (vuc, signed long long, unsigned char *); + STVLXL STVLXL_UC + void __builtin_vec_stvlxl (vbs, signed long long, vbs *); + STVLXL STVLXL_VBS + void __builtin_vec_stvlxl (vss, signed long long, vss *); + STVLXL STVLXL_VSS + void __builtin_vec_stvlxl (vss, signed long long, signed short *); + STVLXL STVLXL_SS + void __builtin_vec_stvlxl (vus, signed long long, vus *); + STVLXL STVLXL_VUS + void __builtin_vec_stvlxl (vus, signed long long, unsigned short *); + STVLXL STVLXL_US + void __builtin_vec_stvlxl (vp, signed long long, vp *); + STVLXL STVLXL_VP + void __builtin_vec_stvlxl (vbi, signed long long, vbi *); + STVLXL STVLXL_VBI + void __builtin_vec_stvlxl (vsi, signed long long, vsi *); + STVLXL STVLXL_VSI + void __builtin_vec_stvlxl (vsi, signed long long, signed int *); + STVLXL STVLXL_SI + void __builtin_vec_stvlxl (vui, signed long long, vui *); + STVLXL STVLXL_VUI + void __builtin_vec_stvlxl (vui, signed long long, unsigned int *); + STVLXL STVLXL_UI + void __builtin_vec_stvlxl (vf, signed long long, vf *); + STVLXL STVLXL_VF + void __builtin_vec_stvlxl (vf, signed long long, float *); + STVLXL STVLXL_F + +[VEC_STVRX, vec_stvrx, __builtin_vec_stvrx, __PPU__] + void __builtin_vec_stvrx (vbc, signed long long, vbc *); + STVRX STVRX_VBC + void __builtin_vec_stvrx (vsc, signed long long, vsc *); + STVRX STVRX_VSC + void __builtin_vec_stvrx (vsc, signed long long, signed char *); + STVRX STVRX_SC + void __builtin_vec_stvrx (vuc, signed long long, vuc *); + STVRX STVRX_VUC + void __builtin_vec_stvrx (vuc, signed long long, unsigned char *); + STVRX STVRX_UC + void __builtin_vec_stvrx (vbs, signed long long, vbs *); + STVRX STVRX_VBS + void __builtin_vec_stvrx (vss, signed long long, vss *); + STVRX STVRX_VSS + void __builtin_vec_stvrx (vss, signed long long, signed short *); + STVRX STVRX_SS + void __builtin_vec_stvrx (vus, signed long long, vus *); + STVRX STVRX_VUS + void __builtin_vec_stvrx (vus, signed long long, unsigned short *); + STVRX STVRX_US + void __builtin_vec_stvrx (vp, signed long long, vp *); + STVRX STVRX_VP + void __builtin_vec_stvrx (vbi, signed long long, vbi *); + STVRX STVRX_VBI + void __builtin_vec_stvrx (vsi, signed long long, vsi *); + STVRX STVRX_VSI + void __builtin_vec_stvrx (vsi, signed long long, signed int *); + STVRX STVRX_SI + void __builtin_vec_stvrx (vui, signed long long, vui *); + STVRX STVRX_VUI + void __builtin_vec_stvrx (vui, signed long long, unsigned int *); + STVRX STVRX_UI + void __builtin_vec_stvrx (vf, signed long long, vf *); + STVRX STVRX_VF + void __builtin_vec_stvrx (vf, signed long long, float *); + STVRX STVRX_F + +[VEC_STVRXL, vec_stvrxl, __builtin_vec_stvrxl, __PPU__] + void __builtin_vec_stvrxl (vbc, signed long long, vbc *); + STVRXL STVRXL_VBC + void __builtin_vec_stvrxl (vsc, signed long long, vsc *); + STVRXL STVRXL_VSC + void __builtin_vec_stvrxl (vsc, signed long long, 
signed char *); + STVRXL STVRXL_SC + void __builtin_vec_stvrxl (vuc, signed long long, vuc *); + STVRXL STVRXL_VUC + void __builtin_vec_stvrxl (vuc, signed long long, unsigned char *); + STVRXL STVRXL_UC + void __builtin_vec_stvrxl (vbs, signed long long, vbs *); + STVRXL STVRXL_VBS + void __builtin_vec_stvrxl (vss, signed long long, vss *); + STVRXL STVRXL_VSS + void __builtin_vec_stvrxl (vss, signed long long, signed short *); + STVRXL STVRXL_SS + void __builtin_vec_stvrxl (vus, signed long long, vus *); + STVRXL STVRXL_VUS + void __builtin_vec_stvrxl (vus, signed long long, unsigned short *); + STVRXL STVRXL_US + void __builtin_vec_stvrxl (vp, signed long long, vp *); + STVRXL STVRXL_VP + void __builtin_vec_stvrxl (vbi, signed long long, vbi *); + STVRXL STVRXL_VBI + void __builtin_vec_stvrxl (vsi, signed long long, vsi *); + STVRXL STVRXL_VSI + void __builtin_vec_stvrxl (vsi, signed long long, signed int *); + STVRXL STVRXL_SI + void __builtin_vec_stvrxl (vui, signed long long, vui *); + STVRXL STVRXL_VUI + void __builtin_vec_stvrxl (vui, signed long long, unsigned int *); + STVRXL STVRXL_UI + void __builtin_vec_stvrxl (vf, signed long long, vf *); + STVRXL STVRXL_VF + void __builtin_vec_stvrxl (vf, signed long long, float *); + STVRXL STVRXL_F + +[VEC_STXVL, vec_xst_len, __builtin_vec_stxvl, _ARCH_PPC64_PWR9] + void __builtin_vec_stxvl (vsc, signed char *, unsigned int); + STXVL STXVL_VSC + void __builtin_vec_stxvl (vuc, unsigned char *, unsigned int); + STXVL STXVL_VUC + void __builtin_vec_stxvl (vss, signed short *, unsigned int); + STXVL STXVL_VSS + void __builtin_vec_stxvl (vus, unsigned short *, unsigned int); + STXVL STXVL_VUS + void __builtin_vec_stxvl (vsi, signed int *, unsigned int); + STXVL STXVL_VSI + void __builtin_vec_stxvl (vui, unsigned int *, unsigned int); + STXVL STXVL_VUI + void __builtin_vec_stxvl (vsll, signed long long *, unsigned int); + STXVL STXVL_VSLL + void __builtin_vec_stxvl (vull, unsigned long long *, unsigned int); + STXVL STXVL_VULL + void __builtin_vec_stxvl (vsq, signed __int128 *, unsigned int); + STXVL STXVL_VSQ + void __builtin_vec_stxvl (vuq, unsigned __int128 *, unsigned int); + STXVL STXVL_VUQ + void __builtin_vec_stxvl (vf, float *, unsigned int); + STXVL STXVL_VF + void __builtin_vec_stxvl (vd, double *, unsigned int); + STXVL STXVL_VD + +; #### XVSUBSP{TARGET_VSX};VSUBFP +[VEC_SUB, vec_sub, __builtin_vec_sub] + vsc __builtin_vec_sub (vsc, vsc); + VSUBUBM VSUBUBM_VSC + vuc __builtin_vec_sub (vuc, vuc); + VSUBUBM VSUBUBM_VUC + vss __builtin_vec_sub (vss, vss); + VSUBUHM VSUBUHM_VSS + vus __builtin_vec_sub (vus, vus); + VSUBUHM VSUBUHM_VUS + vsi __builtin_vec_sub (vsi, vsi); + VSUBUWM VSUBUWM_VSI + vui __builtin_vec_sub (vui, vui); + VSUBUWM VSUBUWM_VUI + vsll __builtin_vec_sub (vsll, vsll); + VSUBUDM VSUBUDM_VSLL + vull __builtin_vec_sub (vull, vull); + VSUBUDM VSUBUDM_VULL + vsq __builtin_vec_sub (vsq, vsq); + VSUBUQM VSUBUQM_VSQ + vuq __builtin_vec_sub (vuq, vuq); + VSUBUQM VSUBUQM_VUQ + vf __builtin_vec_sub (vf, vf); + VSUBFP + vd __builtin_vec_sub (vd, vd); + XVSUBDP +; The following variants are deprecated. 
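+; Editorial usage sketch (not part of the original interface tables;
+; assumes <altivec.h>): vec_sub is element-wise modular subtraction,
+; so unsigned results wrap rather than saturate, e.g.
+;
+;   vector unsigned int a = vec_splats (5u);
+;   vector unsigned int b = vec_splats (7u);
+;   vector unsigned int d = vec_sub (a, b);   /* VSUBUWM; elements
+;                                                wrap to 0xfffffffe */
+;
+; The variants below mix bool and integer operand types.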
+ vsc __builtin_vec_sub (vsc, vbc); + VSUBUBM VSUBUBM_VSC_VBC + vsc __builtin_vec_sub (vbc, vsc); + VSUBUBM VSUBUBM_VBC_VSC + vuc __builtin_vec_sub (vuc, vbc); + VSUBUBM VSUBUBM_VUC_VBC + vuc __builtin_vec_sub (vbc, vuc); + VSUBUBM VSUBUBM_VBC_VUC + vss __builtin_vec_sub (vss, vbs); + VSUBUHM VSUBUHM_VSS_VBS + vss __builtin_vec_sub (vbs, vss); + VSUBUHM VSUBUHM_VBS_VSS + vus __builtin_vec_sub (vus, vbs); + VSUBUHM VSUBUHM_VUS_VBS + vus __builtin_vec_sub (vbs, vus); + VSUBUHM VSUBUHM_VBS_VUS + vsi __builtin_vec_sub (vsi, vbi); + VSUBUWM VSUBUWM_VSI_VBI + vsi __builtin_vec_sub (vbi, vsi); + VSUBUWM VSUBUWM_VBI_VSI + vui __builtin_vec_sub (vui, vbi); + VSUBUWM VSUBUWM_VUI_VBI + vui __builtin_vec_sub (vbi, vui); + VSUBUWM VSUBUWM_VBI_VUI + vsll __builtin_vec_sub (vsll, vbll); + VSUBUDM VSUBUDM_VSLL_VBLL + vsll __builtin_vec_sub (vbll, vsll); + VSUBUDM VSUBUDM_VBLL_VSLL + vull __builtin_vec_sub (vull, vbll); + VSUBUDM VSUBUDM_VULL_VBLL + vull __builtin_vec_sub (vbll, vull); + VSUBUDM VSUBUDM_VBLL_VULL + +[VEC_SUBC, vec_subc, __builtin_vec_subc] + vsi __builtin_vec_subc (vsi, vsi); + VSUBCUW VSUBCUW_VSI + vui __builtin_vec_subc (vui, vui); + VSUBCUW VSUBCUW_VUI + vsq __builtin_vec_subc (vsq, vsq); + VSUBCUQ VSUBCUQ_VSQ + vuq __builtin_vec_subc (vuq, vuq); + VSUBCUQ VSUBCUQ_VUQ + +; TODO: Note that the entry for VEC_SUBE currently gets ignored in +; altivec_resolve_overloaded_builtin. Revisit whether we can remove +; that. We still need to register the legal builtin forms here. +[VEC_SUBE, vec_sube, __builtin_vec_sube] + vsq __builtin_vec_sube (vsq, vsq, vsq); + VSUBEUQM VSUBEUQM_VSQ + vuq __builtin_vec_sube (vuq, vuq, vuq); + VSUBEUQM VSUBEUQM_VUQ + +; TODO: Note that the entry for VEC_SUBEC currently gets ignored in +; altivec_resolve_overloaded_builtin. Revisit whether we can remove +; that. We still need to register the legal builtin forms here. +[VEC_SUBEC, vec_subec, __builtin_vec_subec] + vsq __builtin_vec_subec (vsq, vsq, vsq); + VSUBECUQ VSUBECUQ_VSQ + vuq __builtin_vec_subec (vuq, vuq, vuq); + VSUBECUQ VSUBECUQ_VUQ + +[VEC_SUBS, vec_subs, __builtin_vec_subs] + vuc __builtin_vec_subs (vuc, vuc); + VSUBUBS + vsc __builtin_vec_subs (vsc, vsc); + VSUBSBS + vus __builtin_vec_subs (vus, vus); + VSUBUHS + vss __builtin_vec_subs (vss, vss); + VSUBSHS + vui __builtin_vec_subs (vui, vui); + VSUBUWS + vsi __builtin_vec_subs (vsi, vsi); + VSUBSWS +; The following variants are deprecated. 
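+; Editorial usage sketch (not part of the original interface tables;
+; assumes <altivec.h>): unlike vec_sub, vec_subs saturates instead of
+; wrapping, e.g.
+;
+;   vector unsigned char one = vec_splats ((unsigned char) 1);
+;   vector unsigned char two = vec_splats ((unsigned char) 2);
+;   vector unsigned char z = vec_subs (one, two);   /* VSUBUBS; all
+;                                                      elements are 0 */
+;
+; The variants below mix bool and integer operand types.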
+ vuc __builtin_vec_subs (vuc, vbc); + VSUBUBS VSUBUBS_UB + vuc __builtin_vec_subs (vbc, vuc); + VSUBUBS VSUBUBS_BU + vsc __builtin_vec_subs (vsc, vbc); + VSUBSBS VSUBSBS_SB + vsc __builtin_vec_subs (vbc, vsc); + VSUBSBS VSUBSBS_BS + vus __builtin_vec_subs (vus, vbs); + VSUBUHS VSUBUHS_UB + vus __builtin_vec_subs (vbs, vus); + VSUBUHS VSUBUHS_BU + vss __builtin_vec_subs (vss, vbs); + VSUBSHS VSUBSHS_SB + vss __builtin_vec_subs (vbs, vss); + VSUBSHS VSUBSHS_BS + vui __builtin_vec_subs (vui, vbi); + VSUBUWS VSUBUWS_UB + vui __builtin_vec_subs (vbi, vui); + VSUBUWS VSUBUWS_BU + vsi __builtin_vec_subs (vsi, vbi); + VSUBSWS VSUBSWS_SB + vsi __builtin_vec_subs (vbi, vsi); + VSUBSWS VSUBSWS_BS + +[VEC_SUM2S, vec_sum2s, __builtin_vec_sum2s] + vsi __builtin_vec_sum2s (vsi, vsi); + VSUM2SWS + +[VEC_SUM4S, vec_sum4s, __builtin_vec_sum4s] + vui __builtin_vec_sum4s (vuc, vui); + VSUM4UBS + vsi __builtin_vec_sum4s (vsc, vsi); + VSUM4SBS + vsi __builtin_vec_sum4s (vss, vsi); + VSUM4SHS + +[VEC_SUMS, vec_sums, __builtin_vec_sums] + vsi __builtin_vec_sums (vsi, vsi); + VSUMSWS + +[VEC_TERNARYLOGIC, vec_ternarylogic, __builtin_vec_xxeval, _ARCH_PWR10] + vuc __builtin_vec_xxeval (vuc, vuc, vuc, const int); + XXEVAL XXEVAL_VUC + vus __builtin_vec_xxeval (vus, vus, vus, const int); + XXEVAL XXEVAL_VUS + vui __builtin_vec_xxeval (vui, vui, vui, const int); + XXEVAL XXEVAL_VUI + vull __builtin_vec_xxeval (vull, vull, vull, const int); + XXEVAL XXEVAL_VULL + vuq __builtin_vec_xxeval (vuq, vuq, vuq, const int); + XXEVAL XXEVAL_VUQ + +[VEC_TEST_LSBB_ALL_ONES, vec_test_lsbb_all_ones, __builtin_vec_xvtlsbb_all_ones, _ARCH_PWR9] + signed int __builtin_vec_xvtlsbb_all_ones (vuc); + XVTLSBB_ONES + +[VEC_TEST_LSBB_ALL_ZEROS, vec_test_lsbb_all_zeros, __builtin_vec_xvtlsbb_all_zeros, _ARCH_PWR9] + signed int __builtin_vec_xvtlsbb_all_zeros (vuc); + XVTLSBB_ZEROS + +; #### XVRSPIZ{TARGET_VSX}; VRFIZ +[VEC_TRUNC, vec_trunc, __builtin_vec_trunc] + vf __builtin_vec_trunc (vf); + VRFIZ + vd __builtin_vec_trunc (vd); + XVRDPIZ + +[VEC_TSTSFI_GT, SKIP, __builtin_dfp_dtstsfi_gt] + signed int __builtin_dfp_dtstsfi_gt (const int, _Decimal64); + TSTSFI_GT_DD + signed int __builtin_dfp_dtstsfi_gt (const int, _Decimal128); + TSTSFI_GT_TD + +[VEC_TSTSFI_EQ, SKIP, __builtin_dfp_dtstsfi_eq] + signed int __builtin_dfp_dtstsfi_eq (const int, _Decimal64); + TSTSFI_EQ_DD + signed int __builtin_dfp_dtstsfi_eq (const int, _Decimal128); + TSTSFI_EQ_TD + +[VEC_TSTSFI_LT, SKIP, __builtin_dfp_dtstsfi_lt] + signed int __builtin_dfp_dtstsfi_lt (const int, _Decimal64); + TSTSFI_LT_DD + signed int __builtin_dfp_dtstsfi_lt (const int, _Decimal128); + TSTSFI_LT_TD + +[VEC_TSTSFI_OV, SKIP, __builtin_dfp_dtstsfi_ov] + signed int __builtin_dfp_dtstsfi_ov (const int, _Decimal64); + TSTSFI_OV_DD + signed int __builtin_dfp_dtstsfi_ov (const int, _Decimal128); + TSTSFI_OV_TD + +[VEC_UNPACKH, vec_unpackh, __builtin_vec_unpackh] + vss __builtin_vec_unpackh (vsc); + VUPKHSB VUPKHSB_VSC + vbs __builtin_vec_unpackh (vbc); + VUPKHSB VUPKHSB_VBC + vsi __builtin_vec_unpackh (vss); + VUPKHSH VUPKHSH_VSS + vbi __builtin_vec_unpackh (vbs); + VUPKHSH VUPKHSH_VBS + vui __builtin_vec_unpackh (vp); + VUPKHPX + vsll __builtin_vec_unpackh (vsi); + VUPKHSW VUPKHSW_VSI + vbll __builtin_vec_unpackh (vbi); + VUPKHSW VUPKHSW_VBI + vd __builtin_vec_unpackh (vf); + DOUBLEH_V4SF VUPKHF + +[VEC_UNPACKL, vec_unpackl, __builtin_vec_unpackl] + vss __builtin_vec_unpackl (vsc); + VUPKLSB VUPKLSB_VSC + vbs __builtin_vec_unpackl (vbc); + VUPKLSB VUPKLSB_VBC + vsi __builtin_vec_unpackl (vss); + 
VUPKLSH VUPKLSH_VSS + vbi __builtin_vec_unpackl (vbs); + VUPKLSH VUPKLSH_VBS + vui __builtin_vec_unpackl (vp); + VUPKLPX + vsll __builtin_vec_unpackl (vsi); + VUPKLSW VUPKLSW_VSI + vbll __builtin_vec_unpackl (vbi); + VUPKLSW VUPKLSW_VBI + vd __builtin_vec_unpackl (vf); + DOUBLEL_V4SF VUPKLF + +[VEC_UNSIGNED, vec_unsigned, __builtin_vec_vunsigned] + vui __builtin_vec_vunsigned (vf); + VEC_VUNSIGNED_V4SF + vull __builtin_vec_vunsigned (vd); + VEC_VUNSIGNED_V2DF + +[VEC_UNSIGNED2, vec_unsigned2, __builtin_vec_vunsigned2] + vui __builtin_vec_vunsigned2 (vd, vd); + VEC_VUNSIGNED2_V2DF + +[VEC_UNSIGNEDE, vec_unsignede, __builtin_vec_vunsignede] + vui __builtin_vec_vunsignede (vd); + VEC_VUNSIGNEDE_V2DF + +[VEC_UNSIGNEDO, vec_unsignedo, __builtin_vec_vunsignedo] + vui __builtin_vec_vunsignedo (vd); + VEC_VUNSIGNEDO_V2DF + +[VEC_VEE, vec_extract_exp, __builtin_vec_extract_exp, _ARCH_PWR9] + vui __builtin_vec_extract_exp (vf); + VEESP + vull __builtin_vec_extract_exp (vd); + VEEDP + +[VEC_VES, vec_extract_sig, __builtin_vec_extract_sig, _ARCH_PWR9] + vui __builtin_vec_extract_sig (vf); + VESSP + vull __builtin_vec_extract_sig (vd); + VESDP + +[VEC_VIE, vec_insert_exp, __builtin_vec_insert_exp, _ARCH_PWR9] + vf __builtin_vec_insert_exp (vf, vui); + VIESP VIESP_VF + vf __builtin_vec_insert_exp (vui, vui); + VIESP VIESP_VUI + vd __builtin_vec_insert_exp (vd, vull); + VIEDP VIEDP_VD + vd __builtin_vec_insert_exp (vull, vull); + VIEDP VIEDP_VULL + +; It is truly unfortunate that vec_vprtyb has an incompatible set of +; interfaces with vec_parity_lsbb. So we can't even deprecate this. +[VEC_VPRTYB, vec_vprtyb, __builtin_vec_vprtyb, _ARCH_PWR9] + vsi __builtin_vec_vprtyb (vsi); + VPRTYBW VPRTYB_VSI + vui __builtin_vec_vprtyb (vui); + VPRTYBW VPRTYB_VUI + vsll __builtin_vec_vprtyb (vsll); + VPRTYBD VPRTYB_VSLL + vull __builtin_vec_vprtyb (vull); + VPRTYBD VPRTYB_VULL + vsq __builtin_vec_vprtyb (vsq); + VPRTYBQ VPRTYB_VSQ + vuq __builtin_vec_vprtyb (vuq); + VPRTYBQ VPRTYB_VUQ + signed __int128 __builtin_vec_vprtyb (signed __int128); + VPRTYBQ VPRTYB_SQ + unsigned __int128 __builtin_vec_vprtyb (unsigned __int128); + VPRTYBQ VPRTYB_UQ + +[VEC_VSCEEQ, scalar_cmp_exp_eq, __builtin_vec_scalar_cmp_exp_eq, _ARCH_PWR9] + signed int __builtin_vec_scalar_cmp_exp_eq (double, double); + VSCEDPEQ + signed int __builtin_vec_scalar_cmp_exp_eq (_Float128, _Float128); + VSCEQPEQ + +[VEC_VSCEGT, scalar_cmp_exp_gt, __builtin_vec_scalar_cmp_exp_gt, _ARCH_PWR9] + signed int __builtin_vec_scalar_cmp_exp_gt (double, double); + VSCEDPGT + signed int __builtin_vec_scalar_cmp_exp_gt (_Float128, _Float128); + VSCEQPGT + +[VEC_VSCELT, scalar_cmp_exp_lt, __builtin_vec_scalar_cmp_exp_lt, _ARCH_PWR9] + signed int __builtin_vec_scalar_cmp_exp_lt (double, double); + VSCEDPLT + signed int __builtin_vec_scalar_cmp_exp_lt (_Float128, _Float128); + VSCEQPLT + +[VEC_VSCEUO, scalar_cmp_exp_unordered, __builtin_vec_scalar_cmp_exp_unordered, _ARCH_PWR9] + signed int __builtin_vec_scalar_cmp_exp_unordered (double, double); + VSCEDPUO + signed int __builtin_vec_scalar_cmp_exp_unordered (_Float128, _Float128); + VSCEQPUO + +[VEC_VSEE, scalar_extract_exp, __builtin_vec_scalar_extract_exp, _ARCH_PWR9] + unsigned int __builtin_vec_scalar_extract_exp (double); + VSEEDP + unsigned int __builtin_vec_scalar_extract_exp (_Float128); + VSEEQP + +[VEC_VSES, scalar_extract_sig, __builtin_vec_scalar_extract_sig, _ARCH_PWR9] + unsigned long long __builtin_vec_scalar_extract_sig (double); + VSESDP + unsigned __int128 __builtin_vec_scalar_extract_sig (_Float128); + 
VSESQP + +[VEC_VSIE, scalar_insert_exp, __builtin_vec_scalar_insert_exp, _ARCH_PWR9] + double __builtin_vec_scalar_insert_exp (unsigned long long, unsigned long long); + VSIEDP + double __builtin_vec_scalar_insert_exp (double, unsigned long long); + VSIEDPF + _Float128 __builtin_vec_scalar_insert_exp (unsigned __int128, unsigned long long); + VSIEQP + _Float128 __builtin_vec_scalar_insert_exp (_Float128, unsigned long long); + VSIEQPF + +[VEC_VSTDC, scalar_test_data_class, __builtin_vec_scalar_test_data_class, _ARCH_PWR9] + unsigned int __builtin_vec_scalar_test_data_class (float, const int); + VSTDCSP + unsigned int __builtin_vec_scalar_test_data_class (double, const int); + VSTDCDP + unsigned int __builtin_vec_scalar_test_data_class (_Float128, const int); + VSTDCQP + +[VEC_VSTDCN, scalar_test_neg, __builtin_vec_scalar_test_neg, _ARCH_PWR9] + unsigned int __builtin_vec_scalar_test_neg (float); + VSTDCNSP + unsigned int __builtin_vec_scalar_test_neg (double); + VSTDCNDP + unsigned int __builtin_vec_scalar_test_neg (_Float128); + VSTDCNQP + +[VEC_VTDC, vec_test_data_class, __builtin_vec_test_data_class, _ARCH_PWR9] + vbi __builtin_vec_test_data_class (vf, const int); + VTDCSP + vbll __builtin_vec_test_data_class (vd, const int); + VTDCDP + +[VEC_XL, vec_xl, __builtin_vec_vsx_ld, __VSX__] + vsc __builtin_vec_vsx_ld (signed long long, const vsc *); + LXVW4X_V16QI LXVW4X_VSC + vsc __builtin_vec_vsx_ld (signed long long, const signed char *); + LXVW4X_V16QI LXVW4X_SC + vuc __builtin_vec_vsx_ld (signed long long, const vuc *); + LXVW4X_V16QI LXVW4X_VUC + vuc __builtin_vec_vsx_ld (signed long long, const unsigned char *); + LXVW4X_V16QI LXVW4X_UC + vbc __builtin_vec_vsx_ld (signed long long, const vbc *); + LXVW4X_V16QI LXVW4X_VBC + vss __builtin_vec_vsx_ld (signed long long, const vss *); + LXVW4X_V8HI LXVW4X_VSS + vss __builtin_vec_vsx_ld (signed long long, const signed short *); + LXVW4X_V8HI LXVW4X_SS + vus __builtin_vec_vsx_ld (signed long long, const vus *); + LXVW4X_V8HI LXVW4X_VUS + vus __builtin_vec_vsx_ld (signed long long, const unsigned short *); + LXVW4X_V8HI LXVW4X_US + vbs __builtin_vec_vsx_ld (signed long long, const vbs *); + LXVW4X_V8HI LXVW4X_VBS + vp __builtin_vec_vsx_ld (signed long long, const vp *); + LXVW4X_V8HI LXVW4X_P + vsi __builtin_vec_vsx_ld (signed long long, const vsi *); + LXVW4X_V4SI LXVW4X_VSI + vsi __builtin_vec_vsx_ld (signed long long, const signed int *); + LXVW4X_V4SI LXVW4X_SI + vui __builtin_vec_vsx_ld (signed long long, const vui *); + LXVW4X_V4SI LXVW4X_VUI + vui __builtin_vec_vsx_ld (signed long long, const unsigned int *); + LXVW4X_V4SI LXVW4X_UI + vbi __builtin_vec_vsx_ld (signed long long, const vbi *); + LXVW4X_V4SI LXVW4X_VBI + vsll __builtin_vec_vsx_ld (signed long long, const vsll *); + LXVD2X_V2DI LXVD2X_VSLL + vsll __builtin_vec_vsx_ld (signed long long, const signed long long *); + LXVD2X_V2DI LXVD2X_SLL + vull __builtin_vec_vsx_ld (signed long long, const vull *); + LXVD2X_V2DI LXVD2X_VULL + vull __builtin_vec_vsx_ld (signed long long, const unsigned long long *); + LXVD2X_V2DI LXVD2X_ULL + vbll __builtin_vec_vsx_ld (signed long long, const vbll *); + LXVD2X_V2DI LXVD2X_VBLL + vsq __builtin_vec_vsx_ld (signed long long, const vsq *); + LXVD2X_V1TI LXVD2X_VSQ + vsq __builtin_vec_vsx_ld (signed long long, const signed __int128 *); + LXVD2X_V1TI LXVD2X_SQ + vuq __builtin_vec_vsx_ld (signed long long, const unsigned __int128 *); + LXVD2X_V1TI LXVD2X_UQ + vf __builtin_vec_vsx_ld (signed long long, const vf *); + LXVW4X_V4SF LXVW4X_VF + vf 
__builtin_vec_vsx_ld (signed long long, const float *); + LXVW4X_V4SF LXVW4X_F + vd __builtin_vec_vsx_ld (signed long long, const vd *); + LXVD2X_V2DF LXVD2X_VD + vd __builtin_vec_vsx_ld (signed long long, const double *); + LXVD2X_V2DF LXVD2X_D + +[VEC_XL_BE, vec_xl_be, __builtin_vec_xl_be, __VSX__] + vsc __builtin_vec_xl_be (signed long long, const vsc *); + LD_ELEMREV_V16QI LD_ELEMREV_VSC + vsc __builtin_vec_xl_be (signed long long, const signed char *); + LD_ELEMREV_V16QI LD_ELEMREV_SC + vuc __builtin_vec_xl_be (signed long long, const vuc *); + LD_ELEMREV_V16QI LD_ELEMREV_VUC + vuc __builtin_vec_xl_be (signed long long, const unsigned char *); + LD_ELEMREV_V16QI LD_ELEMREV_UC + vss __builtin_vec_xl_be (signed long long, const vss *); + LD_ELEMREV_V8HI LD_ELEMREV_VSS + vss __builtin_vec_xl_be (signed long long, const signed short *); + LD_ELEMREV_V8HI LD_ELEMREV_SS + vus __builtin_vec_xl_be (signed long long, const vus *); + LD_ELEMREV_V8HI LD_ELEMREV_VUS + vus __builtin_vec_xl_be (signed long long, const unsigned short *); + LD_ELEMREV_V8HI LD_ELEMREV_US + vsi __builtin_vec_xl_be (signed long long, const vsi *); + LD_ELEMREV_V4SI LD_ELEMREV_VSI + vsi __builtin_vec_xl_be (signed long long, const signed int *); + LD_ELEMREV_V4SI LD_ELEMREV_SI + vui __builtin_vec_xl_be (signed long long, const vui *); + LD_ELEMREV_V4SI LD_ELEMREV_VUI + vui __builtin_vec_xl_be (signed long long, const unsigned int *); + LD_ELEMREV_V4SI LD_ELEMREV_UI + vsll __builtin_vec_xl_be (signed long long, const vsll *); + LD_ELEMREV_V2DI LD_ELEMREV_VSLL + vsll __builtin_vec_xl_be (signed long long, const signed long long *); + LD_ELEMREV_V2DI LD_ELEMREV_SLL + vull __builtin_vec_xl_be (signed long long, const vull *); + LD_ELEMREV_V2DI LD_ELEMREV_VULL + vull __builtin_vec_xl_be (signed long long, const unsigned long long *); + LD_ELEMREV_V2DI LD_ELEMREV_ULL + vsq __builtin_vec_xl_be (signed long long, const signed __int128 *); + LD_ELEMREV_V1TI LD_ELEMREV_SQ + vuq __builtin_vec_xl_be (signed long long, const unsigned __int128 *); + LD_ELEMREV_V1TI LD_ELEMREV_UQ + vf __builtin_vec_xl_be (signed long long, const vf *); + LD_ELEMREV_V4SF LD_ELEMREV_VF + vf __builtin_vec_xl_be (signed long long, const float *); + LD_ELEMREV_V4SF LD_ELEMREV_F + vd __builtin_vec_xl_be (signed long long, const vd *); + LD_ELEMREV_V2DF LD_ELEMREV_VD + vd __builtin_vec_xl_be (signed long long, const double *); + LD_ELEMREV_V2DF LD_ELEMREV_DD + +[VEC_XL_LEN_R, vec_xl_len_r, __builtin_vec_xl_len_r, _ARCH_PPC64_PWR9] + vuc __builtin_vec_xl_len_r (const unsigned char *, unsigned int); + XL_LEN_R + +[VEC_XL_SEXT, vec_xl_sext, __builtin_vec_xl_sext, _ARCH_PWR10] + vsq __builtin_vec_xl_sext (signed long long, const signed char *); + SE_LXVRBX + vsq __builtin_vec_xl_sext (signed long long, const signed short *); + SE_LXVRHX + vsq __builtin_vec_xl_sext (signed long long, const signed int *); + SE_LXVRWX + vsq __builtin_vec_xl_sext (signed long long, const signed long long *); + SE_LXVRDX + +[VEC_XL_ZEXT, vec_xl_zext, __builtin_vec_xl_zext, _ARCH_PWR10] + vuq __builtin_vec_xl_zext (signed long long, const unsigned char *); + ZE_LXVRBX + vuq __builtin_vec_xl_zext (signed long long, const unsigned short *); + ZE_LXVRHX + vuq __builtin_vec_xl_zext (signed long long, const unsigned int *); + ZE_LXVRWX + vuq __builtin_vec_xl_zext (signed long long, const unsigned long long *); + ZE_LXVRDX + +[VEC_XOR, vec_xor, __builtin_vec_xor] + vsc __builtin_vec_xor (vsc, vsc); + VXOR_V16QI + vuc __builtin_vec_xor (vuc, vuc); + VXOR_V16QI_UNS VXOR_VUC + vbc
__builtin_vec_xor (vbc, vbc); + VXOR_V16QI_UNS VXOR_VBC + vss __builtin_vec_xor (vss, vss); + VXOR_V8HI + vus __builtin_vec_xor (vus, vus); + VXOR_V8HI_UNS VXOR_VUS + vbs __builtin_vec_xor (vbs, vbs); + VXOR_V8HI_UNS VXOR_VBS + vsi __builtin_vec_xor (vsi, vsi); + VXOR_V4SI + vui __builtin_vec_xor (vui, vui); + VXOR_V4SI_UNS VXOR_VUI + vbi __builtin_vec_xor (vbi, vbi); + VXOR_V4SI_UNS VXOR_VBI + vsll __builtin_vec_xor (vsll, vsll); + VXOR_V2DI + vull __builtin_vec_xor (vull, vull); + VXOR_V2DI_UNS VXOR_VULL + vbll __builtin_vec_xor (vbll, vbll); + VXOR_V2DI_UNS VXOR_VBLL + vf __builtin_vec_xor (vf, vf); + VXOR_V4SF + vd __builtin_vec_xor (vd, vd); + VXOR_V2DF +; The following variants are deprecated. + vsc __builtin_vec_xor (vsc, vbc); + VXOR_V16QI VXOR_VSC_VBC + vsc __builtin_vec_xor (vbc, vsc); + VXOR_V16QI VXOR_VBC_VSC + vsc __builtin_vec_xor (vsc, vuc); + VXOR_V16QI VXOR_VSC_VUC + vuc __builtin_vec_xor (vuc, vbc); + VXOR_V16QI_UNS VXOR_VUC_VBC + vuc __builtin_vec_xor (vbc, vuc); + VXOR_V16QI_UNS VXOR_VBC_VUC + vuc __builtin_vec_xor (vuc, vsc); + VXOR_V16QI_UNS VXOR_VUC_VSC + vss __builtin_vec_xor (vss, vbs); + VXOR_V8HI VXOR_VSS_VBS + vss __builtin_vec_xor (vbs, vss); + VXOR_V8HI VXOR_VBS_VSS + vus __builtin_vec_xor (vus, vbs); + VXOR_V8HI_UNS VXOR_VUS_VBS + vus __builtin_vec_xor (vbs, vus); + VXOR_V8HI_UNS VXOR_VBS_VUS + vsi __builtin_vec_xor (vsi, vbi); + VXOR_V4SI VXOR_VSI_VBI + vsi __builtin_vec_xor (vbi, vsi); + VXOR_V4SI VXOR_VBI_VSI + vui __builtin_vec_xor (vui, vbi); + VXOR_V4SI_UNS VXOR_VUI_VBI + vui __builtin_vec_xor (vbi, vui); + VXOR_V4SI_UNS VXOR_VBI_VUI + vsll __builtin_vec_xor (vsll, vbll); + VXOR_V2DI VXOR_VSLL_VBLL + vsll __builtin_vec_xor (vbll, vsll); + VXOR_V2DI VXOR_VBLL_VSLL + vull __builtin_vec_xor (vull, vbll); + VXOR_V2DI_UNS VXOR_VULL_VBLL + vull __builtin_vec_xor (vbll, vull); + VXOR_V2DI_UNS VXOR_VBLL_VULL + vf __builtin_vec_xor (vf, vbi); + VXOR_V4SF VXOR_VF_VBI + vf __builtin_vec_xor (vbi, vf); + VXOR_V4SF VXOR_VBI_VF + vd __builtin_vec_xor (vd, vbll); + VXOR_V2DF VXOR_VD_VBLL + vd __builtin_vec_xor (vbll, vd); + VXOR_V2DF VXOR_VBLL_VD + +[VEC_XST, vec_xst, __builtin_vec_vsx_st, __VSX__] + void __builtin_vec_vsx_st (vsc, signed long long, vsc *); + STXVW4X_V16QI STXVW4X_VSC + void __builtin_vec_vsx_st (vsc, signed long long, signed char *); + STXVW4X_V16QI STXVW4X_SC + void __builtin_vec_vsx_st (vuc, signed long long, vuc *); + STXVW4X_V16QI STXVW4X_VUC + void __builtin_vec_vsx_st (vuc, signed long long, unsigned char *); + STXVW4X_V16QI STXVW4X_UC + void __builtin_vec_vsx_st (vbc, signed long long, vbc *); + STXVW4X_V16QI STXVW4X_VBC + void __builtin_vec_vsx_st (vbc, signed long long, signed char *); + STXVW4X_V16QI STXVW4X_VBC_S + void __builtin_vec_vsx_st (vbc, signed long long, unsigned char *); + STXVW4X_V16QI STXVW4X_VBC_U + void __builtin_vec_vsx_st (vss, signed long long, vss *); + STXVW4X_V8HI STXVW4X_VSS + void __builtin_vec_vsx_st (vss, signed long long, signed short *); + STXVW4X_V8HI STXVW4X_SS + void __builtin_vec_vsx_st (vus, signed long long, vus *); + STXVW4X_V8HI STXVW4X_VUS + void __builtin_vec_vsx_st (vus, signed long long, unsigned short *); + STXVW4X_V8HI STXVW4X_US + void __builtin_vec_vsx_st (vbs, signed long long, vbs *); + STXVW4X_V8HI STXVW4X_VBS + void __builtin_vec_vsx_st (vbs, signed long long, signed short *); + STXVW4X_V8HI STXVW4X_VBS_S + void __builtin_vec_vsx_st (vbs, signed long long, unsigned short *); + STXVW4X_V8HI STXVW4X_VBS_U + void __builtin_vec_vsx_st (vp, signed long long, vp *); + STXVW4X_V8HI STXVW4X_VP + void 
__builtin_vec_vsx_st (vsi, signed long long, vsi *); + STXVW4X_V4SI STXVW4X_VSI + void __builtin_vec_vsx_st (vsi, signed long long, signed int *); + STXVW4X_V4SI STXVW4X_SI + void __builtin_vec_vsx_st (vui, signed long long, vui *); + STXVW4X_V4SI STXVW4X_VUI + void __builtin_vec_vsx_st (vui, signed long long, unsigned int *); + STXVW4X_V4SI STXVW4X_UI + void __builtin_vec_vsx_st (vbi, signed long long, vbi *); + STXVW4X_V4SI STXVW4X_VBI + void __builtin_vec_vsx_st (vbi, signed long long, signed int *); + STXVW4X_V4SI STXVW4X_VBI_S + void __builtin_vec_vsx_st (vbi, signed long long, unsigned int *); + STXVW4X_V4SI STXVW4X_VBI_U + void __builtin_vec_vsx_st (vsll, signed long long, vsll *); + STXVD2X_V2DI STXVD2X_VSLL + void __builtin_vec_vsx_st (vsll, signed long long, signed long long *); + STXVD2X_V2DI STXVD2X_SLL + void __builtin_vec_vsx_st (vull, signed long long, vull *); + STXVD2X_V2DI STXVD2X_VULL + void __builtin_vec_vsx_st (vull, signed long long, unsigned long long *); + STXVD2X_V2DI STXVD2X_ULL + void __builtin_vec_vsx_st (vbll, signed long long, vbll *); + STXVD2X_V2DI STXVD2X_VBLL + void __builtin_vec_vsx_st (vsq, signed long long, signed __int128 *); + STXVD2X_V1TI STXVD2X_SQ + void __builtin_vec_vsx_st (vuq, signed long long, unsigned __int128 *); + STXVD2X_V1TI STXVD2X_UQ + void __builtin_vec_vsx_st (vf, signed long long, vf *); + STXVW4X_V4SF STXVW4X_VF + void __builtin_vec_vsx_st (vf, signed long long, float *); + STXVW4X_V4SF STXVW4X_F + void __builtin_vec_vsx_st (vd, signed long long, vd *); + STXVD2X_V2DF STXVD2X_VD + void __builtin_vec_vsx_st (vd, signed long long, double *); + STXVD2X_V2DF STXVD2X_D + +[VEC_XST_BE, vec_xst_be, __builtin_vec_xst_be, __VSX__] + void __builtin_vec_xst_be (vsc, signed long long, vsc *); + ST_ELEMREV_V16QI ST_ELEMREV_VSC + void __builtin_vec_xst_be (vsc, signed long long, signed char *); + ST_ELEMREV_V16QI ST_ELEMREV_SC + void __builtin_vec_xst_be (vuc, signed long long, vuc *); + ST_ELEMREV_V16QI ST_ELEMREV_VUC + void __builtin_vec_xst_be (vuc, signed long long, unsigned char *); + ST_ELEMREV_V16QI ST_ELEMREV_UC + void __builtin_vec_xst_be (vss, signed long long, vss *); + ST_ELEMREV_V8HI ST_ELEMREV_VSS + void __builtin_vec_xst_be (vss, signed long long, signed short *); + ST_ELEMREV_V8HI ST_ELEMREV_SS + void __builtin_vec_xst_be (vus, signed long long, vus *); + ST_ELEMREV_V8HI ST_ELEMREV_VUS + void __builtin_vec_xst_be (vus, signed long long, unsigned short *); + ST_ELEMREV_V8HI ST_ELEMREV_US + void __builtin_vec_xst_be (vsi, signed long long, vsi *); + ST_ELEMREV_V4SI ST_ELEMREV_VSI + void __builtin_vec_xst_be (vsi, signed long long, signed int *); + ST_ELEMREV_V4SI ST_ELEMREV_SI + void __builtin_vec_xst_be (vui, signed long long, vui *); + ST_ELEMREV_V4SI ST_ELEMREV_VUI + void __builtin_vec_xst_be (vui, signed long long, unsigned int *); + ST_ELEMREV_V4SI ST_ELEMREV_UI + void __builtin_vec_xst_be (vsll, signed long long, vsll *); + ST_ELEMREV_V2DI ST_ELEMREV_VSLL + void __builtin_vec_xst_be (vsll, signed long long, signed long long *); + ST_ELEMREV_V2DI ST_ELEMREV_SLL + void __builtin_vec_xst_be (vull, signed long long, vull *); + ST_ELEMREV_V2DI ST_ELEMREV_VULL + void __builtin_vec_xst_be (vull, signed long long, unsigned long long *); + ST_ELEMREV_V2DI ST_ELEMREV_ULL + void __builtin_vec_xst_be (vsq, signed long long, signed __int128 *); + ST_ELEMREV_V1TI ST_ELEMREV_SQ + void __builtin_vec_xst_be (vuq, signed long long, unsigned __int128 *); + ST_ELEMREV_V1TI ST_ELEMREV_UQ + void __builtin_vec_xst_be (vf, signed long long, vf *); +
ST_ELEMREV_V4SF ST_ELEMREV_VF + void __builtin_vec_xst_be (vf, signed long long, float *); + ST_ELEMREV_V4SF ST_ELEMREV_F + void __builtin_vec_xst_be (vd, signed long long, vd *); + ST_ELEMREV_V2DF ST_ELEMREV_VD + void __builtin_vec_xst_be (vd, signed long long, double *); + ST_ELEMREV_V2DF ST_ELEMREV_D + +[VEC_XST_LEN_R, vec_xst_len_r, __builtin_vec_xst_len_r, _ARCH_PPC64_PWR9] + void __builtin_vec_xst_len_r (vuc, unsigned char *, unsigned int); + XST_LEN_R + +[VEC_XST_TRUNC, vec_xst_trunc, __builtin_vec_xst_trunc, _ARCH_PWR10] + void __builtin_vec_xst_trunc (vsq, signed long long, signed char *); + TR_STXVRBX TR_STXVRBX_S + void __builtin_vec_xst_trunc (vuq, signed long long, unsigned char *); + TR_STXVRBX TR_STXVRBX_U + void __builtin_vec_xst_trunc (vsq, signed long long, signed short *); + TR_STXVRHX TR_STXVRHX_S + void __builtin_vec_xst_trunc (vuq, signed long long, unsigned short *); + TR_STXVRHX TR_STXVRHX_U + void __builtin_vec_xst_trunc (vsq, signed long long, signed int *); + TR_STXVRWX TR_STXVRWX_S + void __builtin_vec_xst_trunc (vuq, signed long long, unsigned int *); + TR_STXVRWX TR_STXVRWX_U + void __builtin_vec_xst_trunc (vsq, signed long long, signed long long *); + TR_STXVRDX TR_STXVRDX_S + void __builtin_vec_xst_trunc (vuq, signed long long, unsigned long long *); + TR_STXVRDX TR_STXVRDX_U + +[VEC_XXPERMDI, vec_xxpermdi, __builtin_vsx_xxpermdi, __VSX__] + vsc __builtin_vsx_xxpermdi (vsc, vsc, const int); + XXPERMDI_16QI XXPERMDI_VSC + vuc __builtin_vsx_xxpermdi (vuc, vuc, const int); + XXPERMDI_16QI XXPERMDI_VUC + vss __builtin_vsx_xxpermdi (vss, vss, const int); + XXPERMDI_8HI XXPERMDI_VSS + vus __builtin_vsx_xxpermdi (vus, vus, const int); + XXPERMDI_8HI XXPERMDI_VUS + vsi __builtin_vsx_xxpermdi (vsi, vsi, const int); + XXPERMDI_4SI XXPERMDI_VSI + vui __builtin_vsx_xxpermdi (vui, vui, const int); + XXPERMDI_4SI XXPERMDI_VUI + vsll __builtin_vsx_xxpermdi (vsll, vsll, const int); + XXPERMDI_2DI XXPERMDI_VSLL + vull __builtin_vsx_xxpermdi (vull, vull, const int); + XXPERMDI_2DI XXPERMDI_VULL + vf __builtin_vsx_xxpermdi (vf, vf, const int); + XXPERMDI_4SF XXPERMDI_VF + vd __builtin_vsx_xxpermdi (vd, vd, const int); + XXPERMDI_2DF XXPERMDI_VD + +[VEC_XXSLDWI, vec_xxsldwi, __builtin_vsx_xxsldwi, __VSX__] + vsc __builtin_vsx_xxsldwi (vsc, vsc, const int); + XXSLDWI_16QI XXSLDWI_VSC2 + vuc __builtin_vsx_xxsldwi (vuc, vuc, const int); + XXSLDWI_16QI XXSLDWI_VUC2 + vss __builtin_vsx_xxsldwi (vss, vss, const int); + XXSLDWI_8HI XXSLDWI_VSS2 + vus __builtin_vsx_xxsldwi (vus, vus, const int); + XXSLDWI_8HI XXSLDWI_VUS2 + vsi __builtin_vsx_xxsldwi (vsi, vsi, const int); + XXSLDWI_4SI XXSLDWI_VSI2 + vui __builtin_vsx_xxsldwi (vui, vui, const int); + XXSLDWI_4SI XXSLDWI_VUI2 + vsll __builtin_vsx_xxsldwi (vsll, vsll, const int); + XXSLDWI_2DI XXSLDWI_VSLL2 + vull __builtin_vsx_xxsldwi (vull, vull, const int); + XXSLDWI_2DI XXSLDWI_VULL2 + vf __builtin_vsx_xxsldwi (vf, vf, const int); + XXSLDWI_4SF XXSLDWI_VF2 + vd __builtin_vsx_xxsldwi (vd, vd, const int); + XXSLDWI_2DF XXSLDWI_VD2 + + +; ************************************************************************** +; ************************************************************************** +; **** Deprecated overloads that should never have existed at all **** +; ************************************************************************** +; ************************************************************************** + +[VEC_LVEBX, vec_lvebx, __builtin_vec_lvebx] + vsc __builtin_vec_lvebx (signed long, signed char *); + LVEBX LVEBX_DEPR1 +
vuc __builtin_vec_lvebx (signed long, unsigned char *); + LVEBX LVEBX_DEPR2 + +[VEC_LVEHX, vec_lvehx, __builtin_vec_lvehx] + vss __builtin_vec_lvehx (signed long, signed short *); + LVEHX LVEHX_DEPR1 + vus __builtin_vec_lvehx (signed long, unsigned short *); + LVEHX LVEHX_DEPR2 + +[VEC_LVEWX, vec_lvewx, __builtin_vec_lvewx] + vf __builtin_vec_lvewx (signed long, float *); + LVEWX LVEWX_DEPR1 + vsi __builtin_vec_lvewx (signed long, signed int *); + LVEWX LVEWX_DEPR2 + vui __builtin_vec_lvewx (signed long, unsigned int *); + LVEWX LVEWX_DEPR3 + vsi __builtin_vec_lvewx (signed long, signed long *); + LVEWX LVEWX_DEPR4 + vui __builtin_vec_lvewx (signed long, unsigned long *); + LVEWX LVEWX_DEPR5 + +[VEC_STVEBX, vec_stvebx, __builtin_vec_stvebx] + void __builtin_vec_stvebx (vsc, signed long, signed char *); + STVEBX STVEBX_DEPR1 + void __builtin_vec_stvebx (vuc, signed long, unsigned char *); + STVEBX STVEBX_DEPR2 + void __builtin_vec_stvebx (vbc, signed long, signed char *); + STVEBX STVEBX_DEPR3 + void __builtin_vec_stvebx (vbc, signed long, unsigned char *); + STVEBX STVEBX_DEPR4 + void __builtin_vec_stvebx (vsc, signed long, void *); + STVEBX STVEBX_DEPR5 + void __builtin_vec_stvebx (vuc, signed long, void *); + STVEBX STVEBX_DEPR6 + +[VEC_STVEHX, vec_stvehx, __builtin_vec_stvehx] + void __builtin_vec_stvehx (vss, signed long, signed short *); + STVEHX STVEHX_DEPR1 + void __builtin_vec_stvehx (vus, signed long, unsigned short *); + STVEHX STVEHX_DEPR2 + void __builtin_vec_stvehx (vbs, signed long, signed short *); + STVEHX STVEHX_DEPR3 + void __builtin_vec_stvehx (vbs, signed long, unsigned short *); + STVEHX STVEHX_DEPR4 + void __builtin_vec_stvehx (vss, signed long, void *); + STVEHX STVEHX_DEPR5 + void __builtin_vec_stvehx (vus, signed long, void *); + STVEHX STVEHX_DEPR6 + +[VEC_STVEWX, vec_stvewx, __builtin_vec_stvewx] + void __builtin_vec_stvewx (vf, signed long, float *); + STVEWX STVEWX_DEPR1 + void __builtin_vec_stvewx (vsi, signed long, signed int *); + STVEWX STVEWX_DEPR2 + void __builtin_vec_stvewx (vui, signed long, unsigned int *); + STVEWX STVEWX_DEPR3 + void __builtin_vec_stvewx (vbi, signed long, signed int *); + STVEWX STVEWX_DEPR4 + void __builtin_vec_stvewx (vbi, signed long, unsigned int *); + STVEWX STVEWX_DEPR5 + void __builtin_vec_stvewx (vf, signed long, void *); + STVEWX STVEWX_DEPR6 + void __builtin_vec_stvewx (vsi, signed long, void *); + STVEWX STVEWX_DEPR7 + void __builtin_vec_stvewx (vui, signed long, void *); + STVEWX STVEWX_DEPR8 + +[VEC_TSTSFI_EQ_DD, SKIP, __builtin_dfp_dtstsfi_eq_dd, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_eq_dd (const int, _Decimal64); + TSTSFI_EQ_DD TSTSFI_EQ_DD_DEPR1 + +[VEC_TSTSFI_EQ_TD, SKIP, __builtin_dfp_dtstsfi_eq_td, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_eq_td (const int, _Decimal128); + TSTSFI_EQ_TD TSTSFI_EQ_TD_DEPR1 + +[VEC_TSTSFI_GT_DD, SKIP, __builtin_dfp_dtstsfi_gt_dd, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_gt_dd (const int, _Decimal64); + TSTSFI_GT_DD TSTSFI_GT_DD_DEPR1 + +[VEC_TSTSFI_GT_TD, SKIP, __builtin_dfp_dtstsfi_gt_td, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_gt_td (const int, _Decimal128); + TSTSFI_GT_TD TSTSFI_GT_TD_DEPR1 + +[VEC_TSTSFI_LT_DD, SKIP, __builtin_dfp_dtstsfi_lt_dd, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_lt_dd (const int, _Decimal64); + TSTSFI_LT_DD TSTSFI_LT_DD_DEPR1 + +[VEC_TSTSFI_LT_TD, SKIP, __builtin_dfp_dtstsfi_lt_td, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_lt_td (const int, _Decimal128); + TSTSFI_LT_TD TSTSFI_LT_TD_DEPR1 + +[VEC_TSTSFI_OV_DD, SKIP,
__builtin_dfp_dtstsfi_ov_dd, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_ov_dd (const int, _Decimal64); + TSTSFI_OV_DD TSTSFI_OV_DD_DEPR1 + +[VEC_TSTSFI_OV_TD, SKIP, __builtin_dfp_dtstsfi_ov_td, _ARCH_PWR9] + signed int __builtin_dfp_dtstsfi_ov_td (const int, _Decimal128); + TSTSFI_OV_TD TSTSFI_OV_TD_DEPR1 + +[VEC_VADDCUQ, vec_vaddcuq, __builtin_vec_vaddcuq, _ARCH_PWR8] + vsq __builtin_vec_vaddcuq (vsq, vsq); + VADDCUQ VADDCUQ_DEPR1 + vuq __builtin_vec_vaddcuq (vuq, vuq); + VADDCUQ VADDCUQ_DEPR2 + +[VEC_VADDECUQ, vec_vaddecuq, __builtin_vec_vaddecuq, _ARCH_PWR8] + vsq __builtin_vec_vaddecuq (vsq, vsq, vsq); + VADDECUQ VADDECUQ_DEPR1 + vuq __builtin_vec_vaddecuq (vuq, vuq, vuq); + VADDECUQ VADDECUQ_DEPR2 + +[VEC_VADDEUQM, vec_vaddeuqm, __builtin_vec_vaddeuqm, _ARCH_PWR8] + vsq __builtin_vec_vaddeuqm (vsq, vsq, vsq); + VADDEUQM VADDEUQM_DEPR1 + vuq __builtin_vec_vaddeuqm (vuq, vuq, vuq); + VADDEUQM VADDEUQM_DEPR2 + +[VEC_VADDFP, vec_vaddfp, __builtin_vec_vaddfp] + vf __builtin_vec_vaddfp (vf, vf); + VADDFP VADDFP_DEPR1 + +[VEC_VADDSBS, vec_vaddsbs, __builtin_vec_vaddsbs] + vsc __builtin_vec_vaddsbs (vsc, vsc); + VADDSBS VADDSBS_DEPR1 + vsc __builtin_vec_vaddsbs (vbc, vsc); + VADDSBS VADDSBS_DEPR2 + vsc __builtin_vec_vaddsbs (vsc, vbc); + VADDSBS VADDSBS_DEPR3 + +[VEC_VADDSHS, vec_vaddshs, __builtin_vec_vaddshs] + vss __builtin_vec_vaddshs (vss, vss); + VADDSHS VADDSHS_DEPR1 + vss __builtin_vec_vaddshs (vbs, vss); + VADDSHS VADDSHS_DEPR2 + vss __builtin_vec_vaddshs (vss, vbs); + VADDSHS VADDSHS_DEPR3 + +[VEC_VADDSWS, vec_vaddsws, __builtin_vec_vaddsws] + vsi __builtin_vec_vaddsws (vsi, vsi); + VADDSWS VADDSWS_DEPR1 + vsi __builtin_vec_vaddsws (vbi, vsi); + VADDSWS VADDSWS_DEPR2 + vsi __builtin_vec_vaddsws (vsi, vbi); + VADDSWS VADDSWS_DEPR3 + +[VEC_VADDUBM, vec_vaddubm, __builtin_vec_vaddubm] + vsc __builtin_vec_vaddubm (vsc, vsc); + VADDUBM VADDUBM_DEPR1 + vuc __builtin_vec_vaddubm (vsc, vuc); + VADDUBM VADDUBM_DEPR2 + vuc __builtin_vec_vaddubm (vuc, vsc); + VADDUBM VADDUBM_DEPR3 + vuc __builtin_vec_vaddubm (vuc, vuc); + VADDUBM VADDUBM_DEPR4 + vsc __builtin_vec_vaddubm (vbc, vsc); + VADDUBM VADDUBM_DEPR5 + vsc __builtin_vec_vaddubm (vsc, vbc); + VADDUBM VADDUBM_DEPR6 + vuc __builtin_vec_vaddubm (vbc, vuc); + VADDUBM VADDUBM_DEPR7 + vuc __builtin_vec_vaddubm (vuc, vbc); + VADDUBM VADDUBM_DEPR8 + +[VEC_VADDUBS, vec_vaddubs, __builtin_vec_vaddubs] + vuc __builtin_vec_vaddubs (vsc, vuc); + VADDUBS VADDUBS_DEPR1 + vuc __builtin_vec_vaddubs (vuc, vsc); + VADDUBS VADDUBS_DEPR2 + vuc __builtin_vec_vaddubs (vuc, vuc); + VADDUBS VADDUBS_DEPR3 + vuc __builtin_vec_vaddubs (vbc, vuc); + VADDUBS VADDUBS_DEPR4 + vuc __builtin_vec_vaddubs (vuc, vbc); + VADDUBS VADDUBS_DEPR5 + +[VEC_VADDUDM, vec_vaddudm, __builtin_vec_vaddudm, _ARCH_PWR8] + vsll __builtin_vec_vaddudm (vbll, vsll); + VADDUDM VADDUDM_DEPR1 + vsll __builtin_vec_vaddudm (vsll, vbll); + VADDUDM VADDUDM_DEPR2 + vsll __builtin_vec_vaddudm (vsll, vsll); + VADDUDM VADDUDM_DEPR3 + vull __builtin_vec_vaddudm (vbll, vull); + VADDUDM VADDUDM_DEPR4 + vull __builtin_vec_vaddudm (vull, vbll); + VADDUDM VADDUDM_DEPR5 + vull __builtin_vec_vaddudm (vull, vull); + VADDUDM VADDUDM_DEPR6 + +[VEC_VADDUHM, vec_vadduhm, __builtin_vec_vadduhm] + vss __builtin_vec_vadduhm (vss, vss); + VADDUHM VADDUHM_DEPR1 + vus __builtin_vec_vadduhm (vss, vus); + VADDUHM VADDUHM_DEPR2 + vus __builtin_vec_vadduhm (vus, vss); + VADDUHM VADDUHM_DEPR3 + vus __builtin_vec_vadduhm (vus, vus); + VADDUHM VADDUHM_DEPR4 + vss __builtin_vec_vadduhm (vbs, vss); + VADDUHM VADDUHM_DEPR5 + vss 
+[VEC_VADDUHM, vec_vadduhm, __builtin_vec_vadduhm]
+  vss __builtin_vec_vadduhm (vss, vss);
+    VADDUHM VADDUHM_DEPR1
+  vus __builtin_vec_vadduhm (vss, vus);
+    VADDUHM VADDUHM_DEPR2
+  vus __builtin_vec_vadduhm (vus, vss);
+    VADDUHM VADDUHM_DEPR3
+  vus __builtin_vec_vadduhm (vus, vus);
+    VADDUHM VADDUHM_DEPR4
+  vss __builtin_vec_vadduhm (vbs, vss);
+    VADDUHM VADDUHM_DEPR5
+  vss __builtin_vec_vadduhm (vss, vbs);
+    VADDUHM VADDUHM_DEPR6
+  vus __builtin_vec_vadduhm (vbs, vus);
+    VADDUHM VADDUHM_DEPR7
+  vus __builtin_vec_vadduhm (vus, vbs);
+    VADDUHM VADDUHM_DEPR8
+
+[VEC_VADDUHS, vec_vadduhs, __builtin_vec_vadduhs]
+  vus __builtin_vec_vadduhs (vss, vus);
+    VADDUHS VADDUHS_DEPR1
+  vus __builtin_vec_vadduhs (vus, vss);
+    VADDUHS VADDUHS_DEPR2
+  vus __builtin_vec_vadduhs (vus, vus);
+    VADDUHS VADDUHS_DEPR3
+  vus __builtin_vec_vadduhs (vbs, vus);
+    VADDUHS VADDUHS_DEPR4
+  vus __builtin_vec_vadduhs (vus, vbs);
+    VADDUHS VADDUHS_DEPR5
+
+[VEC_VADDUQM, vec_vadduqm, __builtin_vec_vadduqm, _ARCH_PWR8]
+  vsq __builtin_vec_vadduqm (vsq, vsq);
+    VADDUQM VADDUQM_DEPR1
+  vuq __builtin_vec_vadduqm (vuq, vuq);
+    VADDUQM VADDUQM_DEPR2
+
+[VEC_VADDUWM, vec_vadduwm, __builtin_vec_vadduwm]
+  vsi __builtin_vec_vadduwm (vsi, vsi);
+    VADDUWM VADDUWM_DEPR1
+  vui __builtin_vec_vadduwm (vsi, vui);
+    VADDUWM VADDUWM_DEPR2
+  vui __builtin_vec_vadduwm (vui, vsi);
+    VADDUWM VADDUWM_DEPR3
+  vui __builtin_vec_vadduwm (vui, vui);
+    VADDUWM VADDUWM_DEPR4
+  vsi __builtin_vec_vadduwm (vbi, vsi);
+    VADDUWM VADDUWM_DEPR5
+  vsi __builtin_vec_vadduwm (vsi, vbi);
+    VADDUWM VADDUWM_DEPR6
+  vui __builtin_vec_vadduwm (vbi, vui);
+    VADDUWM VADDUWM_DEPR7
+  vui __builtin_vec_vadduwm (vui, vbi);
+    VADDUWM VADDUWM_DEPR8
+
+[VEC_VADDUWS, vec_vadduws, __builtin_vec_vadduws]
+  vui __builtin_vec_vadduws (vsi, vui);
+    VADDUWS VADDUWS_DEPR1
+  vui __builtin_vec_vadduws (vui, vsi);
+    VADDUWS VADDUWS_DEPR2
+  vui __builtin_vec_vadduws (vui, vui);
+    VADDUWS VADDUWS_DEPR3
+  vui __builtin_vec_vadduws (vbi, vui);
+    VADDUWS VADDUWS_DEPR4
+  vui __builtin_vec_vadduws (vui, vbi);
+    VADDUWS VADDUWS_DEPR5
+
+[VEC_VADUB, vec_absdb, __builtin_vec_vadub]
+  vuc __builtin_vec_vadub (vuc, vuc);
+    VADUB VADUB_DEPR1
+
+[VEC_VADUH, vec_absdh, __builtin_vec_vaduh]
+  vus __builtin_vec_vaduh (vus, vus);
+    VADUH VADUH_DEPR1
+
+[VEC_VADUW, vec_absdw, __builtin_vec_vaduw]
+  vui __builtin_vec_vaduw (vui, vui);
+    VADUW VADUW_DEPR1
+
+[VEC_VAVGSB, vec_vavgsb, __builtin_vec_vavgsb]
+  vsc __builtin_vec_vavgsb (vsc, vsc);
+    VAVGSB VAVGSB_DEPR1
+
+[VEC_VAVGSH, vec_vavgsh, __builtin_vec_vavgsh]
+  vss __builtin_vec_vavgsh (vss, vss);
+    VAVGSH VAVGSH_DEPR1
+
+[VEC_VAVGSW, vec_vavgsw, __builtin_vec_vavgsw]
+  vsi __builtin_vec_vavgsw (vsi, vsi);
+    VAVGSW VAVGSW_DEPR1
+
+[VEC_VAVGUB, vec_vavgub, __builtin_vec_vavgub]
+  vuc __builtin_vec_vavgub (vuc, vuc);
+    VAVGUB VAVGUB_DEPR1
+
+[VEC_VAVGUH, vec_vavguh, __builtin_vec_vavguh]
+  vus __builtin_vec_vavguh (vus, vus);
+    VAVGUH VAVGUH_DEPR1
+
+[VEC_VAVGUW, vec_vavguw, __builtin_vec_vavguw]
+  vui __builtin_vec_vavguw (vui, vui);
+    VAVGUW VAVGUW_DEPR1
+
+[VEC_VBPERMQ, vec_vbpermq, __builtin_vec_vbpermq, _ARCH_PWR8]
+  vull __builtin_vec_vbpermq (vull, vuc);
+    VBPERMQ VBPERMQ_DEPR1
+  vsll __builtin_vec_vbpermq (vsc, vsc);
+    VBPERMQ VBPERMQ_DEPR2
+  vull __builtin_vec_vbpermq (vuc, vuc);
+    VBPERMQ VBPERMQ_DEPR3
+  vull __builtin_vec_vbpermq (vuq, vuc);
+    VBPERMQ VBPERMQ_DEPR4
+
+[VEC_VCFSX, vec_vcfsx, __builtin_vec_vcfsx]
+  vf __builtin_vec_vcfsx (vsi, const int);
+    VCFSX VCFSX_DEPR1
+
+[VEC_VCFUX, vec_vcfux, __builtin_vec_vcfux]
+  vf __builtin_vec_vcfux (vui, const int);
+    VCFUX VCFUX_DEPR1
+
+[VEC_VCLZB, vec_vclzb, __builtin_vec_vclzb, _ARCH_PWR8]
+  vsc __builtin_vec_vclzb (vsc);
+    VCLZB VCLZB_DEPR1
+  vuc __builtin_vec_vclzb (vuc);
+    VCLZB VCLZB_DEPR2
+
+[VEC_VCLZD, vec_vclzd, __builtin_vec_vclzd, _ARCH_PWR8]
+  vsll __builtin_vec_vclzd (vsll);
+    VCLZD VCLZD_DEPR1
+  vull __builtin_vec_vclzd (vull);
+    VCLZD VCLZD_DEPR2
+
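+; One of the count-leading-zeros overloads above, exercised as a sketch
+; (hypothetical function name; values are illustrative; assumes Power8,
+; -mcpu=power8, and <altivec.h>):
+;
+;   #include <altivec.h>
+;   vector unsigned long long clz_demo (void)
+;   {
+;     vector unsigned long long x = { 1ULL, 0x8000000000000000ULL };
+;     return vec_vclzd (x);  /* yields { 63, 0 } */
+;   }
+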
+[VEC_VCLZH, vec_vclzh, __builtin_vec_vclzh, _ARCH_PWR8]
+  vss __builtin_vec_vclzh (vss);
+    VCLZH VCLZH_DEPR1
+  vus __builtin_vec_vclzh (vus);
+    VCLZH VCLZH_DEPR2
+
+[VEC_VCLZW, vec_vclzw, __builtin_vec_vclzw, _ARCH_PWR8]
+  vsi __builtin_vec_vclzw (vsi);
+    VCLZW VCLZW_DEPR1
+  vui __builtin_vec_vclzw (vui);
+    VCLZW VCLZW_DEPR2
+
+[VEC_VCMPEQFP, vec_vcmpeqfp, __builtin_vec_vcmpeqfp]
+  vbi __builtin_vec_vcmpeqfp (vf, vf);
+    VCMPEQFP VCMPEQFP_DEPR1
+
+[VEC_VCMPEQUB, vec_vcmpequb, __builtin_vec_vcmpequb]
+  vbc __builtin_vec_vcmpequb (vsc, vsc);
+    VCMPEQUB VCMPEQUB_DEPR1
+  vbc __builtin_vec_vcmpequb (vuc, vuc);
+    VCMPEQUB VCMPEQUB_DEPR2
+
+[VEC_VCMPEQUH, vec_vcmpequh, __builtin_vec_vcmpequh]
+  vbs __builtin_vec_vcmpequh (vss, vss);
+    VCMPEQUH VCMPEQUH_DEPR1
+  vbs __builtin_vec_vcmpequh (vus, vus);
+    VCMPEQUH VCMPEQUH_DEPR2
+
+[VEC_VCMPEQUW, vec_vcmpequw, __builtin_vec_vcmpequw]
+  vbi __builtin_vec_vcmpequw (vsi, vsi);
+    VCMPEQUW VCMPEQUW_DEPR1
+  vbi __builtin_vec_vcmpequw (vui, vui);
+    VCMPEQUW VCMPEQUW_DEPR2
+
+[VEC_VCMPGTFP, vec_vcmpgtfp, __builtin_vec_vcmpgtfp]
+  vbi __builtin_vec_vcmpgtfp (vf, vf);
+    VCMPGTFP VCMPGTFP_DEPR1
+
+[VEC_VCMPGTSB, vec_vcmpgtsb, __builtin_vec_vcmpgtsb]
+  vbc __builtin_vec_vcmpgtsb (vsc, vsc);
+    VCMPGTSB VCMPGTSB_DEPR1
+
+[VEC_VCMPGTSH, vec_vcmpgtsh, __builtin_vec_vcmpgtsh]
+  vbs __builtin_vec_vcmpgtsh (vss, vss);
+    VCMPGTSH VCMPGTSH_DEPR1
+
+[VEC_VCMPGTSW, vec_vcmpgtsw, __builtin_vec_vcmpgtsw]
+  vbi __builtin_vec_vcmpgtsw (vsi, vsi);
+    VCMPGTSW VCMPGTSW_DEPR1
+
+[VEC_VCMPGTUB, vec_vcmpgtub, __builtin_vec_vcmpgtub]
+  vbc __builtin_vec_vcmpgtub (vuc, vuc);
+    VCMPGTUB VCMPGTUB_DEPR1
+
+[VEC_VCMPGTUH, vec_vcmpgtuh, __builtin_vec_vcmpgtuh]
+  vbs __builtin_vec_vcmpgtuh (vus, vus);
+    VCMPGTUH VCMPGTUH_DEPR1
+
+[VEC_VCMPGTUW, vec_vcmpgtuw, __builtin_vec_vcmpgtuw]
+  vbi __builtin_vec_vcmpgtuw (vui, vui);
+    VCMPGTUW VCMPGTUW_DEPR1
+
+[VEC_VCTZB, vec_vctzb, __builtin_vec_vctzb, _ARCH_PWR9]
+  vsc __builtin_vec_vctzb (vsc);
+    VCTZB VCTZB_DEPR1
+  vuc __builtin_vec_vctzb (vuc);
+    VCTZB VCTZB_DEPR2
+
+[VEC_VCTZD, vec_vctzd, __builtin_vec_vctzd, _ARCH_PWR9]
+  vsll __builtin_vec_vctzd (vsll);
+    VCTZD VCTZD_DEPR1
+  vull __builtin_vec_vctzd (vull);
+    VCTZD VCTZD_DEPR2
+
+[VEC_VCTZH, vec_vctzh, __builtin_vec_vctzh, _ARCH_PWR9]
+  vss __builtin_vec_vctzh (vss);
+    VCTZH VCTZH_DEPR1
+  vus __builtin_vec_vctzh (vus);
+    VCTZH VCTZH_DEPR2
+
+[VEC_VCTZW, vec_vctzw, __builtin_vec_vctzw, _ARCH_PWR9]
+  vsi __builtin_vec_vctzw (vsi);
+    VCTZW VCTZW_DEPR1
+  vui __builtin_vec_vctzw (vui);
+    VCTZW VCTZW_DEPR2
+
+[VEC_VEEDP, vec_extract_exp_dp, __builtin_vec_extract_exp_dp, _ARCH_PWR9]
+  vull __builtin_vec_extract_exp_dp (vd);
+    VEEDP VEEDP_DEPR1
+
+[VEC_VEESP, vec_extract_exp_sp, __builtin_vec_extract_exp_sp, _ARCH_PWR9]
+  vui __builtin_vec_extract_exp_sp (vf);
+    VEESP VEESP_DEPR1
+
+[VEC_VESDP, vec_extract_sig_dp, __builtin_vec_extract_sig_dp, _ARCH_PWR9]
+  vull __builtin_vec_extract_sig_dp (vd);
+    VESDP VESDP_DEPR1
+
+[VEC_VESSP, vec_extract_sig_sp, __builtin_vec_extract_sig_sp, _ARCH_PWR9]
+  vui __builtin_vec_extract_sig_sp (vf);
+    VESSP VESSP_DEPR1
+
+[VEC_VIEDP, vec_insert_exp_dp, __builtin_vec_insert_exp_dp, _ARCH_PWR9]
+  vd __builtin_vec_insert_exp_dp (vd, vull);
+    VIEDP VIEDP_DEPR1
+  vd __builtin_vec_insert_exp_dp (vull, vull);
+    VIEDP VIEDP_DEPR2
+
+[VEC_VIESP, vec_insert_exp_sp, __builtin_vec_insert_exp_sp, _ARCH_PWR9]
+  vf __builtin_vec_insert_exp_sp (vf, vui);
+    VIESP VIESP_DEPR1
+  vf __builtin_vec_insert_exp_sp (vui, vui);
+    VIESP VIESP_DEPR2
+
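+; The IEEE decomposition entries above can be exercised like this (a
+; sketch; hypothetical function and variable names; assumes Power9 and
+; <altivec.h>):
+;
+;   #include <altivec.h>
+;   void decompose (vector double d,
+;                   vector unsigned long long *exp,
+;                   vector unsigned long long *sig)
+;   {
+;     *exp = vec_extract_exp_dp (d);  /* biased exponent per element */
+;     *sig = vec_extract_sig_dp (d);  /* trailing significand per element */
+;   }
+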
+[VEC_VMAXFP, vec_vmaxfp, __builtin_vec_vmaxfp]
+  vf __builtin_vec_vmaxfp (vf, vf);
+    VMAXFP VMAXFP_DEPR1
+
+[VEC_VMAXSB, vec_vmaxsb, __builtin_vec_vmaxsb]
+  vsc __builtin_vec_vmaxsb (vsc, vsc);
+    VMAXSB VMAXSB_DEPR1
+  vsc __builtin_vec_vmaxsb (vbc, vsc);
+    VMAXSB VMAXSB_DEPR2
+  vsc __builtin_vec_vmaxsb (vsc, vbc);
+    VMAXSB VMAXSB_DEPR3
+
+[VEC_VMAXSD, vec_vmaxsd, __builtin_vec_vmaxsd]
+  vsll __builtin_vec_vmaxsd (vsll, vsll);
+    VMAXSD VMAXSD_DEPR1
+  vsll __builtin_vec_vmaxsd (vbll, vsll);
+    VMAXSD VMAXSD_DEPR2
+  vsll __builtin_vec_vmaxsd (vsll, vbll);
+    VMAXSD VMAXSD_DEPR3
+
+[VEC_VMAXSH, vec_vmaxsh, __builtin_vec_vmaxsh]
+  vss __builtin_vec_vmaxsh (vss, vss);
+    VMAXSH VMAXSH_DEPR1
+  vss __builtin_vec_vmaxsh (vbs, vss);
+    VMAXSH VMAXSH_DEPR2
+  vss __builtin_vec_vmaxsh (vss, vbs);
+    VMAXSH VMAXSH_DEPR3
+
+[VEC_VMAXSW, vec_vmaxsw, __builtin_vec_vmaxsw]
+  vsi __builtin_vec_vmaxsw (vsi, vsi);
+    VMAXSW VMAXSW_DEPR1
+  vsi __builtin_vec_vmaxsw (vbi, vsi);
+    VMAXSW VMAXSW_DEPR2
+  vsi __builtin_vec_vmaxsw (vsi, vbi);
+    VMAXSW VMAXSW_DEPR3
+
+[VEC_VMAXUB, vec_vmaxub, __builtin_vec_vmaxub]
+  vuc __builtin_vec_vmaxub (vsc, vuc);
+    VMAXUB VMAXUB_DEPR1
+  vuc __builtin_vec_vmaxub (vuc, vsc);
+    VMAXUB VMAXUB_DEPR2
+  vuc __builtin_vec_vmaxub (vuc, vuc);
+    VMAXUB VMAXUB_DEPR3
+  vuc __builtin_vec_vmaxub (vbc, vuc);
+    VMAXUB VMAXUB_DEPR4
+  vuc __builtin_vec_vmaxub (vuc, vbc);
+    VMAXUB VMAXUB_DEPR5
+
+[VEC_VMAXUD, vec_vmaxud, __builtin_vec_vmaxud]
+  vull __builtin_vec_vmaxud (vull, vull);
+    VMAXUD VMAXUD_DEPR1
+  vull __builtin_vec_vmaxud (vbll, vull);
+    VMAXUD VMAXUD_DEPR2
+  vull __builtin_vec_vmaxud (vull, vbll);
+    VMAXUD VMAXUD_DEPR3
+
+[VEC_VMAXUH, vec_vmaxuh, __builtin_vec_vmaxuh]
+  vus __builtin_vec_vmaxuh (vss, vus);
+    VMAXUH VMAXUH_DEPR1
+  vus __builtin_vec_vmaxuh (vus, vss);
+    VMAXUH VMAXUH_DEPR2
+  vus __builtin_vec_vmaxuh (vus, vus);
+    VMAXUH VMAXUH_DEPR3
+  vus __builtin_vec_vmaxuh (vbs, vus);
+    VMAXUH VMAXUH_DEPR4
+  vus __builtin_vec_vmaxuh (vus, vbs);
+    VMAXUH VMAXUH_DEPR5
+
+[VEC_VMAXUW, vec_vmaxuw, __builtin_vec_vmaxuw]
+  vui __builtin_vec_vmaxuw (vsi, vui);
+    VMAXUW VMAXUW_DEPR1
+  vui __builtin_vec_vmaxuw (vui, vsi);
+    VMAXUW VMAXUW_DEPR2
+  vui __builtin_vec_vmaxuw (vui, vui);
+    VMAXUW VMAXUW_DEPR3
+  vui __builtin_vec_vmaxuw (vbi, vui);
+    VMAXUW VMAXUW_DEPR4
+  vui __builtin_vec_vmaxuw (vui, vbi);
+    VMAXUW VMAXUW_DEPR5
+
+[VEC_VMINFP, vec_vminfp, __builtin_vec_vminfp]
+  vf __builtin_vec_vminfp (vf, vf);
+    VMINFP VMINFP_DEPR1
+
+[VEC_VMINSB, vec_vminsb, __builtin_vec_vminsb]
+  vsc __builtin_vec_vminsb (vsc, vsc);
+    VMINSB VMINSB_DEPR1
+  vsc __builtin_vec_vminsb (vbc, vsc);
+    VMINSB VMINSB_DEPR2
+  vsc __builtin_vec_vminsb (vsc, vbc);
+    VMINSB VMINSB_DEPR3
+
+[VEC_VMINSD, vec_vminsd, __builtin_vec_vminsd]
+  vsll __builtin_vec_vminsd (vsll, vsll);
+    VMINSD VMINSD_DEPR1
+  vsll __builtin_vec_vminsd (vbll, vsll);
+    VMINSD VMINSD_DEPR2
+  vsll __builtin_vec_vminsd (vsll, vbll);
+    VMINSD VMINSD_DEPR3
+
+[VEC_VMINSH, vec_vminsh, __builtin_vec_vminsh]
+  vss __builtin_vec_vminsh (vss, vss);
+    VMINSH VMINSH_DEPR1
+  vss __builtin_vec_vminsh (vbs, vss);
+    VMINSH VMINSH_DEPR2
+  vss __builtin_vec_vminsh (vss, vbs);
+    VMINSH VMINSH_DEPR3
+
+[VEC_VMINSW, vec_vminsw, __builtin_vec_vminsw]
+  vsi __builtin_vec_vminsw (vsi, vsi);
+    VMINSW VMINSW_DEPR1
+  vsi __builtin_vec_vminsw (vbi, vsi);
+    VMINSW VMINSW_DEPR2
+  vsi __builtin_vec_vminsw (vsi, vbi);
+    VMINSW VMINSW_DEPR3
+
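+; The vbc/vbs/vbi forms above let a compare mask feed a min/max directly;
+; a sketch (hypothetical function name; assumes <altivec.h>):
+;
+;   #include <altivec.h>
+;   vector signed char clamp_hi (vector signed char a, vector signed char b)
+;   {
+;     vector bool char m = vec_vcmpgtsb (a, b);
+;     return vec_vmaxsb (m, a);  /* bool operand accepted per the overloads */
+;   }
+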
+[VEC_VMINUB, vec_vminub, __builtin_vec_vminub]
+  vuc __builtin_vec_vminub (vsc, vuc);
+    VMINUB VMINUB_DEPR1
+  vuc __builtin_vec_vminub (vuc, vsc);
+    VMINUB VMINUB_DEPR2
+  vuc __builtin_vec_vminub (vuc, vuc);
+    VMINUB VMINUB_DEPR3
+  vuc __builtin_vec_vminub (vbc, vuc);
+    VMINUB VMINUB_DEPR4
+  vuc __builtin_vec_vminub (vuc, vbc);
+    VMINUB VMINUB_DEPR5
+
+[VEC_VMINUD, vec_vminud, __builtin_vec_vminud]
+  vull __builtin_vec_vminud (vull, vull);
+    VMINUD VMINUD_DEPR1
+  vull __builtin_vec_vminud (vbll, vull);
+    VMINUD VMINUD_DEPR2
+  vull __builtin_vec_vminud (vull, vbll);
+    VMINUD VMINUD_DEPR3
+
+[VEC_VMINUH, vec_vminuh, __builtin_vec_vminuh]
+  vus __builtin_vec_vminuh (vss, vus);
+    VMINUH VMINUH_DEPR1
+  vus __builtin_vec_vminuh (vus, vss);
+    VMINUH VMINUH_DEPR2
+  vus __builtin_vec_vminuh (vus, vus);
+    VMINUH VMINUH_DEPR3
+  vus __builtin_vec_vminuh (vbs, vus);
+    VMINUH VMINUH_DEPR4
+  vus __builtin_vec_vminuh (vus, vbs);
+    VMINUH VMINUH_DEPR5
+
+[VEC_VMINUW, vec_vminuw, __builtin_vec_vminuw]
+  vui __builtin_vec_vminuw (vsi, vui);
+    VMINUW VMINUW_DEPR1
+  vui __builtin_vec_vminuw (vui, vsi);
+    VMINUW VMINUW_DEPR2
+  vui __builtin_vec_vminuw (vui, vui);
+    VMINUW VMINUW_DEPR3
+  vui __builtin_vec_vminuw (vbi, vui);
+    VMINUW VMINUW_DEPR4
+  vui __builtin_vec_vminuw (vui, vbi);
+    VMINUW VMINUW_DEPR5
+
+[VEC_VMRGHB, vec_vmrghb, __builtin_vec_vmrghb]
+  vsc __builtin_vec_vmrghb (vsc, vsc);
+    VMRGHB VMRGHB_DEPR1
+  vuc __builtin_vec_vmrghb (vuc, vuc);
+    VMRGHB VMRGHB_DEPR2
+  vbc __builtin_vec_vmrghb (vbc, vbc);
+    VMRGHB VMRGHB_DEPR3
+
+[VEC_VMRGHH, vec_vmrghh, __builtin_vec_vmrghh]
+  vss __builtin_vec_vmrghh (vss, vss);
+    VMRGHH VMRGHH_DEPR1
+  vus __builtin_vec_vmrghh (vus, vus);
+    VMRGHH VMRGHH_DEPR2
+  vbs __builtin_vec_vmrghh (vbs, vbs);
+    VMRGHH VMRGHH_DEPR3
+  vp __builtin_vec_vmrghh (vp, vp);
+    VMRGHH VMRGHH_DEPR4
+
+[VEC_VMRGHW, vec_vmrghw, __builtin_vec_vmrghw]
+  vf __builtin_vec_vmrghw (vf, vf);
+    VMRGHW VMRGHW_DEPR1
+  vsi __builtin_vec_vmrghw (vsi, vsi);
+    VMRGHW VMRGHW_DEPR2
+  vui __builtin_vec_vmrghw (vui, vui);
+    VMRGHW VMRGHW_DEPR3
+  vbi __builtin_vec_vmrghw (vbi, vbi);
+    VMRGHW VMRGHW_DEPR4
+
+[VEC_VMRGLB, vec_vmrglb, __builtin_vec_vmrglb]
+  vsc __builtin_vec_vmrglb (vsc, vsc);
+    VMRGLB VMRGLB_DEPR1
+  vuc __builtin_vec_vmrglb (vuc, vuc);
+    VMRGLB VMRGLB_DEPR2
+  vbc __builtin_vec_vmrglb (vbc, vbc);
+    VMRGLB VMRGLB_DEPR3
+
+[VEC_VMRGLH, vec_vmrglh, __builtin_vec_vmrglh]
+  vss __builtin_vec_vmrglh (vss, vss);
+    VMRGLH VMRGLH_DEPR1
+  vus __builtin_vec_vmrglh (vus, vus);
+    VMRGLH VMRGLH_DEPR2
+  vbs __builtin_vec_vmrglh (vbs, vbs);
+    VMRGLH VMRGLH_DEPR3
+  vp __builtin_vec_vmrglh (vp, vp);
+    VMRGLH VMRGLH_DEPR4
+
+[VEC_VMRGLW, vec_vmrglw, __builtin_vec_vmrglw]
+  vf __builtin_vec_vmrglw (vf, vf);
+    VMRGLW VMRGLW_DEPR1
+  vsi __builtin_vec_vmrglw (vsi, vsi);
+    VMRGLW VMRGLW_DEPR2
+  vui __builtin_vec_vmrglw (vui, vui);
+    VMRGLW VMRGLW_DEPR3
+  vbi __builtin_vec_vmrglw (vbi, vbi);
+    VMRGLW VMRGLW_DEPR4
+
+[VEC_VMSUMMBM, vec_vmsummbm, __builtin_vec_vmsummbm]
+  vsi __builtin_vec_vmsummbm (vsc, vuc, vsi);
+    VMSUMMBM VMSUMMBM_DEPR1
+
+[VEC_VMSUMSHM, vec_vmsumshm, __builtin_vec_vmsumshm]
+  vsi __builtin_vec_vmsumshm (vss, vss, vsi);
+    VMSUMSHM VMSUMSHM_DEPR1
+
+[VEC_VMSUMSHS, vec_vmsumshs, __builtin_vec_vmsumshs]
+  vsi __builtin_vec_vmsumshs (vss, vss, vsi);
+    VMSUMSHS VMSUMSHS_DEPR1
+
+[VEC_VMSUMUBM, vec_vmsumubm, __builtin_vec_vmsumubm]
+  vui __builtin_vec_vmsumubm (vuc, vuc, vui);
+    VMSUMUBM VMSUMUBM_DEPR1
+
+[VEC_VMSUMUDM, vec_vmsumudm, __builtin_vec_vmsumudm]
+  vuq __builtin_vec_vmsumudm (vull, vull, vuq);
+    VMSUMUDM VMSUMUDM_DEPR1
+
+[VEC_VMSUMUHM, vec_vmsumuhm, __builtin_vec_vmsumuhm]
+  vui __builtin_vec_vmsumuhm (vus, vus, vui);
+    VMSUMUHM VMSUMUHM_DEPR1
+
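+; vec_vmsumubm above yields four 32-bit sums of byte-quad products; a
+; sketch (hypothetical function name; assumes <altivec.h>):
+;
+;   #include <altivec.h>
+;   vector unsigned int dot4 (vector unsigned char a, vector unsigned char b)
+;   {
+;     vector unsigned int acc = { 0, 0, 0, 0 };
+;     return vec_vmsumubm (a, b, acc);  /* acc[i] + a[4i..4i+3].b[4i..4i+3] */
+;   }
+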
+[VEC_VMSUMUHS, vec_vmsumuhs, __builtin_vec_vmsumuhs]
+  vui __builtin_vec_vmsumuhs (vus, vus, vui);
+    VMSUMUHS VMSUMUHS_DEPR1
+
+[VEC_VMULESB, vec_vmulesb, __builtin_vec_vmulesb]
+  vss __builtin_vec_vmulesb (vsc, vsc);
+    VMULESB VMULESB_DEPR1
+
+[VEC_VMULESH, vec_vmulesh, __builtin_vec_vmulesh]
+  vsi __builtin_vec_vmulesh (vss, vss);
+    VMULESH VMULESH_DEPR1
+
+[VEC_VMULESW, SKIP, __builtin_vec_vmulesw]
+  vsll __builtin_vec_vmulesw (vsi, vsi);
+    VMULESW VMULESW_DEPR1
+
+[VEC_VMULEUB, vec_vmuleub, __builtin_vec_vmuleub]
+  vus __builtin_vec_vmuleub (vuc, vuc);
+    VMULEUB VMULEUB_DEPR1
+
+[VEC_VMULEUH, vec_vmuleuh, __builtin_vec_vmuleuh]
+  vui __builtin_vec_vmuleuh (vus, vus);
+    VMULEUH VMULEUH_DEPR1
+
+[VEC_VMULEUW, SKIP, __builtin_vec_vmuleuw]
+  vull __builtin_vec_vmuleuw (vui, vui);
+    VMULEUW VMULEUW_DEPR1
+
+[VEC_VMULOSB, vec_vmulosb, __builtin_vec_vmulosb]
+  vss __builtin_vec_vmulosb (vsc, vsc);
+    VMULOSB VMULOSB_DEPR1
+
+[VEC_VMULOSH, vec_vmulosh, __builtin_vec_vmulosh]
+  vsi __builtin_vec_vmulosh (vss, vss);
+    VMULOSH VMULOSH_DEPR1
+
+[VEC_VMULOSW, SKIP, __builtin_vec_vmulosw]
+  vsll __builtin_vec_vmulosw (vsi, vsi);
+    VMULOSW VMULOSW_DEPR1
+
+[VEC_VMULOUB, vec_vmuloub, __builtin_vec_vmuloub]
+  vus __builtin_vec_vmuloub (vuc, vuc);
+    VMULOUB VMULOUB_DEPR1
+
+[VEC_VMULOUH, vec_vmulouh, __builtin_vec_vmulouh]
+  vui __builtin_vec_vmulouh (vus, vus);
+    VMULOUH VMULOUH_DEPR1
+
+[VEC_VMULOUW, SKIP, __builtin_vec_vmulouw]
+  vull __builtin_vec_vmulouw (vui, vui);
+    VMULOUW VMULOUW_DEPR1
+
+[VEC_VPKSDSS, vec_vpksdss, __builtin_vec_vpksdss, _ARCH_PWR8]
+  vsi __builtin_vec_vpksdss (vsll, vsll);
+    VPKSDSS VPKSDSS_DEPR1
+
+[VEC_VPKSDUS, vec_vpksdus, __builtin_vec_vpksdus, _ARCH_PWR8]
+  vui __builtin_vec_vpksdus (vsll, vsll);
+    VPKSDUS VPKSDUS_DEPR1
+
+[VEC_VPKSHSS, vec_vpkshss, __builtin_vec_vpkshss]
+  vsc __builtin_vec_vpkshss (vss, vss);
+    VPKSHSS VPKSHSS_DEPR1
+
+[VEC_VPKSHUS, vec_vpkshus, __builtin_vec_vpkshus]
+  vuc __builtin_vec_vpkshus (vss, vss);
+    VPKSHUS VPKSHUS_DEPR1
+
+[VEC_VPKSWSS, vec_vpkswss, __builtin_vec_vpkswss]
+  vss __builtin_vec_vpkswss (vsi, vsi);
+    VPKSWSS VPKSWSS_DEPR1
+
+[VEC_VPKSWUS, vec_vpkswus, __builtin_vec_vpkswus]
+  vus __builtin_vec_vpkswus (vsi, vsi);
+    VPKSWUS VPKSWUS_DEPR1
+
+[VEC_VPKUDUM, vec_vpkudum, __builtin_vec_vpkudum, _ARCH_PWR8]
+  vsi __builtin_vec_vpkudum (vsll, vsll);
+    VPKUDUM VPKUDUM_DEPR1
+  vui __builtin_vec_vpkudum (vull, vull);
+    VPKUDUM VPKUDUM_DEPR2
+  vbi __builtin_vec_vpkudum (vbll, vbll);
+    VPKUDUM VPKUDUM_DEPR3
+
+[VEC_VPKUDUS, vec_vpkudus, __builtin_vec_vpkudus, _ARCH_PWR8]
+  vui __builtin_vec_vpkudus (vull, vull);
+    VPKUDUS VPKUDUS_DEPR1
+
+[VEC_VPKUHUM, vec_vpkuhum, __builtin_vec_vpkuhum]
+  vsc __builtin_vec_vpkuhum (vss, vss);
+    VPKUHUM VPKUHUM_DEPR1
+  vuc __builtin_vec_vpkuhum (vus, vus);
+    VPKUHUM VPKUHUM_DEPR2
+  vbc __builtin_vec_vpkuhum (vbs, vbs);
+    VPKUHUM VPKUHUM_DEPR3
+
+[VEC_VPKUHUS, vec_vpkuhus, __builtin_vec_vpkuhus]
+  vuc __builtin_vec_vpkuhus (vus, vus);
+    VPKUHUS VPKUHUS_DEPR1
+
+[VEC_VPKUWUM, vec_vpkuwum, __builtin_vec_vpkuwum]
+  vss __builtin_vec_vpkuwum (vsi, vsi);
+    VPKUWUM VPKUWUM_DEPR1
+  vus __builtin_vec_vpkuwum (vui, vui);
+    VPKUWUM VPKUWUM_DEPR2
+  vbs __builtin_vec_vpkuwum (vbi, vbi);
+    VPKUWUM VPKUWUM_DEPR3
+
+[VEC_VPKUWUS, vec_vpkuwus, __builtin_vec_vpkuwus]
+  vus __builtin_vec_vpkuwus (vui, vui);
+    VPKUWUS VPKUWUS_DEPR1
+
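+; The even/odd multiplies above widen each product to the next element
+; size; a sketch (hypothetical function name; assumes <altivec.h>):
+;
+;   #include <altivec.h>
+;   void widen_mul (vector unsigned char a, vector unsigned char b,
+;                   vector unsigned short *ev, vector unsigned short *od)
+;   {
+;     *ev = vec_vmuleub (a, b);  /* products of even-numbered bytes */
+;     *od = vec_vmuloub (a, b);  /* products of odd-numbered bytes */
+;   }
+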
+[VEC_VPOPCNT, vec_vpopcnt, __builtin_vec_vpopcnt, _ARCH_PWR8]
+  vsc __builtin_vec_vpopcnt (vsc);
+    VPOPCNTB VPOPCNT_DEPR1
+  vuc __builtin_vec_vpopcnt (vuc);
+    VPOPCNTB VPOPCNT_DEPR2
+  vss __builtin_vec_vpopcnt (vss);
+    VPOPCNTH VPOPCNT_DEPR3
+  vus __builtin_vec_vpopcnt (vus);
+    VPOPCNTH VPOPCNT_DEPR4
+  vsi __builtin_vec_vpopcnt (vsi);
+    VPOPCNTW VPOPCNT_DEPR5
+  vui __builtin_vec_vpopcnt (vui);
+    VPOPCNTW VPOPCNT_DEPR6
+  vsll __builtin_vec_vpopcnt (vsll);
+    VPOPCNTD VPOPCNT_DEPR7
+  vull __builtin_vec_vpopcnt (vull);
+    VPOPCNTD VPOPCNT_DEPR8
+
+[VEC_VPOPCNTB, vec_vpopcntb, __builtin_vec_vpopcntb, _ARCH_PWR8]
+  vsc __builtin_vec_vpopcntb (vsc);
+    VPOPCNTB VPOPCNTB_DEPR1
+  vuc __builtin_vec_vpopcntb (vuc);
+    VPOPCNTB VPOPCNTB_DEPR2
+
+[VEC_VPOPCNTD, vec_vpopcntd, __builtin_vec_vpopcntd, _ARCH_PWR8]
+  vsll __builtin_vec_vpopcntd (vsll);
+    VPOPCNTD VPOPCNTD_DEPR1
+  vull __builtin_vec_vpopcntd (vull);
+    VPOPCNTD VPOPCNTD_DEPR2
+
+[VEC_VPOPCNTH, vec_vpopcnth, __builtin_vec_vpopcnth, _ARCH_PWR8]
+  vss __builtin_vec_vpopcnth (vss);
+    VPOPCNTH VPOPCNTH_DEPR1
+  vus __builtin_vec_vpopcnth (vus);
+    VPOPCNTH VPOPCNTH_DEPR2
+
+[VEC_VPOPCNTW, vec_vpopcntw, __builtin_vec_vpopcntw, _ARCH_PWR8]
+  vsi __builtin_vec_vpopcntw (vsi);
+    VPOPCNTW VPOPCNTW_DEPR1
+  vui __builtin_vec_vpopcntw (vui);
+    VPOPCNTW VPOPCNTW_DEPR2
+
+[VEC_VPRTYBD, vec_vprtybd, __builtin_vec_vprtybd, _ARCH_PWR9]
+  vsll __builtin_vec_vprtybd (vsll);
+    VPRTYBD VPRTYBD_DEPR1
+  vull __builtin_vec_vprtybd (vull);
+    VPRTYBD VPRTYBD_DEPR2
+
+[VEC_VPRTYBQ, vec_vprtybq, __builtin_vec_vprtybq, _ARCH_PPC64_PWR9]
+  vsq __builtin_vec_vprtybq (vsq);
+    VPRTYBQ VPRTYBQ_DEPR1
+  vuq __builtin_vec_vprtybq (vuq);
+    VPRTYBQ VPRTYBQ_DEPR2
+  signed __int128 __builtin_vec_vprtybq (signed __int128);
+    VPRTYBQ VPRTYBQ_DEPR3
+  unsigned __int128 __builtin_vec_vprtybq (unsigned __int128);
+    VPRTYBQ VPRTYBQ_DEPR4
+
+[VEC_VPRTYBW, vec_vprtybw, __builtin_vec_vprtybw, _ARCH_PWR9]
+  vsi __builtin_vec_vprtybw (vsi);
+    VPRTYBW VPRTYBW_DEPR1
+  vui __builtin_vec_vprtybw (vui);
+    VPRTYBW VPRTYBW_DEPR2
+
+[VEC_VRLB, vec_vrlb, __builtin_vec_vrlb]
+  vsc __builtin_vec_vrlb (vsc, vuc);
+    VRLB VRLB_DEPR1
+  vuc __builtin_vec_vrlb (vuc, vuc);
+    VRLB VRLB_DEPR2
+
+[VEC_VRLD, SKIP, __builtin_vec_vrld, _ARCH_PWR8]
+  vsll __builtin_vec_vrld (vsll, vull);
+    VRLD VRLD_DEPR1
+  vull __builtin_vec_vrld (vull, vull);
+    VRLD VRLD_DEPR2
+
+[VEC_VRLH, vec_vrlh, __builtin_vec_vrlh]
+  vss __builtin_vec_vrlh (vss, vus);
+    VRLH VRLH_DEPR1
+  vus __builtin_vec_vrlh (vus, vus);
+    VRLH VRLH_DEPR2
+
+[VEC_VRLW, vec_vrlw, __builtin_vec_vrlw]
+  vsi __builtin_vec_vrlw (vsi, vui);
+    VRLW VRLW_DEPR1
+  vui __builtin_vec_vrlw (vui, vui);
+    VRLW VRLW_DEPR2
+
+[VEC_VSLB, vec_vslb, __builtin_vec_vslb]
+  vsc __builtin_vec_vslb (vsc, vuc);
+    VSLB VSLB_DEPR1
+  vuc __builtin_vec_vslb (vuc, vuc);
+    VSLB VSLB_DEPR2
+
+[VEC_VSLD, SKIP, __builtin_vec_vsld, _ARCH_PWR8]
+  vsll __builtin_vec_vsld (vsll, vull);
+    VSLD VSLD_DEPR1
+  vull __builtin_vec_vsld (vull, vull);
+    VSLD VSLD_DEPR2
+
+[VEC_VSLH, vec_vslh, __builtin_vec_vslh]
+  vss __builtin_vec_vslh (vss, vus);
+    VSLH VSLH_DEPR1
+  vus __builtin_vec_vslh (vus, vus);
+    VSLH VSLH_DEPR2
+
+[VEC_VSLW, vec_vslw, __builtin_vec_vslw]
+  vsi __builtin_vec_vslw (vsi, vui);
+    VSLW VSLW_DEPR1
+  vui __builtin_vec_vslw (vui, vui);
+    VSLW VSLW_DEPR2
+
+[VEC_VSPLTB, vec_vspltb, __builtin_vec_vspltb]
+  vsc __builtin_vec_vspltb (vsc, const int);
+    VSPLTB VSPLTB_DEPR1
+  vuc __builtin_vec_vspltb (vuc, const int);
+    VSPLTB VSPLTB_DEPR2
+  vbc __builtin_vec_vspltb (vbc, const int);
+    VSPLTB VSPLTB_DEPR3
+
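+; The splat entries above require a literal element index; a sketch
+; (hypothetical function name; assumes <altivec.h>):
+;
+;   #include <altivec.h>
+;   vector unsigned char bcast3 (vector unsigned char v)
+;   {
+;     return vec_vspltb (v, 3);  /* one byte replicated to all 16 lanes */
+;   }
+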
+[VEC_VSPLTH, vec_vsplth, __builtin_vec_vsplth]
+  vss __builtin_vec_vsplth (vss, const int);
+    VSPLTH VSPLTH_DEPR1
+  vus __builtin_vec_vsplth (vus, const int);
+    VSPLTH VSPLTH_DEPR2
+  vbs __builtin_vec_vsplth (vbs, const int);
+    VSPLTH VSPLTH_DEPR3
+  vp __builtin_vec_vsplth (vp, const int);
+    VSPLTH VSPLTH_DEPR4
+
+[VEC_VSPLTW, vec_vspltw, __builtin_vec_vspltw]
+  vsi __builtin_vec_vspltw (vsi, const int);
+    VSPLTW VSPLTW_DEPR1
+  vui __builtin_vec_vspltw (vui, const int);
+    VSPLTW VSPLTW_DEPR2
+  vbi __builtin_vec_vspltw (vbi, const int);
+    VSPLTW VSPLTW_DEPR3
+  vf __builtin_vec_vspltw (vf, const int);
+    VSPLTW VSPLTW_DEPR4
+
+[VEC_VSRAB, vec_vsrab, __builtin_vec_vsrab]
+  vsc __builtin_vec_vsrab (vsc, vuc);
+    VSRAB VSRAB_DEPR1
+  vuc __builtin_vec_vsrab (vuc, vuc);
+    VSRAB VSRAB_DEPR2
+
+[VEC_VSRAD, SKIP, __builtin_vec_vsrad, _ARCH_PWR8]
+  vsll __builtin_vec_vsrad (vsll, vull);
+    VSRAD VSRAD_DEPR1
+  vull __builtin_vec_vsrad (vull, vull);
+    VSRAD VSRAD_DEPR2
+
+[VEC_VSRAH, vec_vsrah, __builtin_vec_vsrah]
+  vss __builtin_vec_vsrah (vss, vus);
+    VSRAH VSRAH_DEPR1
+  vus __builtin_vec_vsrah (vus, vus);
+    VSRAH VSRAH_DEPR2
+
+[VEC_VSRAW, vec_vsraw, __builtin_vec_vsraw]
+  vsi __builtin_vec_vsraw (vsi, vui);
+    VSRAW VSRAW_DEPR1
+  vui __builtin_vec_vsraw (vui, vui);
+    VSRAW VSRAW_DEPR2
+
+[VEC_VSRB, vec_vsrb, __builtin_vec_vsrb]
+  vsc __builtin_vec_vsrb (vsc, vuc);
+    VSRB VSRB_DEPR1
+  vuc __builtin_vec_vsrb (vuc, vuc);
+    VSRB VSRB_DEPR2
+
+[VEC_VSRD, SKIP, __builtin_vec_vsrd, _ARCH_PWR8]
+  vsll __builtin_vec_vsrd (vsll, vull);
+    VSRD VSRD_DEPR1
+  vull __builtin_vec_vsrd (vull, vull);
+    VSRD VSRD_DEPR2
+
+[VEC_VSRH, vec_vsrh, __builtin_vec_vsrh]
+  vss __builtin_vec_vsrh (vss, vus);
+    VSRH VSRH_DEPR1
+  vus __builtin_vec_vsrh (vus, vus);
+    VSRH VSRH_DEPR2
+
+[VEC_VSRW, vec_vsrw, __builtin_vec_vsrw]
+  vsi __builtin_vec_vsrw (vsi, vui);
+    VSRW VSRW_DEPR1
+  vui __builtin_vec_vsrw (vui, vui);
+    VSRW VSRW_DEPR2
+
+[VEC_VSTDCDP, scalar_test_data_class_dp, __builtin_vec_scalar_test_data_class_dp, _ARCH_PWR9]
+  unsigned int __builtin_vec_scalar_test_data_class_dp (double, const int);
+    VSTDCDP VSTDCDP_DEPR1
+
+[VEC_VSTDCNDP, scalar_test_neg_dp, __builtin_vec_scalar_test_neg_dp, _ARCH_PWR9]
+  unsigned int __builtin_vec_scalar_test_neg_dp (double);
+    VSTDCNDP VSTDCNDP_DEPR1
+
+[VEC_VSTDCNQP, scalar_test_neg_qp, __builtin_vec_scalar_test_neg_qp, _ARCH_PWR9]
+  unsigned int __builtin_vec_scalar_test_neg_qp (_Float128);
+    VSTDCNQP VSTDCNQP_DEPR1
+
+[VEC_VSTDCNSP, scalar_test_neg_sp, __builtin_vec_scalar_test_neg_sp, _ARCH_PWR9]
+  unsigned int __builtin_vec_scalar_test_neg_sp (float);
+    VSTDCNSP VSTDCNSP_DEPR1
+
+[VEC_VSTDCQP, scalar_test_data_class_qp, __builtin_vec_scalar_test_data_class_qp, _ARCH_PWR9]
+  unsigned int __builtin_vec_scalar_test_data_class_qp (_Float128, const int);
+    VSTDCQP VSTDCQP_DEPR1
+
+[VEC_VSTDCSP, scalar_test_data_class_sp, __builtin_vec_scalar_test_data_class_sp, _ARCH_PWR9]
+  unsigned int __builtin_vec_scalar_test_data_class_sp (float, const int);
+    VSTDCSP VSTDCSP_DEPR1
+
+[VEC_VSUBCUQ, vec_vsubcuq, __builtin_vec_vsubcuq]
+  vsq __builtin_vec_vsubcuq (vsq, vsq);
+    VSUBCUQ VSUBCUQ_DEPR1
+  vuq __builtin_vec_vsubcuq (vuq, vuq);
+    VSUBCUQ VSUBCUQ_DEPR2
+
+[VEC_VSUBECUQ, vec_vsubecuq, __builtin_vec_vsubecuq, _ARCH_PWR8]
+  vsq __builtin_vec_vsubecuq (vsq, vsq, vsq);
+    VSUBECUQ VSUBECUQ_DEPR1
+  vuq __builtin_vec_vsubecuq (vuq, vuq, vuq);
+    VSUBECUQ VSUBECUQ_DEPR2
+
+[VEC_VSUBEUQM, vec_vsubeuqm, __builtin_vec_vsubeuqm, _ARCH_PWR8]
+  vsq __builtin_vec_vsubeuqm (vsq, vsq, vsq);
+    VSUBEUQM VSUBEUQM_DEPR1
+  vuq __builtin_vec_vsubeuqm (vuq, vuq, vuq);
+    VSUBEUQM VSUBEUQM_DEPR2
+
+[VEC_VSUBFP, vec_vsubfp, __builtin_vec_vsubfp]
+  vf __builtin_vec_vsubfp (vf, vf);
+    VSUBFP VSUBFP_DEPR1
+
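+; The scalar data-class tests above take a 7-bit selector literal (per ISA
+; 3.0: 0x40 NaN, 0x20 +inf, 0x10 -inf, 0x08 +zero, 0x04 -zero, 0x02
+; +denormal, 0x01 -denormal); a sketch using the _dp spelling defined
+; above (hypothetical function name; assumes Power9 and <altivec.h>):
+;
+;   #include <altivec.h>
+;   unsigned int is_nan_or_inf (double x)
+;   {
+;     return scalar_test_data_class_dp (x, 0x40 | 0x20 | 0x10);
+;   }
+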
+[VEC_VSUBSBS, vec_vsubsbs, __builtin_vec_vsubsbs]
+  vsc __builtin_vec_vsubsbs (vsc, vsc);
+    VSUBSBS VSUBSBS_DEPR1
+  vsc __builtin_vec_vsubsbs (vbc, vsc);
+    VSUBSBS VSUBSBS_DEPR2
+  vsc __builtin_vec_vsubsbs (vsc, vbc);
+    VSUBSBS VSUBSBS_DEPR3
+
+[VEC_VSUBSHS, vec_vsubshs, __builtin_vec_vsubshs]
+  vss __builtin_vec_vsubshs (vss, vss);
+    VSUBSHS VSUBSHS_DEPR1
+  vss __builtin_vec_vsubshs (vbs, vss);
+    VSUBSHS VSUBSHS_DEPR2
+  vss __builtin_vec_vsubshs (vss, vbs);
+    VSUBSHS VSUBSHS_DEPR3
+
+[VEC_VSUBSWS, vec_vsubsws, __builtin_vec_vsubsws]
+  vsi __builtin_vec_vsubsws (vsi, vsi);
+    VSUBSWS VSUBSWS_DEPR1
+  vsi __builtin_vec_vsubsws (vbi, vsi);
+    VSUBSWS VSUBSWS_DEPR2
+  vsi __builtin_vec_vsubsws (vsi, vbi);
+    VSUBSWS VSUBSWS_DEPR3
+
+[VEC_VSUBUBM, vec_vsububm, __builtin_vec_vsububm]
+  vsc __builtin_vec_vsububm (vsc, vsc);
+    VSUBUBM VSUBUBM_DEPR1
+  vuc __builtin_vec_vsububm (vsc, vuc);
+    VSUBUBM VSUBUBM_DEPR2
+  vuc __builtin_vec_vsububm (vuc, vsc);
+    VSUBUBM VSUBUBM_DEPR3
+  vuc __builtin_vec_vsububm (vuc, vuc);
+    VSUBUBM VSUBUBM_DEPR4
+  vsc __builtin_vec_vsububm (vbc, vsc);
+    VSUBUBM VSUBUBM_DEPR5
+  vsc __builtin_vec_vsububm (vsc, vbc);
+    VSUBUBM VSUBUBM_DEPR6
+  vuc __builtin_vec_vsububm (vbc, vuc);
+    VSUBUBM VSUBUBM_DEPR7
+  vuc __builtin_vec_vsububm (vuc, vbc);
+    VSUBUBM VSUBUBM_DEPR8
+
+[VEC_VSUBUBS, vec_vsububs, __builtin_vec_vsububs]
+  vsc __builtin_vec_vsububs (vsc, vsc);
+    VSUBUBS VSUBUBS_DEPR1
+  vsc __builtin_vec_vsububs (vbc, vsc);
+    VSUBUBS VSUBUBS_DEPR2
+  vsc __builtin_vec_vsububs (vsc, vbc);
+    VSUBUBS VSUBUBS_DEPR3
+  vuc __builtin_vec_vsububs (vsc, vuc);
+    VSUBUBS VSUBUBS_DEPR4
+  vuc __builtin_vec_vsububs (vuc, vsc);
+    VSUBUBS VSUBUBS_DEPR5
+  vuc __builtin_vec_vsububs (vuc, vuc);
+    VSUBUBS VSUBUBS_DEPR6
+  vuc __builtin_vec_vsububs (vbc, vuc);
+    VSUBUBS VSUBUBS_DEPR7
+  vuc __builtin_vec_vsububs (vuc, vbc);
+    VSUBUBS VSUBUBS_DEPR8
+
+[VEC_VSUBUDM, vec_vsubudm, __builtin_vec_vsubudm, _ARCH_PWR8]
+  vsll __builtin_vec_vsubudm (vbll, vsll);
+    VSUBUDM VSUBUDM_DEPR1
+  vsll __builtin_vec_vsubudm (vsll, vbll);
+    VSUBUDM VSUBUDM_DEPR2
+  vsll __builtin_vec_vsubudm (vsll, vsll);
+    VSUBUDM VSUBUDM_DEPR3
+  vull __builtin_vec_vsubudm (vbll, vull);
+    VSUBUDM VSUBUDM_DEPR4
+  vull __builtin_vec_vsubudm (vull, vbll);
+    VSUBUDM VSUBUDM_DEPR5
+  vull __builtin_vec_vsubudm (vull, vull);
+    VSUBUDM VSUBUDM_DEPR6
+
+[VEC_VSUBUHM, vec_vsubuhm, __builtin_vec_vsubuhm]
+  vss __builtin_vec_vsubuhm (vss, vss);
+    VSUBUHM VSUBUHM_DEPR1
+  vus __builtin_vec_vsubuhm (vss, vus);
+    VSUBUHM VSUBUHM_DEPR2
+  vus __builtin_vec_vsubuhm (vus, vss);
+    VSUBUHM VSUBUHM_DEPR3
+  vus __builtin_vec_vsubuhm (vus, vus);
+    VSUBUHM VSUBUHM_DEPR4
+  vss __builtin_vec_vsubuhm (vbs, vss);
+    VSUBUHM VSUBUHM_DEPR5
+  vss __builtin_vec_vsubuhm (vss, vbs);
+    VSUBUHM VSUBUHM_DEPR6
+  vus __builtin_vec_vsubuhm (vbs, vus);
+    VSUBUHM VSUBUHM_DEPR7
+  vus __builtin_vec_vsubuhm (vus, vbs);
+    VSUBUHM VSUBUHM_DEPR8
+
+[VEC_VSUBUHS, vec_vsubuhs, __builtin_vec_vsubuhs]
+  vus __builtin_vec_vsubuhs (vss, vus);
+    VSUBUHS VSUBUHS_DEPR1
+  vus __builtin_vec_vsubuhs (vus, vss);
+    VSUBUHS VSUBUHS_DEPR2
+  vus __builtin_vec_vsubuhs (vus, vus);
+    VSUBUHS VSUBUHS_DEPR3
+  vus __builtin_vec_vsubuhs (vbs, vus);
+    VSUBUHS VSUBUHS_DEPR4
+  vus __builtin_vec_vsubuhs (vus, vbs);
+    VSUBUHS VSUBUHS_DEPR5
+
+[VEC_VSUBUQM, vec_vsubuqm, __builtin_vec_vsubuqm, _ARCH_PWR8]
+  vsq __builtin_vec_vsubuqm (vsq, vsq);
+    VSUBUQM VSUBUQM_DEPR1
+  vuq __builtin_vec_vsubuqm (vuq, vuq);
+    VSUBUQM VSUBUQM_DEPR2
+
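+; The quadword subtract entries above mirror the add family; a sketch
+; (hypothetical function and variable names; assumes Power8 and
+; <altivec.h>):
+;
+;   #include <altivec.h>
+;   vector unsigned __int128 diff_with_borrow (vector unsigned __int128 a,
+;                                              vector unsigned __int128 b,
+;                                              vector unsigned __int128 *cy)
+;   {
+;     *cy = vec_vsubcuq (a, b);   /* 1 when no borrow occurs (a >= b) */
+;     return vec_vsubuqm (a, b);  /* difference modulo 2**128 */
+;   }
+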
+[VEC_VSUBUWM, vec_vsubuwm, __builtin_vec_vsubuwm]
+  vsi __builtin_vec_vsubuwm (vbi, vsi);
+    VSUBUWM VSUBUWM_DEPR1
+  vsi __builtin_vec_vsubuwm (vsi, vbi);
+    VSUBUWM VSUBUWM_DEPR2
+  vui __builtin_vec_vsubuwm (vbi, vui);
+    VSUBUWM VSUBUWM_DEPR3
+  vui __builtin_vec_vsubuwm (vui, vbi);
+    VSUBUWM VSUBUWM_DEPR4
+  vsi __builtin_vec_vsubuwm (vsi, vsi);
+    VSUBUWM VSUBUWM_DEPR5
+  vui __builtin_vec_vsubuwm (vsi, vui);
+    VSUBUWM VSUBUWM_DEPR6
+  vui __builtin_vec_vsubuwm (vui, vsi);
+    VSUBUWM VSUBUWM_DEPR7
+  vui __builtin_vec_vsubuwm (vui, vui);
+    VSUBUWM VSUBUWM_DEPR8
+
+[VEC_VSUBUWS, vec_vsubuws, __builtin_vec_vsubuws]
+  vui __builtin_vec_vsubuws (vsi, vui);
+    VSUBUWS VSUBUWS_DEPR1
+  vui __builtin_vec_vsubuws (vui, vsi);
+    VSUBUWS VSUBUWS_DEPR2
+  vui __builtin_vec_vsubuws (vui, vui);
+    VSUBUWS VSUBUWS_DEPR3
+  vui __builtin_vec_vsubuws (vbi, vui);
+    VSUBUWS VSUBUWS_DEPR4
+  vui __builtin_vec_vsubuws (vui, vbi);
+    VSUBUWS VSUBUWS_DEPR5
+
+[VEC_VSUM4SBS, vec_vsum4sbs, __builtin_vec_vsum4sbs]
+  vsi __builtin_vec_vsum4sbs (vsc, vsi);
+    VSUM4SBS VSUM4SBS_DEPR1
+
+[VEC_VSUM4SHS, vec_vsum4shs, __builtin_vec_vsum4shs]
+  vsi __builtin_vec_vsum4shs (vss, vsi);
+    VSUM4SHS VSUM4SHS_DEPR1
+
+[VEC_VSUM4UBS, vec_vsum4ubs, __builtin_vec_vsum4ubs]
+  vui __builtin_vec_vsum4ubs (vuc, vui);
+    VSUM4UBS VSUM4UBS_DEPR1
+
+[VEC_VTDCDP, vec_test_data_class_dp, __builtin_vec_test_data_class_dp, _ARCH_PWR9]
+  vbll __builtin_vec_test_data_class_dp (vd, const int);
+    VTDCDP VTDCDP_DEPR1
+
+[VEC_VTDCSP, vec_test_data_class_sp, __builtin_vec_test_data_class_sp, _ARCH_PWR9]
+  vbi __builtin_vec_test_data_class_sp (vf, const int);
+    VTDCSP VTDCSP_DEPR1
+
+[VEC_UNS_DOUBLEE, vec_uns_doublee, __builtin_vec_uns_doublee]
+  vd __builtin_vec_uns_doublee (vui);
+    UNS_DOUBLEE_V4SI UNS_DOUBLEE_DEPR1
+
+[VEC_UNS_DOUBLEH, vec_uns_doubleh, __builtin_vec_uns_doubleh]
+  vd __builtin_vec_uns_doubleh (vui);
+    UNS_DOUBLEH_V4SI UNS_DOUBLEH_DEPR1
+
+[VEC_UNS_DOUBLEL, vec_uns_doublel, __builtin_vec_uns_doublel]
+  vd __builtin_vec_uns_doublel (vui);
+    UNS_DOUBLEL_V4SI UNS_DOUBLEL_DEPR1
+
+[VEC_UNS_DOUBLEO, vec_uns_doubleo, __builtin_vec_uns_doubleo]
+  vd __builtin_vec_uns_doubleo (vui);
+    UNS_DOUBLEO_V4SI UNS_DOUBLEO_DEPR1
+
+[VEC_VUPKHPX, vec_vupkhpx, __builtin_vec_vupkhpx]
+  vui __builtin_vec_vupkhpx (vus);
+    VUPKHPX VUPKHPX_DEPR1
+  vui __builtin_vec_vupkhpx (vp);
+    VUPKHPX VUPKHPX_DEPR2
+
+[VEC_VUPKHSB, vec_vupkhsb, __builtin_vec_vupkhsb]
+  vss __builtin_vec_vupkhsb (vsc);
+    VUPKHSB VUPKHSB_DEPR1
+  vbs __builtin_vec_vupkhsb (vbc);
+    VUPKHSB VUPKHSB_DEPR2
+
+[VEC_VUPKHSH, vec_vupkhsh, __builtin_vec_vupkhsh]
+  vsi __builtin_vec_vupkhsh (vss);
+    VUPKHSH VUPKHSH_DEPR1
+  vbi __builtin_vec_vupkhsh (vbs);
+    VUPKHSH VUPKHSH_DEPR2
+
+[VEC_VUPKHSW, vec_vupkhsw, __builtin_vec_vupkhsw, _ARCH_PWR8]
+  vsll __builtin_vec_vupkhsw (vsi);
+    VUPKHSW VUPKHSW_DEPR1
+  vbll __builtin_vec_vupkhsw (vbi);
+    VUPKHSW VUPKHSW_DEPR2
+
+[VEC_VUPKLPX, vec_vupklpx, __builtin_vec_vupklpx]
+  vui __builtin_vec_vupklpx (vus);
+    VUPKLPX VUPKLPX_DEPR1
+  vui __builtin_vec_vupklpx (vp);
+    VUPKLPX VUPKLPX_DEPR2
+
+[VEC_VUPKLSB, vec_vupklsb, __builtin_vec_vupklsb]
+  vss __builtin_vec_vupklsb (vsc);
+    VUPKLSB VUPKLSB_DEPR1
+  vbs __builtin_vec_vupklsb (vbc);
+    VUPKLSB VUPKLSB_DEPR2
+
+[VEC_VUPKLSH, vec_vupklsh, __builtin_vec_vupklsh]
+  vsi __builtin_vec_vupklsh (vss);
+    VUPKLSH VUPKLSH_DEPR1
+  vbi __builtin_vec_vupklsh (vbs);
+    VUPKLSH VUPKLSH_DEPR2
+
+[VEC_VUPKLSW, vec_vupklsw, __builtin_vec_vupklsw, _ARCH_PWR8]
+  vsll __builtin_vec_vupklsw (vsi);
+    VUPKLSW VUPKLSW_DEPR1
+  vbll __builtin_vec_vupklsw (vbi);
+    VUPKLSW VUPKLSW_DEPR2
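+
+; The unpack entries above sign-extend half of the input vector; which
+; half is "high" follows the instruction's big-endian element order. A
+; sketch (hypothetical function name; assumes <altivec.h>):
+;
+;   #include <altivec.h>
+;   void widen (vector signed char c,
+;               vector signed short *hi, vector signed short *lo)
+;   {
+;     *hi = vec_vupkhsb (c);  /* one half widened to 16-bit elements */
+;     *lo = vec_vupklsb (c);  /* the other half widened */
+;   }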