* [gcc r13-7156] RISC-V: Fix supporting data type according to RVV ISA. [PR109479]
@ 2023-04-12 15:08 Kito Cheng
From: Kito Cheng @ 2023-04-12 15:08 UTC
  To: gcc-cvs

https://gcc.gnu.org/g:31eb8f18bbe64613fd8d77c4520c00beeb13598f

commit r13-7156-g31eb8f18bbe64613fd8d77c4520c00beeb13598f
Author: Ju-Zhe Zhong <juzhe.zhong@rivai.ai>
Date:   Wed Apr 12 21:56:05 2023 +0800

    RISC-V: Fix supporting data type according to RVV ISA. [PR109479]
    
    For vint64m*_t, we should only allow them with zve64* rather than with
    zve32*_zvl64b, which only guarantees VLEN >= 64.
    Ideally, we should make the error message friendlier, as Clang does by
    reporting the extension name required by the RVV type
    (https://godbolt.org/z/f9GMv4dMo).
    However, I failed to find a way to do that, so current GCC can only report
    an "unknown" type. I added comments to remind us to improve this in the
    future.
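    
    As a rough illustration of the intended behaviour (a minimal sketch
    mirroring the new pr109479 tests below, not one of the committed tests;
    the -march string here is only an example):
    
        /* Compiled with -march=rv32gc_zve32x_zvl64b (ELEN = 32, VLEN >= 64).  */
        #include "riscv_vector.h"
    
        void ok ()  { vint32mf2_t t; } /* Accepted: only needs VLEN >= 64.  */
        void bad () { vint64m1_t t; }  /* Rejected: needs ELEN >= 64 (zve64*);
                                          currently reported as an unknown type.  */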
    
    gcc/ChangeLog:
    
            PR target/109479
            * config/riscv/riscv-vector-builtins-types.def (vint8mf8_t): Fix predicate.
            (vint16mf4_t): Ditto.
            (vint32mf2_t): Ditto.
            (vint64m1_t): Ditto.
            (vint64m2_t): Ditto.
            (vint64m4_t): Ditto.
            (vint64m8_t): Ditto.
            (vuint8mf8_t): Ditto.
            (vuint16mf4_t): Ditto.
            (vuint32mf2_t): Ditto.
            (vuint64m1_t): Ditto.
            (vuint64m2_t): Ditto.
            (vuint64m4_t): Ditto.
            (vuint64m8_t): Ditto.
            (vfloat32mf2_t): Ditto.
            (vbool64_t): Ditto.
            * config/riscv/riscv-vector-builtins.cc (register_builtin_type): Add comments.
            (register_vector_type): Ditto.
            (check_required_extensions): Fix condition.
            * config/riscv/riscv-vector-builtins.h (RVV_REQUIRE_ZVE64): Remove it.
            (RVV_REQUIRE_ELEN_64): New define.
            (RVV_REQUIRE_MIN_VLEN_64): Ditto.
            * config/riscv/riscv-vector-switch.def (TARGET_VECTOR_FP32): Remove it.
            (TARGET_VECTOR_FP64): Ditto.
            (ENTRY): Fix predicate.
            * config/riscv/vector-iterators.md: Fix predicate.
    
    gcc/testsuite/ChangeLog:
    
            PR target/109479
            * gcc.target/riscv/rvv/base/pr109479-1.c: New test.
            * gcc.target/riscv/rvv/base/pr109479-2.c: New test.
            * gcc.target/riscv/rvv/base/pr109479-3.c: New test.
            * gcc.target/riscv/rvv/base/pr109479-4.c: New test.
            * gcc.target/riscv/rvv/base/pr109479-5.c: New test.
            * gcc.target/riscv/rvv/base/pr109479-6.c: New test.

Diff:
---
 gcc/config/riscv/riscv-vector-builtins-types.def   | 348 ++++++++++-----------
 gcc/config/riscv/riscv-vector-builtins.cc          |  14 +-
 gcc/config/riscv/riscv-vector-builtins.h           |   3 +-
 gcc/config/riscv/riscv-vector-switch.def           |  56 ++--
 gcc/config/riscv/vector-iterators.md               |  68 ++--
 .../gcc.target/riscv/rvv/base/pr109479-1.c         |  13 +
 .../gcc.target/riscv/rvv/base/pr109479-2.c         |  13 +
 .../gcc.target/riscv/rvv/base/pr109479-3.c         |  20 ++
 .../gcc.target/riscv/rvv/base/pr109479-4.c         |  20 ++
 .../gcc.target/riscv/rvv/base/pr109479-5.c         |  20 ++
 .../gcc.target/riscv/rvv/base/pr109479-6.c         |  20 ++
 11 files changed, 349 insertions(+), 246 deletions(-)

diff --git a/gcc/config/riscv/riscv-vector-builtins-types.def b/gcc/config/riscv/riscv-vector-builtins-types.def
index a55d494f1d9..a74df066521 100644
--- a/gcc/config/riscv/riscv-vector-builtins-types.def
+++ b/gcc/config/riscv/riscv-vector-builtins-types.def
@@ -235,53 +235,53 @@ along with GCC; see the file COPYING3. If not see
 #define DEF_RVV_LMUL4_OPS(TYPE, REQUIRE)
 #endif
 
-DEF_RVV_I_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_I_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_I_OPS (vint8mf4_t, 0)
 DEF_RVV_I_OPS (vint8mf2_t, 0)
 DEF_RVV_I_OPS (vint8m1_t, 0)
 DEF_RVV_I_OPS (vint8m2_t, 0)
 DEF_RVV_I_OPS (vint8m4_t, 0)
 DEF_RVV_I_OPS (vint8m8_t, 0)
-DEF_RVV_I_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_I_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_I_OPS (vint16mf2_t, 0)
 DEF_RVV_I_OPS (vint16m1_t, 0)
 DEF_RVV_I_OPS (vint16m2_t, 0)
 DEF_RVV_I_OPS (vint16m4_t, 0)
 DEF_RVV_I_OPS (vint16m8_t, 0)
-DEF_RVV_I_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_I_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_I_OPS (vint32m1_t, 0)
 DEF_RVV_I_OPS (vint32m2_t, 0)
 DEF_RVV_I_OPS (vint32m4_t, 0)
 DEF_RVV_I_OPS (vint32m8_t, 0)
-DEF_RVV_I_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_I_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_I_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_I_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_I_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_I_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_I_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_I_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_U_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_U_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_U_OPS (vuint8mf4_t, 0)
 DEF_RVV_U_OPS (vuint8mf2_t, 0)
 DEF_RVV_U_OPS (vuint8m1_t, 0)
 DEF_RVV_U_OPS (vuint8m2_t, 0)
 DEF_RVV_U_OPS (vuint8m4_t, 0)
 DEF_RVV_U_OPS (vuint8m8_t, 0)
-DEF_RVV_U_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_U_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_U_OPS (vuint16mf2_t, 0)
 DEF_RVV_U_OPS (vuint16m1_t, 0)
 DEF_RVV_U_OPS (vuint16m2_t, 0)
 DEF_RVV_U_OPS (vuint16m4_t, 0)
 DEF_RVV_U_OPS (vuint16m8_t, 0)
-DEF_RVV_U_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_U_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_U_OPS (vuint32m1_t, 0)
 DEF_RVV_U_OPS (vuint32m2_t, 0)
 DEF_RVV_U_OPS (vuint32m4_t, 0)
 DEF_RVV_U_OPS (vuint32m8_t, 0)
-DEF_RVV_U_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_U_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_U_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_U_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_U_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_U_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_U_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_U_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_F_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_F_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_F_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_F_OPS (vfloat32m2_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_F_OPS (vfloat32m4_t, RVV_REQUIRE_ELEN_FP_32)
@@ -291,7 +291,7 @@ DEF_RVV_F_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_F_OPS (vfloat64m4_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_F_OPS (vfloat64m8_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_B_OPS (vbool64_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_B_OPS (vbool64_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_B_OPS (vbool32_t, 0)
 DEF_RVV_B_OPS (vbool16_t, 0)
 DEF_RVV_B_OPS (vbool8_t, 0)
@@ -299,82 +299,82 @@ DEF_RVV_B_OPS (vbool4_t, 0)
 DEF_RVV_B_OPS (vbool2_t, 0)
 DEF_RVV_B_OPS (vbool1_t, 0)
 
-DEF_RVV_WEXTI_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WEXTI_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WEXTI_OPS (vint16mf2_t, 0)
 DEF_RVV_WEXTI_OPS (vint16m1_t, 0)
 DEF_RVV_WEXTI_OPS (vint16m2_t, 0)
 DEF_RVV_WEXTI_OPS (vint16m4_t, 0)
 DEF_RVV_WEXTI_OPS (vint16m8_t, 0)
-DEF_RVV_WEXTI_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WEXTI_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WEXTI_OPS (vint32m1_t, 0)
 DEF_RVV_WEXTI_OPS (vint32m2_t, 0)
 DEF_RVV_WEXTI_OPS (vint32m4_t, 0)
 DEF_RVV_WEXTI_OPS (vint32m8_t, 0)
-DEF_RVV_WEXTI_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_WEXTI_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_WEXTI_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_WEXTI_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WEXTI_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_WEXTI_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_WEXTI_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_WEXTI_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_QEXTI_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_QEXTI_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_QEXTI_OPS (vint32m1_t, 0)
 DEF_RVV_QEXTI_OPS (vint32m2_t, 0)
 DEF_RVV_QEXTI_OPS (vint32m4_t, 0)
 DEF_RVV_QEXTI_OPS (vint32m8_t, 0)
-DEF_RVV_QEXTI_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_QEXTI_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_QEXTI_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_QEXTI_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_QEXTI_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_QEXTI_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_QEXTI_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_QEXTI_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_OEXTI_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_OEXTI_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_OEXTI_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_OEXTI_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_OEXTI_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_OEXTI_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_OEXTI_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_OEXTI_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_WEXTU_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WEXTU_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WEXTU_OPS (vuint16mf2_t, 0)
 DEF_RVV_WEXTU_OPS (vuint16m1_t, 0)
 DEF_RVV_WEXTU_OPS (vuint16m2_t, 0)
 DEF_RVV_WEXTU_OPS (vuint16m4_t, 0)
 DEF_RVV_WEXTU_OPS (vuint16m8_t, 0)
-DEF_RVV_WEXTU_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WEXTU_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WEXTU_OPS (vuint32m1_t, 0)
 DEF_RVV_WEXTU_OPS (vuint32m2_t, 0)
 DEF_RVV_WEXTU_OPS (vuint32m4_t, 0)
 DEF_RVV_WEXTU_OPS (vuint32m8_t, 0)
-DEF_RVV_WEXTU_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_WEXTU_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_WEXTU_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_WEXTU_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WEXTU_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_WEXTU_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_WEXTU_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_WEXTU_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_QEXTU_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_QEXTU_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_QEXTU_OPS (vuint32m1_t, 0)
 DEF_RVV_QEXTU_OPS (vuint32m2_t, 0)
 DEF_RVV_QEXTU_OPS (vuint32m4_t, 0)
 DEF_RVV_QEXTU_OPS (vuint32m8_t, 0)
-DEF_RVV_QEXTU_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_QEXTU_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_QEXTU_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_QEXTU_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_QEXTU_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_QEXTU_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_QEXTU_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_QEXTU_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_OEXTU_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_OEXTU_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_OEXTU_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_OEXTU_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_OEXTU_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_OEXTU_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_OEXTU_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_OEXTU_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_FULL_V_I_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_FULL_V_I_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_FULL_V_I_OPS (vint8mf4_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint8mf2_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint8m1_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint8m2_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint8m4_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint8m8_t, 0)
-DEF_RVV_FULL_V_I_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_FULL_V_I_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_FULL_V_I_OPS (vint16mf2_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint16m1_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint16m2_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint16m4_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint16m8_t, 0)
-DEF_RVV_FULL_V_I_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_FULL_V_I_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_FULL_V_I_OPS (vint32m1_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint32m2_t, 0)
 DEF_RVV_FULL_V_I_OPS (vint32m4_t, 0)
@@ -384,20 +384,20 @@ DEF_RVV_FULL_V_I_OPS (vint64m2_t, RVV_REQUIRE_FULL_V)
 DEF_RVV_FULL_V_I_OPS (vint64m4_t, RVV_REQUIRE_FULL_V)
 DEF_RVV_FULL_V_I_OPS (vint64m8_t, RVV_REQUIRE_FULL_V)
 
-DEF_RVV_FULL_V_U_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_FULL_V_U_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_FULL_V_U_OPS (vuint8mf4_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint8mf2_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint8m1_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint8m2_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint8m4_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint8m8_t, 0)
-DEF_RVV_FULL_V_U_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_FULL_V_U_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_FULL_V_U_OPS (vuint16mf2_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint16m1_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint16m2_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint16m4_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint16m8_t, 0)
-DEF_RVV_FULL_V_U_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_FULL_V_U_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_FULL_V_U_OPS (vuint32m1_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint32m2_t, 0)
 DEF_RVV_FULL_V_U_OPS (vuint32m4_t, 0)
@@ -412,128 +412,128 @@ DEF_RVV_WEXTF_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_WEXTF_OPS (vfloat64m4_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_WEXTF_OPS (vfloat64m8_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_CONVERT_I_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_CONVERT_I_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_CONVERT_I_OPS (vint32m1_t, 0)
 DEF_RVV_CONVERT_I_OPS (vint32m2_t, 0)
 DEF_RVV_CONVERT_I_OPS (vint32m4_t, 0)
 DEF_RVV_CONVERT_I_OPS (vint32m8_t, 0)
-DEF_RVV_CONVERT_I_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_CONVERT_I_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_CONVERT_I_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_CONVERT_I_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_CONVERT_I_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_CONVERT_I_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_CONVERT_I_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_CONVERT_I_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_CONVERT_U_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_CONVERT_U_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_CONVERT_U_OPS (vuint32m1_t, 0)
 DEF_RVV_CONVERT_U_OPS (vuint32m2_t, 0)
 DEF_RVV_CONVERT_U_OPS (vuint32m4_t, 0)
 DEF_RVV_CONVERT_U_OPS (vuint32m8_t, 0)
-DEF_RVV_CONVERT_U_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_CONVERT_U_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_CONVERT_U_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_CONVERT_U_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_CONVERT_U_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_CONVERT_U_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_CONVERT_U_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_CONVERT_U_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_WCONVERT_I_OPS (vint64m1_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
-DEF_RVV_WCONVERT_I_OPS (vint64m2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
-DEF_RVV_WCONVERT_I_OPS (vint64m4_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
-DEF_RVV_WCONVERT_I_OPS (vint64m8_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_WCONVERT_I_OPS (vint64m1_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
+DEF_RVV_WCONVERT_I_OPS (vint64m2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
+DEF_RVV_WCONVERT_I_OPS (vint64m4_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
+DEF_RVV_WCONVERT_I_OPS (vint64m8_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
 
-DEF_RVV_WCONVERT_U_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
-DEF_RVV_WCONVERT_U_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
-DEF_RVV_WCONVERT_U_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
-DEF_RVV_WCONVERT_U_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_WCONVERT_U_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
+DEF_RVV_WCONVERT_U_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
+DEF_RVV_WCONVERT_U_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
+DEF_RVV_WCONVERT_U_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ELEN_64)
 
 DEF_RVV_WCONVERT_F_OPS (vfloat64m1_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_WCONVERT_F_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_WCONVERT_F_OPS (vfloat64m4_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_WCONVERT_F_OPS (vfloat64m8_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_WI_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WI_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WI_OPS (vint8mf4_t, 0)
 DEF_RVV_WI_OPS (vint8mf2_t, 0)
 DEF_RVV_WI_OPS (vint8m1_t, 0)
 DEF_RVV_WI_OPS (vint8m2_t, 0)
 DEF_RVV_WI_OPS (vint8m4_t, 0)
 DEF_RVV_WI_OPS (vint8m8_t, 0)
-DEF_RVV_WI_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WI_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WI_OPS (vint16mf2_t, 0)
 DEF_RVV_WI_OPS (vint16m1_t, 0)
 DEF_RVV_WI_OPS (vint16m2_t, 0)
 DEF_RVV_WI_OPS (vint16m4_t, 0)
 DEF_RVV_WI_OPS (vint16m8_t, 0)
-DEF_RVV_WI_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WI_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WI_OPS (vint32m1_t, 0)
 DEF_RVV_WI_OPS (vint32m2_t, 0)
 DEF_RVV_WI_OPS (vint32m4_t, 0)
 DEF_RVV_WI_OPS (vint32m8_t, 0)
 
-DEF_RVV_WU_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WU_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WU_OPS (vuint8mf4_t, 0)
 DEF_RVV_WU_OPS (vuint8mf2_t, 0)
 DEF_RVV_WU_OPS (vuint8m1_t, 0)
 DEF_RVV_WU_OPS (vuint8m2_t, 0)
 DEF_RVV_WU_OPS (vuint8m4_t, 0)
 DEF_RVV_WU_OPS (vuint8m8_t, 0)
-DEF_RVV_WU_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WU_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WU_OPS (vuint16mf2_t, 0)
 DEF_RVV_WU_OPS (vuint16m1_t, 0)
 DEF_RVV_WU_OPS (vuint16m2_t, 0)
 DEF_RVV_WU_OPS (vuint16m4_t, 0)
 DEF_RVV_WU_OPS (vuint16m8_t, 0)
-DEF_RVV_WU_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_WU_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WU_OPS (vuint32m1_t, 0)
 DEF_RVV_WU_OPS (vuint32m2_t, 0)
 DEF_RVV_WU_OPS (vuint32m4_t, 0)
 DEF_RVV_WU_OPS (vuint32m8_t, 0)
 
-DEF_RVV_WF_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_WF_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_WF_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_WF_OPS (vfloat32m2_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_WF_OPS (vfloat32m4_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_WF_OPS (vfloat32m8_t, RVV_REQUIRE_ELEN_FP_32)
 
-DEF_RVV_EI16_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vint8mf4_t, 0)
 DEF_RVV_EI16_OPS (vint8mf2_t, 0)
 DEF_RVV_EI16_OPS (vint8m1_t, 0)
 DEF_RVV_EI16_OPS (vint8m2_t, 0)
 DEF_RVV_EI16_OPS (vint8m4_t, 0)
-DEF_RVV_EI16_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vint16mf2_t, 0)
 DEF_RVV_EI16_OPS (vint16m1_t, 0)
 DEF_RVV_EI16_OPS (vint16m2_t, 0)
 DEF_RVV_EI16_OPS (vint16m4_t, 0)
 DEF_RVV_EI16_OPS (vint16m8_t, 0)
-DEF_RVV_EI16_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vint32m1_t, 0)
 DEF_RVV_EI16_OPS (vint32m2_t, 0)
 DEF_RVV_EI16_OPS (vint32m4_t, 0)
 DEF_RVV_EI16_OPS (vint32m8_t, 0)
-DEF_RVV_EI16_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vuint8mf4_t, 0)
 DEF_RVV_EI16_OPS (vuint8mf2_t, 0)
 DEF_RVV_EI16_OPS (vuint8m1_t, 0)
 DEF_RVV_EI16_OPS (vuint8m2_t, 0)
 DEF_RVV_EI16_OPS (vuint8m4_t, 0)
-DEF_RVV_EI16_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vuint16mf2_t, 0)
 DEF_RVV_EI16_OPS (vuint16m1_t, 0)
 DEF_RVV_EI16_OPS (vuint16m2_t, 0)
 DEF_RVV_EI16_OPS (vuint16m4_t, 0)
 DEF_RVV_EI16_OPS (vuint16m8_t, 0)
-DEF_RVV_EI16_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vuint32m1_t, 0)
 DEF_RVV_EI16_OPS (vuint32m2_t, 0)
 DEF_RVV_EI16_OPS (vuint32m4_t, 0)
 DEF_RVV_EI16_OPS (vuint32m8_t, 0)
-DEF_RVV_EI16_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EI16_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_EI16_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EI16_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EI16_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_EI16_OPS (vfloat32m2_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_EI16_OPS (vfloat32m4_t, RVV_REQUIRE_ELEN_FP_32)
@@ -543,36 +543,36 @@ DEF_RVV_EI16_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_EI16_OPS (vfloat64m4_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_EI16_OPS (vfloat64m8_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_EEW8_INTERPRET_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW8_INTERPRET_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EEW8_INTERPRET_OPS (vint16mf2_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint16m1_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint16m2_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint16m4_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint16m8_t, 0)
-DEF_RVV_EEW8_INTERPRET_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW8_INTERPRET_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EEW8_INTERPRET_OPS (vint32m1_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint32m2_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint32m4_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vint32m8_t, 0)
-DEF_RVV_EEW8_INTERPRET_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW8_INTERPRET_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint16mf2_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint16m1_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint16m2_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint16m4_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint16m8_t, 0)
-DEF_RVV_EEW8_INTERPRET_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW8_INTERPRET_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint32m1_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint32m2_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint32m4_t, 0)
 DEF_RVV_EEW8_INTERPRET_OPS (vuint32m8_t, 0)
-DEF_RVV_EEW8_INTERPRET_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW8_INTERPRET_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW8_INTERPRET_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW8_INTERPRET_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
 DEF_RVV_EEW16_INTERPRET_OPS (vint8mf4_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint8mf2_t, 0)
@@ -580,30 +580,30 @@ DEF_RVV_EEW16_INTERPRET_OPS (vint8m1_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint8m2_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint8m4_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint8m8_t, 0)
-DEF_RVV_EEW16_INTERPRET_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW16_INTERPRET_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EEW16_INTERPRET_OPS (vint32m1_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint32m2_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint32m4_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vint32m8_t, 0)
-DEF_RVV_EEW16_INTERPRET_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW16_INTERPRET_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW16_INTERPRET_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW16_INTERPRET_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW16_INTERPRET_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW16_INTERPRET_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW16_INTERPRET_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW16_INTERPRET_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint8mf4_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint8mf2_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint8m1_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint8m2_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint8m4_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint8m8_t, 0)
-DEF_RVV_EEW16_INTERPRET_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW16_INTERPRET_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint32m1_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint32m2_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint32m4_t, 0)
 DEF_RVV_EEW16_INTERPRET_OPS (vuint32m8_t, 0)
-DEF_RVV_EEW16_INTERPRET_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW16_INTERPRET_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW16_INTERPRET_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW16_INTERPRET_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW16_INTERPRET_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW16_INTERPRET_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW16_INTERPRET_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW16_INTERPRET_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
 DEF_RVV_EEW32_INTERPRET_OPS (vint8mf2_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vint8m1_t, 0)
@@ -615,10 +615,10 @@ DEF_RVV_EEW32_INTERPRET_OPS (vint16m1_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vint16m2_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vint16m4_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vint16m8_t, 0)
-DEF_RVV_EEW32_INTERPRET_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW32_INTERPRET_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW32_INTERPRET_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW32_INTERPRET_OPS (vint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW32_INTERPRET_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW32_INTERPRET_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW32_INTERPRET_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW32_INTERPRET_OPS (vint64m8_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_EEW32_INTERPRET_OPS (vuint8mf2_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vuint8m1_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vuint8m2_t, 0)
@@ -629,10 +629,10 @@ DEF_RVV_EEW32_INTERPRET_OPS (vuint16m1_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vuint16m2_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vuint16m4_t, 0)
 DEF_RVV_EEW32_INTERPRET_OPS (vuint16m8_t, 0)
-DEF_RVV_EEW32_INTERPRET_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW32_INTERPRET_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW32_INTERPRET_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_EEW32_INTERPRET_OPS (vuint64m8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_EEW32_INTERPRET_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW32_INTERPRET_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW32_INTERPRET_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_EEW32_INTERPRET_OPS (vuint64m8_t, RVV_REQUIRE_ELEN_64)
 
 DEF_RVV_EEW64_INTERPRET_OPS (vint8m1_t, 0)
 DEF_RVV_EEW64_INTERPRET_OPS (vint8m2_t, 0)
@@ -659,43 +659,43 @@ DEF_RVV_EEW64_INTERPRET_OPS (vuint32m2_t, 0)
 DEF_RVV_EEW64_INTERPRET_OPS (vuint32m4_t, 0)
 DEF_RVV_EEW64_INTERPRET_OPS (vuint32m8_t, 0)
 
-DEF_RVV_X2_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint8mf4_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint8mf2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint8m1_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint8m2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint8m4_t, 0)
-DEF_RVV_X2_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint16mf2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint16m1_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint16m2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint16m4_t, 0)
-DEF_RVV_X2_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint32m1_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint32m2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vint32m4_t, 0)
-DEF_RVV_X2_VLMUL_EXT_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X2_VLMUL_EXT_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X2_VLMUL_EXT_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X2_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint8mf4_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint8mf2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint8m1_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint8m2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint8m4_t, 0)
-DEF_RVV_X2_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint16mf2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint16m1_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint16m2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint16m4_t, 0)
-DEF_RVV_X2_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint32m1_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint32m2_t, 0)
 DEF_RVV_X2_VLMUL_EXT_OPS (vuint32m4_t, 0)
-DEF_RVV_X2_VLMUL_EXT_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X2_VLMUL_EXT_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X2_VLMUL_EXT_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X2_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X2_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_X2_VLMUL_EXT_OPS (vfloat32m2_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_X2_VLMUL_EXT_OPS (vfloat32m4_t, RVV_REQUIRE_ELEN_FP_32)
@@ -703,118 +703,118 @@ DEF_RVV_X2_VLMUL_EXT_OPS (vfloat64m1_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_X2_VLMUL_EXT_OPS (vfloat64m4_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_X4_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint8mf4_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint8mf2_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint8m1_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint8m2_t, 0)
-DEF_RVV_X4_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint16mf2_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint16m1_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint16m2_t, 0)
-DEF_RVV_X4_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint32m1_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vint32m2_t, 0)
-DEF_RVV_X4_VLMUL_EXT_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X4_VLMUL_EXT_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X4_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint8mf4_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint8mf2_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint8m1_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint8m2_t, 0)
-DEF_RVV_X4_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint16mf2_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint16m1_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint16m2_t, 0)
-DEF_RVV_X4_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint32m1_t, 0)
 DEF_RVV_X4_VLMUL_EXT_OPS (vuint32m2_t, 0)
-DEF_RVV_X4_VLMUL_EXT_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X4_VLMUL_EXT_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X4_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X4_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_X4_VLMUL_EXT_OPS (vfloat32m2_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_X4_VLMUL_EXT_OPS (vfloat64m1_t, RVV_REQUIRE_ELEN_FP_64)
 DEF_RVV_X4_VLMUL_EXT_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_X8_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vint8mf4_t, 0)
 DEF_RVV_X8_VLMUL_EXT_OPS (vint8mf2_t, 0)
 DEF_RVV_X8_VLMUL_EXT_OPS (vint8m1_t, 0)
-DEF_RVV_X8_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vint16mf2_t, 0)
 DEF_RVV_X8_VLMUL_EXT_OPS (vint16m1_t, 0)
-DEF_RVV_X8_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vint32m1_t, 0)
-DEF_RVV_X8_VLMUL_EXT_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X8_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vuint8mf4_t, 0)
 DEF_RVV_X8_VLMUL_EXT_OPS (vuint8mf2_t, 0)
 DEF_RVV_X8_VLMUL_EXT_OPS (vuint8m1_t, 0)
-DEF_RVV_X8_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vuint16mf2_t, 0)
 DEF_RVV_X8_VLMUL_EXT_OPS (vuint16m1_t, 0)
-DEF_RVV_X8_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vuint32m1_t, 0)
-DEF_RVV_X8_VLMUL_EXT_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X8_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
+DEF_RVV_X8_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X8_VLMUL_EXT_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_X8_VLMUL_EXT_OPS (vfloat64m1_t, RVV_REQUIRE_ELEN_FP_64)
 
-DEF_RVV_X16_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X16_VLMUL_EXT_OPS (vint8mf4_t, 0)
 DEF_RVV_X16_VLMUL_EXT_OPS (vint8mf2_t, 0)
-DEF_RVV_X16_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X16_VLMUL_EXT_OPS (vint16mf2_t, 0)
-DEF_RVV_X16_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X16_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X16_VLMUL_EXT_OPS (vuint8mf4_t, 0)
 DEF_RVV_X16_VLMUL_EXT_OPS (vuint8mf2_t, 0)
-DEF_RVV_X16_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X16_VLMUL_EXT_OPS (vuint16mf2_t, 0)
-DEF_RVV_X16_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X16_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_ZVE64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vuint32mf2_t, RVV_REQUIRE_MIN_VLEN_64)
+DEF_RVV_X16_VLMUL_EXT_OPS (vfloat32mf2_t, RVV_REQUIRE_ELEN_FP_32 | RVV_REQUIRE_MIN_VLEN_64)
 
-DEF_RVV_X32_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X32_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X32_VLMUL_EXT_OPS (vint8mf4_t, 0)
-DEF_RVV_X32_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X32_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X32_VLMUL_EXT_OPS (vint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
+DEF_RVV_X32_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 DEF_RVV_X32_VLMUL_EXT_OPS (vuint8mf4_t, 0)
-DEF_RVV_X32_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X32_VLMUL_EXT_OPS (vuint16mf4_t, RVV_REQUIRE_MIN_VLEN_64)
 
-DEF_RVV_X64_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_ZVE64)
-DEF_RVV_X64_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_X64_VLMUL_EXT_OPS (vint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
+DEF_RVV_X64_VLMUL_EXT_OPS (vuint8mf8_t, RVV_REQUIRE_MIN_VLEN_64)
 
 DEF_RVV_LMUL1_OPS (vint8m1_t, 0)
 DEF_RVV_LMUL1_OPS (vint16m1_t, 0)
 DEF_RVV_LMUL1_OPS (vint32m1_t, 0)
-DEF_RVV_LMUL1_OPS (vint64m1_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_LMUL1_OPS (vint64m1_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_LMUL1_OPS (vuint8m1_t, 0)
 DEF_RVV_LMUL1_OPS (vuint16m1_t, 0)
 DEF_RVV_LMUL1_OPS (vuint32m1_t, 0)
-DEF_RVV_LMUL1_OPS (vuint64m1_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_LMUL1_OPS (vuint64m1_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_LMUL1_OPS (vfloat32m1_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_LMUL1_OPS (vfloat64m1_t, RVV_REQUIRE_ELEN_FP_64)
 
 DEF_RVV_LMUL2_OPS (vint8m2_t, 0)
 DEF_RVV_LMUL2_OPS (vint16m2_t, 0)
 DEF_RVV_LMUL2_OPS (vint32m2_t, 0)
-DEF_RVV_LMUL2_OPS (vint64m2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_LMUL2_OPS (vint64m2_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_LMUL2_OPS (vuint8m2_t, 0)
 DEF_RVV_LMUL2_OPS (vuint16m2_t, 0)
 DEF_RVV_LMUL2_OPS (vuint32m2_t, 0)
-DEF_RVV_LMUL2_OPS (vuint64m2_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_LMUL2_OPS (vuint64m2_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_LMUL2_OPS (vfloat32m2_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_LMUL2_OPS (vfloat64m2_t, RVV_REQUIRE_ELEN_FP_64)
 
 DEF_RVV_LMUL4_OPS (vint8m4_t, 0)
 DEF_RVV_LMUL4_OPS (vint16m4_t, 0)
 DEF_RVV_LMUL4_OPS (vint32m4_t, 0)
-DEF_RVV_LMUL4_OPS (vint64m4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_LMUL4_OPS (vint64m4_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_LMUL4_OPS (vuint8m4_t, 0)
 DEF_RVV_LMUL4_OPS (vuint16m4_t, 0)
 DEF_RVV_LMUL4_OPS (vuint32m4_t, 0)
-DEF_RVV_LMUL4_OPS (vuint64m4_t, RVV_REQUIRE_ZVE64)
+DEF_RVV_LMUL4_OPS (vuint64m4_t, RVV_REQUIRE_ELEN_64)
 DEF_RVV_LMUL4_OPS (vfloat32m4_t, RVV_REQUIRE_ELEN_FP_32)
 DEF_RVV_LMUL4_OPS (vfloat64m4_t, RVV_REQUIRE_ELEN_FP_64)
 
diff --git a/gcc/config/riscv/riscv-vector-builtins.cc b/gcc/config/riscv/riscv-vector-builtins.cc
index bd16fe9db7d..01cea23d3e6 100644
--- a/gcc/config/riscv/riscv-vector-builtins.cc
+++ b/gcc/config/riscv/riscv-vector-builtins.cc
@@ -2312,6 +2312,10 @@ register_builtin_type (vector_type_index type, tree eltype, machine_mode mode)
   builtin_types[type].scalar = eltype;
   builtin_types[type].scalar_ptr = build_pointer_type (eltype);
   builtin_types[type].scalar_const_ptr = build_const_pointer (eltype);
+  /* TODO: We currently just skip the register of the illegal RVV type.
+     Ideally, we should report error message more friendly instead of
+     reporting "unknown" type. Support more friendly error message in
+     the future.  */
   if (!riscv_v_ext_vector_mode_p (mode))
     return;
 
@@ -2362,6 +2366,10 @@ register_vector_type (vector_type_index type)
 
   /* When vectype is NULL, the corresponding builtin type
      is disabled according to '-march'.  */
+  /* TODO: We currently just skip the register of the illegal RVV type.
+     Ideally, we should report error message more friendly instead of
+     reporting "unknown" type. Support more friendly error message in
+     the future.  */
   if (!vectype)
     return;
   tree id = get_identifier (vector_types[type].name);
@@ -2452,12 +2460,14 @@ check_required_extensions (const function_instance &instance)
     riscv_isa_flags |= RVV_REQUIRE_ELEN_FP_32;
   if (TARGET_VECTOR_ELEN_FP_64)
     riscv_isa_flags |= RVV_REQUIRE_ELEN_FP_64;
-  if (TARGET_MIN_VLEN > 32)
-    riscv_isa_flags |= RVV_REQUIRE_ZVE64;
+  if (TARGET_VECTOR_ELEN_64)
+    riscv_isa_flags |= RVV_REQUIRE_ELEN_64;
   if (TARGET_64BIT)
     riscv_isa_flags |= RVV_REQUIRE_RV64BIT;
   if (TARGET_FULL_V)
     riscv_isa_flags |= RVV_REQUIRE_FULL_V;
+  if (TARGET_MIN_VLEN > 32)
+    riscv_isa_flags |= RVV_REQUIRE_MIN_VLEN_64;
 
   uint64_t missing_extensions = required_extensions & ~riscv_isa_flags;
   if (missing_extensions != 0)
diff --git a/gcc/config/riscv/riscv-vector-builtins.h b/gcc/config/riscv/riscv-vector-builtins.h
index 84dfe676773..8ffb9d33e33 100644
--- a/gcc/config/riscv/riscv-vector-builtins.h
+++ b/gcc/config/riscv/riscv-vector-builtins.h
@@ -103,10 +103,11 @@ static const unsigned int CP_WRITE_CSR = 1U << 5;
 
 /* Bit values used to identify required extensions for RVV intrinsics.  */
 #define RVV_REQUIRE_RV64BIT (1 << 0)	/* Require RV64.  */
-#define RVV_REQUIRE_ZVE64 (1 << 1)	/* Require TARGET_MIN_VLEN > 32.  */
+#define RVV_REQUIRE_ELEN_64 (1 << 1)	/* Require TARGET_VECTOR_ELEN_64.  */
 #define RVV_REQUIRE_ELEN_FP_32 (1 << 2) /* Require FP ELEN >= 32.  */
 #define RVV_REQUIRE_ELEN_FP_64 (1 << 3) /* Require FP ELEN >= 64.  */
 #define RVV_REQUIRE_FULL_V (1 << 4) /* Require Full 'V' extension.  */
+#define RVV_REQUIRE_MIN_VLEN_64 (1 << 5)	/* Require TARGET_MIN_VLEN >= 64.  */
 
 /* Enumerates the RVV operand types.  */
 enum operand_type_index
diff --git a/gcc/config/riscv/riscv-vector-switch.def b/gcc/config/riscv/riscv-vector-switch.def
index 8e4aed40338..3b944547b49 100644
--- a/gcc/config/riscv/riscv-vector-switch.def
+++ b/gcc/config/riscv/riscv-vector-switch.def
@@ -83,16 +83,6 @@ TODO: FP16 vector needs support of 'zvfh', we don't support it yet.  */
 #define ENTRY(MODE, REQUIREMENT, VLMUL_FOR_MIN_VLEN32, RATIO_FOR_MIN_VLEN32,   \
 	      VLMUL_FOR_MIN_VLEN64, RATIO_FOR_MIN_VLEN64)
 #endif
-/* Flag of FP32 vector.  */
-#ifndef TARGET_VECTOR_FP32
-#define TARGET_VECTOR_FP32                                                     \
-  (TARGET_HARD_FLOAT && (TARGET_VECTOR_ELEN_FP_32 || TARGET_VECTOR_ELEN_FP_64))
-#endif
-/* Flag of FP64 vector.  */
-#ifndef TARGET_VECTOR_FP64
-#define TARGET_VECTOR_FP64                                                     \
-  (TARGET_DOUBLE_FLOAT && TARGET_VECTOR_ELEN_FP_64 && (TARGET_MIN_VLEN > 32))
-#endif
 
 /* Mask modes. Disable VNx64BImode when TARGET_MIN_VLEN == 32.  */
 ENTRY (VNx64BI, TARGET_MIN_VLEN > 32, LMUL_RESERVED, 0, LMUL_8, 1)
@@ -129,35 +119,31 @@ ENTRY (VNx2HF, false, LMUL_1, 16, LMUL_F2, 32)
 ENTRY (VNx1HF, false, LMUL_F2, 32, LMUL_F4, 64)
 
 /* SEW = 32. Disable VNx16SImode when TARGET_MIN_VLEN == 32.
-   For single-precision floating-point, we need TARGET_VECTOR_FP32 ==
-   RVV_ENABLE.  */
+   For single-precision floating-point, we need TARGET_VECTOR_ELEN_FP_32 to be
+   true.  */
 ENTRY (VNx16SI, TARGET_MIN_VLEN > 32, LMUL_RESERVED, 0, LMUL_8, 4)
 ENTRY (VNx8SI, true, LMUL_8, 4, LMUL_4, 8)
 ENTRY (VNx4SI, true, LMUL_4, 8, LMUL_2, 16)
 ENTRY (VNx2SI, true, LMUL_2, 16, LMUL_1, 32)
 ENTRY (VNx1SI, true, LMUL_1, 32, LMUL_F2, 64)
 
-ENTRY (VNx16SF, TARGET_VECTOR_FP32 && (TARGET_MIN_VLEN > 32), LMUL_RESERVED, 0,
-       LMUL_8, 4)
-ENTRY (VNx8SF, TARGET_VECTOR_FP32, LMUL_8, 4, LMUL_4, 8)
-ENTRY (VNx4SF, TARGET_VECTOR_FP32, LMUL_4, 8, LMUL_2, 16)
-ENTRY (VNx2SF, TARGET_VECTOR_FP32, LMUL_2, 16, LMUL_1, 32)
-ENTRY (VNx1SF, TARGET_VECTOR_FP32, LMUL_1, 32, LMUL_F2, 64)
-
-/* SEW = 64. Enable when TARGET_MIN_VLEN > 32.
-   For double-precision floating-point, we need TARGET_VECTOR_FP64 ==
-   RVV_ENABLE.  */
-ENTRY (VNx8DI, TARGET_MIN_VLEN > 32, LMUL_RESERVED, 0, LMUL_8, 8)
-ENTRY (VNx4DI, TARGET_MIN_VLEN > 32, LMUL_RESERVED, 0, LMUL_4, 16)
-ENTRY (VNx2DI, TARGET_MIN_VLEN > 32, LMUL_RESERVED, 0, LMUL_2, 32)
-ENTRY (VNx1DI, TARGET_MIN_VLEN > 32, LMUL_RESERVED, 0, LMUL_1, 64)
-
-ENTRY (VNx8DF, TARGET_VECTOR_FP64 && (TARGET_MIN_VLEN > 32), LMUL_RESERVED, 0,
-       LMUL_8, 8)
-ENTRY (VNx4DF, TARGET_VECTOR_FP64, LMUL_RESERVED, 0, LMUL_4, 16)
-ENTRY (VNx2DF, TARGET_VECTOR_FP64, LMUL_RESERVED, 0, LMUL_2, 32)
-ENTRY (VNx1DF, TARGET_VECTOR_FP64, LMUL_RESERVED, 0, LMUL_1, 64)
-
-#undef TARGET_VECTOR_FP32
-#undef TARGET_VECTOR_FP64
+ENTRY (VNx16SF, TARGET_VECTOR_ELEN_FP_32, LMUL_RESERVED, 0, LMUL_8, 4)
+ENTRY (VNx8SF, TARGET_VECTOR_ELEN_FP_32, LMUL_8, 4, LMUL_4, 8)
+ENTRY (VNx4SF, TARGET_VECTOR_ELEN_FP_32, LMUL_4, 8, LMUL_2, 16)
+ENTRY (VNx2SF, TARGET_VECTOR_ELEN_FP_32, LMUL_2, 16, LMUL_1, 32)
+ENTRY (VNx1SF, TARGET_VECTOR_ELEN_FP_32, LMUL_1, 32, LMUL_F2, 64)
+
+/* SEW = 64. Enable when TARGET_VECTOR_ELEN_64 is true.
+   For double-precision floating-point, we need TARGET_VECTOR_ELEN_FP_64 to be
+   true.  */
+ENTRY (VNx8DI, TARGET_VECTOR_ELEN_64, LMUL_RESERVED, 0, LMUL_8, 8)
+ENTRY (VNx4DI, TARGET_VECTOR_ELEN_64, LMUL_RESERVED, 0, LMUL_4, 16)
+ENTRY (VNx2DI, TARGET_VECTOR_ELEN_64, LMUL_RESERVED, 0, LMUL_2, 32)
+ENTRY (VNx1DI, TARGET_VECTOR_ELEN_64, LMUL_RESERVED, 0, LMUL_1, 64)
+
+ENTRY (VNx8DF, TARGET_VECTOR_ELEN_FP_64, LMUL_RESERVED, 0, LMUL_8, 8)
+ENTRY (VNx4DF, TARGET_VECTOR_ELEN_FP_64, LMUL_RESERVED, 0, LMUL_4, 16)
+ENTRY (VNx2DF, TARGET_VECTOR_ELEN_FP_64, LMUL_RESERVED, 0, LMUL_2, 32)
+ENTRY (VNx1DF, TARGET_VECTOR_ELEN_FP_64, LMUL_RESERVED, 0, LMUL_1, 64)
+
 #undef ENTRY
diff --git a/gcc/config/riscv/vector-iterators.md b/gcc/config/riscv/vector-iterators.md
index 194e9b8f57f..70ad85b661b 100644
--- a/gcc/config/riscv/vector-iterators.md
+++ b/gcc/config/riscv/vector-iterators.md
@@ -86,8 +86,8 @@
   VNx1QI VNx2QI VNx4QI VNx8QI VNx16QI VNx32QI (VNx64QI "TARGET_MIN_VLEN > 32")
   VNx1HI VNx2HI VNx4HI VNx8HI VNx16HI (VNx32HI "TARGET_MIN_VLEN > 32")
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -102,8 +102,8 @@
 (define_mode_iterator VEEWEXT2 [
   VNx1HI VNx2HI VNx4HI VNx8HI VNx16HI (VNx32HI "TARGET_MIN_VLEN > 32")
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -117,8 +117,8 @@
 
 (define_mode_iterator VEEWEXT4 [
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -131,8 +131,8 @@
 ])
 
 (define_mode_iterator VEEWEXT8 [
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx1DF "TARGET_VECTOR_ELEN_FP_64")
   (VNx2DF "TARGET_VECTOR_ELEN_FP_64")
   (VNx4DF "TARGET_VECTOR_ELEN_FP_64")
@@ -162,8 +162,8 @@
   VNx1QI VNx2QI VNx4QI VNx8QI VNx16QI VNx32QI
   VNx1HI VNx2HI VNx4HI VNx8HI VNx16HI
   VNx1SI VNx2SI VNx4SI VNx8SI
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -177,7 +177,7 @@
   VNx1QI VNx2QI VNx4QI VNx8QI VNx16QI
   VNx1HI VNx2HI VNx4HI VNx8HI
   VNx1SI VNx2SI VNx4SI
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -189,7 +189,7 @@
   VNx1QI VNx2QI VNx4QI VNx8QI
   VNx1HI VNx2HI VNx4HI
   VNx1SI VNx2SI
-  (VNx1DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx1DF "TARGET_VECTOR_ELEN_FP_64")
@@ -215,8 +215,8 @@
   VNx1QI VNx2QI VNx4QI VNx8QI VNx16QI VNx32QI
   VNx1HI VNx2HI VNx4HI VNx8HI VNx16HI (VNx32HI "TARGET_MIN_VLEN > 32")
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -232,8 +232,8 @@
   VNx1QI VNx2QI VNx4QI VNx8QI VNx16QI VNx32QI (VNx64QI "TARGET_MIN_VLEN > 32")
   VNx1HI VNx2HI VNx4HI VNx8HI VNx16HI (VNx32HI "TARGET_MIN_VLEN > 32")
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
 ])
 
 (define_mode_iterator VI_ZVE32 [
@@ -291,8 +291,8 @@
 ])
 
 (define_mode_iterator VI_D [
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
 ])
 
 (define_mode_iterator VFULLI_D [
@@ -302,28 +302,28 @@
 
 (define_mode_iterator VNX1_QHSD [
   VNx1QI VNx1HI VNx1SI
-  (VNx1DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx1DF "TARGET_VECTOR_ELEN_FP_64")
 ])
 
 (define_mode_iterator VNX2_QHSD [
   VNx2QI VNx2HI VNx2SI
-  (VNx2DI "TARGET_MIN_VLEN > 32")
+  (VNx2DI "TARGET_VECTOR_ELEN_64")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx2DF "TARGET_VECTOR_ELEN_FP_64")
 ])
 
 (define_mode_iterator VNX4_QHSD [
   VNx4QI VNx4HI VNx4SI
-  (VNx4DI "TARGET_MIN_VLEN > 32")
+  (VNx4DI "TARGET_VECTOR_ELEN_64")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4DF "TARGET_VECTOR_ELEN_FP_64")
 ])
 
 (define_mode_iterator VNX8_QHSD [
   VNx8QI VNx8HI VNx8SI
-  (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx8SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx8DF "TARGET_VECTOR_ELEN_FP_64")
 ])
@@ -373,8 +373,8 @@
   (VNx4QI "TARGET_MIN_VLEN == 32") VNx8QI VNx16QI VNx32QI (VNx64QI "TARGET_MIN_VLEN > 32")
   (VNx2HI "TARGET_MIN_VLEN == 32") VNx4HI VNx8HI VNx16HI (VNx32HI "TARGET_MIN_VLEN > 32")
   (VNx1SI "TARGET_MIN_VLEN == 32") VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
   (VNx1SF "TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN == 32")
   (VNx2SF "TARGET_VECTOR_ELEN_FP_32")
   (VNx4SF "TARGET_VECTOR_ELEN_FP_32")
@@ -401,8 +401,8 @@
 (define_mode_iterator VWEXTI [
   VNx1HI VNx2HI VNx4HI VNx8HI VNx16HI (VNx32HI "TARGET_MIN_VLEN > 32")
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
 ])
 
 (define_mode_iterator VWEXTF [
@@ -413,21 +413,21 @@
 ])
 
 (define_mode_iterator VWCONVERTI [
-  (VNx1DI "TARGET_MIN_VLEN > 32 && TARGET_VECTOR_ELEN_FP_32")
-  (VNx2DI "TARGET_MIN_VLEN > 32 && TARGET_VECTOR_ELEN_FP_32")
-  (VNx4DI "TARGET_MIN_VLEN > 32 && TARGET_VECTOR_ELEN_FP_32")
-  (VNx8DI "TARGET_MIN_VLEN > 32 && TARGET_VECTOR_ELEN_FP_32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64 && TARGET_VECTOR_ELEN_FP_32")
+  (VNx2DI "TARGET_VECTOR_ELEN_64 && TARGET_VECTOR_ELEN_FP_32")
+  (VNx4DI "TARGET_VECTOR_ELEN_64 && TARGET_VECTOR_ELEN_FP_32")
+  (VNx8DI "TARGET_VECTOR_ELEN_64 && TARGET_VECTOR_ELEN_FP_32")
 ])
 
 (define_mode_iterator VQEXTI [
   VNx1SI VNx2SI VNx4SI VNx8SI (VNx16SI "TARGET_MIN_VLEN > 32")
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
 ])
 
 (define_mode_iterator VOEXTI [
-  (VNx1DI "TARGET_MIN_VLEN > 32") (VNx2DI "TARGET_MIN_VLEN > 32")
-  (VNx4DI "TARGET_MIN_VLEN > 32") (VNx8DI "TARGET_MIN_VLEN > 32")
+  (VNx1DI "TARGET_VECTOR_ELEN_64") (VNx2DI "TARGET_VECTOR_ELEN_64")
+  (VNx4DI "TARGET_VECTOR_ELEN_64") (VNx8DI "TARGET_VECTOR_ELEN_64")
 ])
 
 (define_mode_attr VLMULX2 [
diff --git a/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-1.c b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-1.c
new file mode 100644
index 00000000000..a13cc12c543
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-1.c
@@ -0,0 +1,13 @@
+/* { dg-do compile } */
+/* { dg-options "-O3 -march=rv32gc_zve32x_zvl64b -mabi=ilp32d" } */
+
+#include "riscv_vector.h"
+
+void foo0 () {__rvv_int64m1_t t;} /* { dg-error {unknown type name '__rvv_int64m1_t'} } */
+void foo1 () {__rvv_uint64m1_t t;} /* { dg-error {unknown type name '__rvv_uint64m1_t'} } */
+void foo2 () {__rvv_int64m2_t t;} /* { dg-error {unknown type name '__rvv_int64m2_t'} } */
+void foo3 () {__rvv_uint64m2_t t;} /* { dg-error {unknown type name '__rvv_uint64m2_t'} } */
+void foo4 () {__rvv_int64m4_t t;} /* { dg-error {unknown type name '__rvv_int64m4_t'} } */
+void foo5 () {__rvv_uint64m4_t t;} /* { dg-error {unknown type name '__rvv_uint64m4_t'} } */
+void foo6 () {__rvv_int64m8_t t;} /* { dg-error {unknown type name '__rvv_int64m8_t'} } */
+void foo7 () {__rvv_uint64m8_t t;} /* { dg-error {unknown type name '__rvv_uint64m8_t'} } */
diff --git a/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-2.c b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-2.c
new file mode 100644
index 00000000000..dd6bcb0f548
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-2.c
@@ -0,0 +1,13 @@
+/* { dg-do compile } */
+/* { dg-options "-O3 -march=rv32gc_zve32x_zvl64b -mabi=ilp32d" } */
+
+#include "riscv_vector.h"
+
+void foo0 () {vint64m1_t t;} /* { dg-error {unknown type name 'vint64m1_t'} } */
+void foo1 () {vuint64m1_t t;} /* { dg-error {unknown type name 'vuint64m1_t'} } */
+void foo2 () {vint64m2_t t;} /* { dg-error {unknown type name 'vint64m2_t'} } */
+void foo3 () {vuint64m2_t t;} /* { dg-error {unknown type name 'vuint64m2_t'} } */
+void foo4 () {vint64m4_t t;} /* { dg-error {unknown type name 'vint64m4_t'} } */
+void foo5 () {vuint64m4_t t;} /* { dg-error {unknown type name 'vuint64m4_t'} } */
+void foo6 () {vint64m8_t t;} /* { dg-error {unknown type name 'vint64m8_t'} } */
+void foo7 () {vuint64m8_t t;} /* { dg-error {unknown type name 'vuint64m8_t'} } */
diff --git a/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-3.c b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-3.c
new file mode 100644
index 00000000000..01e86223da9
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-3.c
@@ -0,0 +1,20 @@
+/* { dg-do compile } */
+/* { dg-options "-O3 -march=rv32gc_zve32x -mabi=ilp32d" } */
+
+#include "riscv_vector.h"
+
+void foo0 () {__rvv_bool64_t t;} /* { dg-error {unknown type name '__rvv_bool64_t'} } */
+void foo1 () {__rvv_int8mf8_t t;} /* { dg-error {unknown type name '__rvv_int8mf8_t'} } */
+void foo2 () {__rvv_uint8mf8_t t;} /* { dg-error {unknown type name '__rvv_uint8mf8_t'} } */
+void foo3 () {__rvv_int16mf4_t t;} /* { dg-error {unknown type name '__rvv_int16mf4_t'} } */
+void foo4 () {__rvv_uint16mf4_t t;} /* { dg-error {unknown type name '__rvv_uint16mf4_t'} } */
+void foo5 () {__rvv_int32mf2_t t;} /* { dg-error {unknown type name '__rvv_int32mf2_t'} } */
+void foo6 () {__rvv_uint32mf2_t t;} /* { dg-error {unknown type name '__rvv_uint32mf2_t'} } */
+void foo7 () {__rvv_int64m1_t t;} /* { dg-error {unknown type name '__rvv_int64m1_t'} } */
+void foo8 () {__rvv_uint64m1_t t;} /* { dg-error {unknown type name '__rvv_uint64m1_t'} } */
+void foo9 () {__rvv_int64m2_t t;} /* { dg-error {unknown type name '__rvv_int64m2_t'} } */
+void foo10 () {__rvv_uint64m2_t t;} /* { dg-error {unknown type name '__rvv_uint64m2_t'} } */
+void foo11 () {__rvv_int64m4_t t;} /* { dg-error {unknown type name '__rvv_int64m4_t'} } */
+void foo12 () {__rvv_uint64m4_t t;} /* { dg-error {unknown type name '__rvv_uint64m4_t'} } */
+void foo13 () {__rvv_int64m8_t t;} /* { dg-error {unknown type name '__rvv_int64m8_t'} } */
+void foo14 () {__rvv_uint64m8_t t;} /* { dg-error {unknown type name '__rvv_uint64m8_t'} } */
diff --git a/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-4.c b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-4.c
new file mode 100644
index 00000000000..62385b8dcd1
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-4.c
@@ -0,0 +1,20 @@
+/* { dg-do compile } */
+/* { dg-options "-O3 -march=rv32gc_zve32x -mabi=ilp32d" } */
+
+#include "riscv_vector.h"
+
+void foo0 () {vbool64_t t;} /* { dg-error {unknown type name 'vbool64_t'} } */
+void foo1 () {vint8mf8_t t;} /* { dg-error {unknown type name 'vint8mf8_t'} } */
+void foo2 () {vuint8mf8_t t;} /* { dg-error {unknown type name 'vuint8mf8_t'} } */
+void foo3 () {vint16mf4_t t;} /* { dg-error {unknown type name 'vint16mf4_t'} } */
+void foo4 () {vuint16mf4_t t;} /* { dg-error {unknown type name 'vuint16mf4_t'} } */
+void foo5 () {vint32mf2_t t;} /* { dg-error {unknown type name 'vint32mf2_t'} } */
+void foo6 () {vuint32mf2_t t;} /* { dg-error {unknown type name 'vuint32mf2_t'} } */
+void foo7 () {vint64m1_t t;} /* { dg-error {unknown type name 'vint64m1_t'} } */
+void foo8 () {vuint64m1_t t;} /* { dg-error {unknown type name 'vuint64m1_t'} } */
+void foo9 () {vint64m2_t t;} /* { dg-error {unknown type name 'vint64m2_t'} } */
+void foo10 () {vuint64m2_t t;} /* { dg-error {unknown type name 'vuint64m2_t'} } */
+void foo11 () {vint64m4_t t;} /* { dg-error {unknown type name 'vint64m4_t'} } */
+void foo12 () {vuint64m4_t t;} /* { dg-error {unknown type name 'vuint64m4_t'} } */
+void foo13 () {vint64m8_t t;} /* { dg-error {unknown type name 'vint64m8_t'} } */
+void foo14 () {vuint64m8_t t;} /* { dg-error {unknown type name 'vuint64m8_t'} } */
diff --git a/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-5.c b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-5.c
new file mode 100644
index 00000000000..26a0a742965
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-5.c
@@ -0,0 +1,20 @@
+/* { dg-do compile } */
+/* { dg-options "-O3 -march=rv32gc_zve32x_zvl64b -mabi=ilp32d" } */
+
+#include "riscv_vector.h"
+
+void foo0 () {__rvv_bool64_t t;}
+void foo1 () {__rvv_int8mf8_t t;}
+void foo2 () {__rvv_uint8mf8_t t;}
+void foo3 () {__rvv_int16mf4_t t;}
+void foo4 () {__rvv_uint16mf4_t t;}
+void foo5 () {__rvv_int32mf2_t t;}
+void foo6 () {__rvv_uint32mf2_t t;}
+void foo7 () {__rvv_int64m1_t t;} /* { dg-error {unknown type name '__rvv_int64m1_t'} } */
+void foo8 () {__rvv_uint64m1_t t;} /* { dg-error {unknown type name '__rvv_uint64m1_t'} } */
+void foo9 () {__rvv_int64m2_t t;} /* { dg-error {unknown type name '__rvv_int64m2_t'} } */
+void foo10 () {__rvv_uint64m2_t t;} /* { dg-error {unknown type name '__rvv_uint64m2_t'} } */
+void foo11 () {__rvv_int64m4_t t;} /* { dg-error {unknown type name '__rvv_int64m4_t'} } */
+void foo12 () {__rvv_uint64m4_t t;} /* { dg-error {unknown type name '__rvv_uint64m4_t'} } */
+void foo13 () {__rvv_int64m8_t t;} /* { dg-error {unknown type name '__rvv_int64m8_t'} } */
+void foo14 () {__rvv_uint64m8_t t;} /* { dg-error {unknown type name '__rvv_uint64m8_t'} } */
diff --git a/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-6.c b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-6.c
new file mode 100644
index 00000000000..eb66f3ea2bd
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/rvv/base/pr109479-6.c
@@ -0,0 +1,20 @@
+/* { dg-do compile } */
+/* { dg-options "-O3 -march=rv32gc_zve32x_zvl64b -mabi=ilp32d" } */
+
+#include "riscv_vector.h"
+
+void foo0 () {vbool64_t t;}
+void foo1 () {vint8mf8_t t;}
+void foo2 () {vuint8mf8_t t;}
+void foo3 () {vint16mf4_t t;}
+void foo4 () {vuint16mf4_t t;}
+void foo5 () {vint32mf2_t t;}
+void foo6 () {vuint32mf2_t t;}
+void foo7 () {vint64m1_t t;} /* { dg-error {unknown type name 'vint64m1_t'} } */
+void foo8 () {vuint64m1_t t;} /* { dg-error {unknown type name 'vuint64m1_t'} } */
+void foo9 () {vint64m2_t t;} /* { dg-error {unknown type name 'vint64m2_t'} } */
+void foo10 () {vuint64m2_t t;} /* { dg-error {unknown type name 'vuint64m2_t'} } */
+void foo11 () {vint64m4_t t;} /* { dg-error {unknown type name 'vint64m4_t'} } */
+void foo12 () {vuint64m4_t t;} /* { dg-error {unknown type name 'vuint64m4_t'} } */
+void foo13 () {vint64m8_t t;} /* { dg-error {unknown type name 'vint64m8_t'} } */
+void foo14 () {vuint64m8_t t;} /* { dg-error {unknown type name 'vuint64m8_t'} } */
