diff --git a/gcc/common/config/riscv/riscv-common.c b/gcc/common/config/riscv/riscv-common.c
index 0ec067769847..42767165d25f 100644
--- a/gcc/common/config/riscv/riscv-common.c
+++ b/gcc/common/config/riscv/riscv-common.c
@@ -68,6 +68,9 @@ riscv_implied_info_t riscv_implied_info[] =
{"zks", "zksh"},
{"zks", "zkg"},
{"zks", "zkb"},
+ {"p", "zbpbo"},
+ {"p", "zpn"},
+ {"p", "zpsf"},
{NULL, NULL}
};
@@ -833,6 +836,9 @@ static const riscv_ext_flag_table_t riscv_ext_flag_table[] =
{"zksed", &gcc_options::x_riscv_crypto_subext, MASK_ZKSED},
{"zksh", &gcc_options::x_riscv_crypto_subext, MASK_ZKSH},
+ {"zbpbo", &gcc_options::x_riscv_rvp_subext, MASK_ZBPBO},
+ {"zpn", &gcc_options::x_riscv_rvp_subext, MASK_ZPN},
+ {"zpsf", &gcc_options::x_riscv_rvp_subext, MASK_ZPSF},
{NULL, NULL, 0}
};
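
The two tables work together: riscv_implied_info makes `p' pull in its
zbpbo, zpn and zpsf subextensions, and riscv_ext_flag_table then sets
the matching MASK_* bits in the new riscv_rvp_subext target variable.
A minimal sketch of how an implied-extension table of this shape is
walked (the struct mirrors riscv_implied_info_t; enable_extension is a
hypothetical helper, not the in-tree code):

  #include <string.h>

  struct implied_info { const char *ext; const char *implied_ext; };

  static const struct implied_info implied[] = {
    {"p", "zbpbo"}, {"p", "zpn"}, {"p", "zpsf"}, {NULL, NULL}
  };

  extern void enable_extension (const char *);  /* hypothetical */

  /* Enabling EXT also enables, recursively, everything the table
     lists as implied by it.  */
  static void
  enable_with_implied (const char *ext)
  {
    enable_extension (ext);
    for (const struct implied_info *i = implied; i->ext; i++)
      if (strcmp (i->ext, ext) == 0)
        enable_with_implied (i->implied_ext);
  }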
diff --git a/gcc/config.gcc b/gcc/config.gcc
index 8d0e4de710ff..45b53f0a06d0 100644
--- a/gcc/config.gcc
+++ b/gcc/config.gcc
@@ -525,6 +525,7 @@ pru-*-*)
;;
riscv*)
cpu_type=riscv
+ extra_headers="rvp_intrinsic.h"
extra_objs="riscv-builtins.o riscv-c.o riscv-sr.o riscv-shorten-memrefs.o"
d_target_objs="riscv-d.o"
;;
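
Listing rvp_intrinsic.h in extra_headers ships the intrinsic wrapper
header with the compiler, so user code can include it directly.  A
hedged usage sketch (the __rv_add8 name follows the usual P-extension
intrinsic convention and is assumed here to wrap the
__builtin_riscv_add8 builtin defined in riscv-builtins-rvp.def below):

  #include <rvp_intrinsic.h>

  /* SIMD add of four (RV32) or eight (RV64) packed bytes.  */
  unsigned long
  add_bytes (unsigned long a, unsigned long b)
  {
    return __rv_add8 (a, b);  /* assumed wrapper for __builtin_riscv_add8 */
  }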
diff --git a/gcc/config/riscv/constraints.md b/gcc/config/riscv/constraints.md
index ef9c81e424ec..481786d50b1a 100644
--- a/gcc/config/riscv/constraints.md
+++ b/gcc/config/riscv/constraints.md
@@ -81,3 +81,73 @@
A constant @code{move_operand}."
(and (match_operand 0 "move_operand")
(match_test "CONSTANT_P (op)")))
+
+(define_constraint "u02"
+ "Unsigned immediate 2-bit value"
+ (and (match_code "const_int")
+ (match_test "ival < (1 << 2) && ival >= 0")))
+
+(define_constraint "u03"
+ "Unsigned immediate 3-bit value"
+ (and (match_code "const_int")
+ (match_test "ival < (1 << 3) && ival >= 0")))
+
+(define_constraint "u04"
+ "Unsigned immediate 4-bit value"
+ (and (match_code "const_int")
+ (match_test "ival < (1 << 4) && ival >= 0")))
+
+(define_constraint "u05"
+ "Unsigned immediate 5-bit value"
+ (and (match_code "const_int")
+ (match_test "ival < (1 << 5) && ival >= 0")))
+
+(define_constraint "u06"
+ "Unsigned immediate 6-bit value"
+ (and (match_code "const_int")
+ (match_test "ival < (1 << 6) && ival >= 0")))
+
+(define_constraint "C00"
+ "Constant value 0"
+ (and (match_code "const_int")
+ (match_test "ival == 0")))
+
+(define_constraint "C01"
+ "Constant value 1"
+ (and (match_code "const_int")
+ (match_test "ival == 1")))
+
+(define_constraint "C02"
+ "Constant value 2"
+ (and (match_code "const_int")
+ (match_test "ival == 2")))
+
+(define_constraint "C03"
+ "Constant value 3"
+ (and (match_code "const_int")
+ (match_test "ival == 3")))
+
+(define_constraint "C04"
+ "Constant value 4"
+ (and (match_code "const_int")
+ (match_test "ival == 4")))
+
+(define_constraint "C08"
+ "Constant value 8"
+ (and (match_code "const_int")
+ (match_test "ival == 8")))
+
+(define_constraint "D07"
+ "A constraint that matches the integers 2^(0...7)."
+ (and (match_code "const_int")
+ (match_test "(unsigned) exact_log2 (ival) <= 7")))
+
+(define_constraint "C15"
+ "Constant value 15"
+ (and (match_code "const_int")
+ (match_test "ival == 15")))
+
+(define_constraint "C16"
+ "Constant value 16"
+ (and (match_code "const_int")
+ (match_test "ival == 16")))
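
Each define_constraint above also generates a satisfies_constraint_<name>
helper, which is what the new predicates in predicates.md call.  For
u03 the generated test is equivalent to the following C (shape assumed
from typical genpreds output, hence the _sketch suffix):

  /* A CONST_INT in the range [0, 7], i.e. an unsigned 3-bit value.  */
  static inline bool
  satisfies_constraint_u03_sketch (rtx op)
  {
    return CONST_INT_P (op)
           && INTVAL (op) >= 0
           && INTVAL (op) < (1 << 3);
  }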
diff --git a/gcc/config/riscv/predicates.md b/gcc/config/riscv/predicates.md
index f764fe7ba016..6684ff9d4856 100644
--- a/gcc/config/riscv/predicates.md
+++ b/gcc/config/riscv/predicates.md
@@ -212,3 +212,84 @@
{
return riscv_gpr_save_operation_p (op);
})
+
+(define_predicate "imm2u_operand"
+ (and (match_operand 0 "const_int_operand")
+ (match_test "satisfies_constraint_u02 (op)")))
+
+(define_predicate "imm3u_operand"
+ (and (match_operand 0 "const_int_operand")
+ (match_test "satisfies_constraint_u03 (op)")))
+
+(define_predicate "imm4u_operand"
+ (and (match_operand 0 "const_int_operand")
+ (match_test "satisfies_constraint_u04 (op)")))
+
+(define_predicate "imm5u_operand"
+ (and (match_operand 0 "const_int_operand")
+ (match_test "satisfies_constraint_u05 (op)")))
+
+(define_predicate "imm6u_operand"
+ (and (match_operand 0 "const_int_operand")
+ (match_test "satisfies_constraint_u06 (op)")))
+
+(define_predicate "rimm3u_operand"
+ (ior (match_operand 0 "register_operand")
+ (match_operand 0 "imm3u_operand")))
+
+(define_predicate "rimm4u_operand"
+ (ior (match_operand 0 "register_operand")
+ (match_operand 0 "imm4u_operand")))
+
+(define_predicate "rimm5u_operand"
+ (ior (match_operand 0 "register_operand")
+ (match_operand 0 "imm5u_operand")))
+
+(define_predicate "rimm6u_operand"
+ (ior (match_operand 0 "register_operand")
+ (match_operand 0 "imm6u_operand")))
+
+(define_predicate "const_insb64_operand"
+ (and (match_code "const_int")
+ (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
+
+(define_predicate "imm_1_2_4_8_operand"
+ (and (match_operand 0 "const_int_operand")
+ (ior (ior (match_test "satisfies_constraint_C01 (op)")
+ (match_test "satisfies_constraint_C02 (op)"))
+ (ior (match_test "satisfies_constraint_C04 (op)")
+ (match_test "satisfies_constraint_C08 (op)")))))
+
+(define_predicate "pwr_7_operand"
+ (and (match_code "const_int")
+ (match_test "INTVAL (op) != 0
+ && (unsigned) exact_log2 (INTVAL (op)) <= 7")))
+
+(define_predicate "imm_0_1_operand"
+ (and (match_operand 0 "const_int_operand")
+ (ior (match_test "satisfies_constraint_C00 (op)")
+ (match_test "satisfies_constraint_C01 (op)"))))
+
+(define_predicate "imm_1_2_operand"
+ (and (match_operand 0 "const_int_operand")
+ (ior (match_test "satisfies_constraint_C01 (op)")
+ (match_test "satisfies_constraint_C02 (op)"))))
+
+(define_predicate "imm_2_3_operand"
+ (and (match_operand 0 "const_int_operand")
+ (ior (match_test "satisfies_constraint_C02 (op)")
+ (match_test "satisfies_constraint_C03 (op)"))))
+
+(define_predicate "imm_15_16_operand"
+ (and (match_operand 0 "const_int_operand")
+ (ior (match_test "satisfies_constraint_C15 (op)")
+ (match_test "satisfies_constraint_C16 (op)"))))
+
+(define_predicate "rev_rimm_operand"
+ (ior (match_operand 0 "const_arith_operand")
+ (match_test "INTVAL (op) == (BITS_PER_WORD - 1)")))
+
+(define_predicate "fsr_shamt_imm"
+ (ior (match_operand 0 "register_operand")
+ (and (match_operand 0 "const_arith_operand")
+ (match_test "IN_RANGE (INTVAL (op), 1, 31)"))))
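
Both the D07 constraint and pwr_7_operand rely on the same trick:
GCC's exact_log2 returns -1 for values that are not a power of two,
and the cast to unsigned turns that -1 into a huge value that fails
the <= 7 test, so exactly 1, 2, 4, ..., 128 are accepted.  The same
check, written out as plain C:

  /* True only for the powers of two 2^0 .. 2^7; exact_log2 (from
     GCC's hwint.h) maps non-powers to -1, which the unsigned cast
     pushes far above 7.  The v != 0 check mirrors the predicate and
     is belt-and-braces, since exact_log2 (0) is also -1.  */
  static inline bool
  is_pwr_0_to_7 (HOST_WIDE_INT v)
  {
    return v != 0 && (unsigned) exact_log2 (v) <= 7;
  }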
diff --git a/gcc/config/riscv/riscv-builtins-rvp.def b/gcc/config/riscv/riscv-builtins-rvp.def
new file mode 100644
index 000000000000..59e14d707a81
--- /dev/null
+++ b/gcc/config/riscv/riscv-builtins-rvp.def
@@ -0,0 +1,967 @@
+/* Builtin definitions for P extension
+ Copyright (C) 2021 Free Software Foundation, Inc.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 3, or (at your option)
+any later version.
+
+GCC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
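+/* Each entry is DIRECT_BUILTIN_NO_PREFIX (INSN, NAME, FTYPE, AVAIL);
+   it is assumed to bind the machine-description pattern CODE_FOR_<INSN>
+   to a builtin named __builtin_riscv_<NAME> with prototype FTYPE,
+   registered only when the AVAIL predicate (zpn, zpn32, zpn64, ...)
+   holds.  The same NAME may appear under both zpn32 and zpn64 so that
+   RV32 and RV64 expand through different patterns.  */
+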
+/* zpn subset */
+DIRECT_BUILTIN_NO_PREFIX (addv4qi3, add8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (addv8qi3, add8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv4qi3, v_uadd8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (addv4qi3, v_sadd8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (addv8qi3, v64_uadd8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv8qi3, v64_sadd8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv2hi3, add16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (addv4hi3, add16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv2hi3, v_uadd16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (addv2hi3, v_sadd16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (addv4hi3, v64_uadd16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv4hi3, v64_sadd16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ave, ave, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (avedi, ave, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (bitrevsi, bitrev, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (bitrevdi, bitrev, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_NO_TARGET_BUILTIN_NO_PREFIX (clrovsi, clrov, RISCV_VOID_FTYPE_VOID, zpn32),
+DIRECT_NO_TARGET_BUILTIN_NO_PREFIX (clrovdi, clrov, RISCV_VOID_FTYPE_VOID, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv4qi2, clrs8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv4qi2, v_clrs8, RISCV_UV4QI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv8qi2, clrs8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv8qi2, v64_clrs8, RISCV_UV8QI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv2hi2, clrs16, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv4hi2, clrs16, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv2hi2, v_clrs16, RISCV_UV2HI_FTYPE_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv4hi2, v64_clrs16, RISCV_UV4HI_FTYPE_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clrsbsi2, clrs32, RISCV_UIXLEN_FTYPE_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv2si2, clrs32, RISCV_UIXLEN_FTYPE_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clrsbv2si2, v_clrs32, RISCV_UV2SI_FTYPE_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clzv4qi2, clz8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clzv8qi2, clz8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clzv4qi2, v_clz8, RISCV_UV4QI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clzv8qi2, v64_clz8, RISCV_UV8QI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clzv2hi2, clz16, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clzv4hi2, clz16, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clzv2hi2, v_clz16, RISCV_UV2HI_FTYPE_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clzv4hi2, v64_clz16, RISCV_UV4HI_FTYPE_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clzsi2, clz32, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (clzv2si2, clz32, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (clzv2si2, v_clz32, RISCV_UV2SI_FTYPE_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv4qi, cmpeq8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv8qi, cmpeq8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv4qi, v_scmpeq8, RISCV_UV4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv4qi, v_ucmpeq8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv8qi, v64_scmpeq8, RISCV_UV8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv8qi, v64_ucmpeq8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv2hi, cmpeq16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv4hi, cmpeq16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv2hi, v_scmpeq16, RISCV_UV2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv2hi, v_ucmpeq16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv4hi, v64_scmpeq16, RISCV_UV4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cmpeqv4hi, v64_ucmpeq16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crasv2hi, cras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (crasv2hi, v_ucras16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (crasv2hi, v_scras16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (cras16_64, cras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cras16_64, v64_ucras16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (cras16_64, v64_scras16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crsav2hi, crsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (crsav2hi, v_ucrsa16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (crsav2hi, v_scrsa16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (crsa16_64, crsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crsa16_64, v64_ucrsa16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crsa16_64, v64_scrsa16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (insb, insb, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (insb64, insb, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsv4qi2, kabs8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kabsv4qi2, v_kabs8, RISCV_V4QI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kabsv8qi2, kabs8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsv8qi2, v64_kabs8, RISCV_V8QI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsv2hi2, kabs16, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kabsv2hi2, v_kabs16, RISCV_V2HI_FTYPE_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kabsv4hi2, kabs16, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsv4hi2, v64_kabs16, RISCV_V4HI_FTYPE_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsw, kabsw, RISCV_SI_FTYPE_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kaddv4qi3, kadd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kaddv4qi3, v_kadd8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kaddv8qi3, kadd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kaddv8qi3, v64_kadd8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kaddv2hi3, kadd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kaddv2hi3, v_kadd16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kaddv4hi3, kadd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kaddv4hi3, v64_kadd16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ssaddhi3, kaddh, RISCV_HI_FTYPE_HI_HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kcrasv2hi, kcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kcrasv2hi, v_kcras16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kcras16_64, kcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcras16_64, v64_kcras16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcrsav2hi, kcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kcrsav2hi, v_kcrsa16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kcrsa16_64, kcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcrsa16_64, v64_kcrsa16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmbb, kdmbb, RISCV_SI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmbt, kdmbt, RISCV_SI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmtt, kdmtt, RISCV_SI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmbb, v_kdmbb, RISCV_SI_FTYPE_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmbt, v_kdmbt, RISCV_SI_FTYPE_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmtt, v_kdmtt, RISCV_SI_FTYPE_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmabb, kdmabb, RISCV_SI_FTYPE_SI_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmabt, kdmabt, RISCV_SI_FTYPE_SI_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmatt, kdmatt, RISCV_SI_FTYPE_SI_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmabb, v_kdmabb, RISCV_SI_FTYPE_SI_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmabt, v_kdmabt, RISCV_SI_FTYPE_SI_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kdmatt, v_kdmatt, RISCV_SI_FTYPE_SI_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (khm8v4qi, khm8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khmx8v4qi, khmx8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khm8v4qi, v_khm8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khmx8v4qi, v_khmx8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khm8v8qi, khm8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmx8v8qi, khmx8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khm8v8qi, v64_khm8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmx8v8qi, v64_khmx8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khm16v2hi, khm16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khmx16v2hi, khmx16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khm16v2hi, v_khm16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khmx16v2hi, v_khmx16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (khm16v4hi, khm16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmx16v4hi, khmx16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khm16v4hi, v64_khm16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmx16v4hi, v64_khmx16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmbbsi, khmbb, RISCV_SI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (khmbtsi, khmbt, RISCV_SI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (khmttsi, khmtt, RISCV_SI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (khmbbsi, v_khmbb, RISCV_SI_FTYPE_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (khmbtsi, v_khmbt, RISCV_SI_FTYPE_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (khmttsi, v_khmtt, RISCV_SI_FTYPE_V2HI_V2HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kmabb, kmabb, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmabt, kmabt, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmatt, kmatt, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmabb, v_kmabb, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmabt, v_kmabt, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmatt, v_kmatt, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmabb64, kmabb, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabt64, kmabt, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmatt64, kmatt, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabb64, v64_kmabb, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabt64, v64_kmabt, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmatt64, v64_kmatt, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmada, kmada, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmaxda, kmaxda, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmada, v_kmada, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmaxda, v_kmaxda, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmada64, kmada, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxda64, kmaxda, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmada64, v64_kmada, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxda64, v64_kmaxda, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmads, kmads, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmadrs, kmadrs, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmaxds, kmaxds, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmads, v_kmads, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmadrs, v_kmadrs, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmaxds, v_kmaxds, RISCV_IXLEN_FTYPE_IXLEN_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmads64, kmads, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmadrs64, kmadrs, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxds64, kmaxds, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmads64, v64_kmads, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmadrs64, v64_kmadrs, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxds64, v64_kmaxds, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmda, kmda, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmxda, kmxda, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmda, v_kmda, RISCV_IXLEN_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmxda, v_kmxda, RISCV_IXLEN_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmda64, kmda, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmxda64, kmxda, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmda64, v64_kmda, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmxda64, v64_kmxda, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmac, kmmac, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmac_round, kmmac_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmac_64, kmmac, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmac64_round, kmmac_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmac_64, v_kmmac, RISCV_V2SI_FTYPE_V2SI_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmac64_round, v_kmmac_u, RISCV_V2SI_FTYPE_V2SI_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb, kmmawb, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb_round, kmmawb_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb, v_kmmawb, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb_round, v_kmmawb_u, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb64, kmmawb, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb64_round, kmmawb_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb64, v64_kmmawb, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb64_round, v64_kmmawb_u, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2, kmmawb2, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2_round, kmmawb2_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2, v_kmmawb2, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2_round, v_kmmawb2_u, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2_64, kmmawb2, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2_64_round, kmmawb2_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2_64, v64_kmmawb2, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawb2_64_round, v64_kmmawb2_u, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt, kmmawt, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt_round, kmmawt_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2, kmmawt2, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2_round, kmmawt2_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt, v_kmmawt, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt_round, v_kmmawt_u, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2, v_kmmawt2, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2_round, v_kmmawt2_u, RISCV_SI_FTYPE_SI_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt64, kmmawt, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt64_round, kmmawt_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2_64, kmmawt2, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2_64_round, kmmawt2_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt64, v64_kmmawt, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt64_round, v64_kmmawt_u, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2_64, v64_kmmawt2, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmawt2_64_round, v64_kmmawt2_u, RISCV_V2SI_FTYPE_V2SI_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmsb, kmmsb, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmsb_round, kmmsb_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmsb_64, kmmsb, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmsb64_round, kmmsb_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmsb_64, v_kmmsb, RISCV_V2SI_FTYPE_V2SI_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmsb64_round, v_kmmsb_u, RISCV_V2SI_FTYPE_V2SI_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb2, kmmwb2, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb2_round, kmmwb2_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb2, v_kmmwb2, RISCV_SI_FTYPE_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb2_round, v_kmmwb2_u, RISCV_SI_FTYPE_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb64, kmmwb2, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb64_round, kmmwb2_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb64, v64_kmmwb2, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwb64_round, v64_kmmwb2_u, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt2, kmmwt2, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt2_round, kmmwt2_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt2, v_kmmwt2, RISCV_SI_FTYPE_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt2_round, v_kmmwt2_u, RISCV_SI_FTYPE_SI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt64, kmmwt2, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt64_round, kmmwt2_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt64, v64_kmmwt2, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmmwt64_round, v64_kmmwt2_u, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsda, kmsda, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmsxda, kmsxda, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmsda, v_kmsda, RISCV_SI_FTYPE_SI_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmsxda, v_kmsxda, RISCV_SI_FTYPE_SI_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kmsda64, kmsda, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsxda64, kmsxda, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsda64, v64_kmsda, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsxda64, v64_kmsxda, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksll, ksllw, RISCV_SI_FTYPE_SI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kslli8v4qisi, ksll8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslli8v4qisi, v_ksll8, RISCV_V4QI_FTYPE_V4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslli8v8qidi, ksll8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslli8v8qidi, v64_ksll8, RISCV_V8QI_FTYPE_V8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslli16v2hi, ksll16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslli16v2hi, v_ksll16, RISCV_V2HI_FTYPE_V2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslli16v4hi, ksll16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslli16v4hi, v64_ksll16, RISCV_V4HI_FTYPE_V4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4qi, kslra8, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4qi_round, kslra8_u, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4qi, v_kslra8, RISCV_V4QI_FTYPE_V4QI_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4qi_round, v_kslra8_u, RISCV_V4QI_FTYPE_V4QI_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav8qi, kslra8, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav8qi_round, kslra8_u, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav8qi, v64_kslra8, RISCV_V8QI_FTYPE_V8QI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav8qi_round, v64_kslra8_u, RISCV_V8QI_FTYPE_V8QI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2hi, kslra16, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2hi_round, kslra16_u, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2hi, v_kslra16, RISCV_V2HI_FTYPE_V2HI_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2hi_round, v_kslra16_u, RISCV_V2HI_FTYPE_V2HI_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4hi, kslra16, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4hi_round, kslra16_u, RISCV_UIXLEN_FTYPE_UIXLEN_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4hi, v64_kslra16, RISCV_V4HI_FTYPE_V4HI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav4hi_round, v64_kslra16_u, RISCV_V4HI_FTYPE_V4HI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslraw, kslraw, RISCV_IXLEN_FTYPE_SI_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslrawu, kslraw_u, RISCV_IXLEN_FTYPE_SI_SI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kslraw64, kslraw, RISCV_IXLEN_FTYPE_SI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrawu64, kslraw_u, RISCV_IXLEN_FTYPE_SI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstasv2hi, kstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kstasv2hi, v_kstas16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kstas16_64, kstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstas16_64, v64_kstas16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstsav2hi, kstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kstsav2hi, v_kstsa16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kstsa16_64, kstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstsa16_64, v64_kstsa16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksubv4qi3, ksub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ksubv4qi3, v_ksub8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ksubv8qi3, ksub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksubv8qi3, v64_ksub8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksubv2hi3, ksub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ksubv2hi3, v_ksub16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ksubv4hi3, ksub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksubv4hi3, v64_ksub16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sssubhi3, ksubh, RISCV_HI_FTYPE_HI_HI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (sssubsi3, ksubw, RISCV_SI_FTYPE_SI_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (kwmmul, kwmmul, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kwmmul_round, kwmmul_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (kwmmul_64, kwmmul, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kwmmul64_round, kwmmul_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kwmmul_64, v_kwmmul, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kwmmul64_round, v_kwmmul_u, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (maddr32, maddr32, RISCV_SI_FTYPE_SI_SI_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (msubr32, msubr32, RISCV_SI_FTYPE_SI_SI_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (pbsadsi, pbsad, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pbsadasi, pbsada, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pbsadsi, v_pbsad, RISCV_USI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pbsadasi, v_pbsada, RISCV_USI_FTYPE_USI_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pbsaddi, pbsad, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pbsadadi, pbsada, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pbsaddi, v64_pbsad, RISCV_UIXLEN_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pbsadadi, v64_pbsada, RISCV_UIXLEN_FTYPE_UIXLEN_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbbv2hi, pkbb16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pkbtv2hi, pkbt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pktbv2hi, pktb16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pkttv2hi, pktt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pkbbv2hi, v_pkbb16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pkbtv2hi, v_pkbt16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pktbv2hi, v_pktb16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pkttv2hi, v_pktt16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (pkbb64, pkbb16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbt64, pkbt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pktb64, pktb16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pktt64, pktt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbb64, v64_pkbb16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbt64, v64_pkbt16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pktb64, v64_pktb16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pktt64, v64_pktt16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddv4qi3, radd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (raddv4qi3, v_radd8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (raddv8qi3, radd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddv8qi3, v64_radd8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddv2hi3, radd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (raddv2hi3, v_radd16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (raddv4hi3, radd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddv4hi3, v64_radd16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddsi3, raddw, RISCV_SI_FTYPE_SI_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (rcrasv2hi, rcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rcrasv2hi, v_rcras16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rcras16_64, rcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcras16_64, v64_rcras16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcrsav2hi, rcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rcrsav2hi, v_rcrsa16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rcrsa16_64, rcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcrsa16_64, v64_rcrsa16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rdovsi, rdov, RISCV_UIXLEN_FTYPE_VOID, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rdovdi, rdov, RISCV_UIXLEN_FTYPE_VOID, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstasv2hi, rstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rstasv2hi, v_rstas16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rstas16_64, rstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstas16_64, v64_rstas16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstsav2hi, rstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rstsav2hi, v_rstsa16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rstsa16_64, rstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstsa16_64, v64_rstsa16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubv4qi3, rsub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rsubv4qi3, v_rsub8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rsubv8qi3, rsub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubv8qi3, v64_rsub8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubv2hi3, rsub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rsubv4hi3, rsub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubv2hi3, v_rsub16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (rsubv4hi3, v64_rsub16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubsi3, rsubw, RISCV_SI_FTYPE_SI_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (sclip8v4qi, sclip8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sclip8v4qi, v_sclip8, RISCV_V4QI_FTYPE_V4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sclip8v8qi, sclip8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sclip8v8qi, v64_sclip8, RISCV_V8QI_FTYPE_V8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sclip16v2hi, sclip16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sclip16v2hi, v_sclip16, RISCV_V2HI_FTYPE_V2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sclip16v4hi, sclip16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sclip16v4hi, v64_sclip16, RISCV_V4HI_FTYPE_V4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sclip32sisi, sclip32, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sclip32v2sidi, sclip32, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sclip32v2sidi, v_sclip32, RISCV_V2SI_FTYPE_V2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmplev4qi, scmple8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmplev4qi, v_scmple8, RISCV_UV4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmplev8qi, scmple8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmplev8qi, v64_scmple8, RISCV_UV8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmplev2hi, scmple16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmplev2hi, v_scmple16, RISCV_UV2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmplev4hi, scmple16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmplev4hi, v64_scmple16, RISCV_UV4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv4qi, scmplt8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv4qi, v_scmplt8, RISCV_UV4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv8qi, scmplt8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv8qi, v64_scmplt8, RISCV_UV8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv2hi, scmplt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv2hi, v_scmplt16, RISCV_UV2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv4hi, scmplt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (scmpltv4hi, v64_scmplt16, RISCV_UV4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashlv4qi3, sll8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashlv4qi3, v_sll8, RISCV_UV4QI_FTYPE_UV4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashlv8qi3, sll8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashlv8qi3, v64_sll8, RISCV_UV8QI_FTYPE_UV8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashlv2hi3, sll16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashlv2hi3, v_sll16, RISCV_UV2HI_FTYPE_UV2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashlv4hi3, sll16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashlv4hi3, v64_sll16, RISCV_UV4HI_FTYPE_UV4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaqa, smaqa, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smaqa, v_smaqa, RISCV_SI_FTYPE_SI_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smaqa64, smaqa, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaqa64, v64_smaqa, RISCV_V2SI_FTYPE_V2SI_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sumaqa, smaqa_su, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sumaqa, v_smaqa_su, RISCV_SI_FTYPE_SI_V4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sumaqa64, smaqa_su, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sumaqa64, v64_smaqa_su, RISCV_V2SI_FTYPE_V2SI_V8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaxv4qi3, smax8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smaxv4qi3, v_smax8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smaxv8qi3, smax8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaxv8qi3, v64_smax8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaxv2hi3, smax16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smaxv2hi3, v_smax16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smaxv4hi3, smax16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaxv4hi3, v64_smax16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbb, smbb16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smbt, smbt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smtt, smtt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smbb, v_smbb16, RISCV_SI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smbt, v_smbt16, RISCV_SI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smtt, v_smtt16, RISCV_SI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smbb64, smbb16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbt64, smbt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smtt64, smtt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbb64, v64_smbb16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbt64, v64_smbt16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smtt64, v64_smtt16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smds, smds, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smds, v_smds, RISCV_SI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smds64, smds, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smds64, v64_smds, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smdrs, smdrs, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smdrs, v_smdrs, RISCV_SI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smdrs64, smdrs, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smdrs64, v64_smdrs, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smxdsv, smxds, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smxdsv, v_smxds, RISCV_SI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smxds64, smxds, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smxds64, v64_smxds, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sminv4qi3, smin8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sminv4qi3, v_smin8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sminv8qi3, smin8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sminv8qi3, v64_smin8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sminv2hi3, smin16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sminv2hi3, v_smin16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sminv4hi3, smin16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sminv4hi3, v64_smin16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smulsi3_highpart, smmul, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smulv2si3_highpart, smmul, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smulv2si3_highpart, v_smmul, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmul_round, smmul_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmulv2si3_round, smmul_u, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmulv2si3_round, v_smmul_u, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwb, smmwb, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwb_round, smmwb_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwb, v_smmwb, RISCV_IXLEN_FTYPE_IXLEN_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwb_round, v_smmwb_u, RISCV_IXLEN_FTYPE_IXLEN_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwb64, smmwb, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwb64_round, smmwb_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwb64, v64_smmwb, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwb64_round, v64_smmwb_u, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwt, smmwt, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwt_round, smmwt_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwt, v_smmwt, RISCV_IXLEN_FTYPE_IXLEN_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwt_round, v_smmwt_u, RISCV_IXLEN_FTYPE_IXLEN_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (smmwt64, smmwt, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwt64_round, smmwt_u, RISCV_IXLEN_FTYPE_IXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwt64, v64_smmwt, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smmwt64_round, v64_smmwt_u, RISCV_V2SI_FTYPE_V2SI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sraiu, sra_u, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sraiu64, sra_u, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashrv4qi3, sra8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sra8_roundv4qi, sra8_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashrv4qi3, v_sra8, RISCV_V4QI_FTYPE_V4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sra8_roundv4qi, v_sra8_u, RISCV_V4QI_FTYPE_V4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashrv8qi3, sra8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sra8_roundv8qi, sra8_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashrv8qi3, v64_sra8, RISCV_V8QI_FTYPE_V8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sra8_roundv8qi, v64_sra8_u, RISCV_V8QI_FTYPE_V8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashrv2hi3, sra16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sra16_roundv2hi, sra16_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashrv2hi3, v_sra16, RISCV_V2HI_FTYPE_V2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sra16_roundv2hi, v_sra16_u, RISCV_V2HI_FTYPE_V2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ashrv4hi3, sra16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sra16_roundv4hi, sra16_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashrv4hi3, v64_sra16, RISCV_V4HI_FTYPE_V4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sra16_roundv4hi, v64_sra16_u, RISCV_V4HI_FTYPE_V4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (lshrv4qi3, srl8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (srl8_roundv4qi, srl8_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (lshrv4qi3, v_srl8, RISCV_UV4QI_FTYPE_UV4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (srl8_roundv4qi, v_srl8_u, RISCV_UV4QI_FTYPE_UV4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (lshrv8qi3, srl8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (srl8_roundv8qi, srl8_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (lshrv8qi3, v64_srl8, RISCV_UV8QI_FTYPE_UV8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (srl8_roundv8qi, v64_srl8_u, RISCV_UV8QI_FTYPE_UV8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (lshrv2hi3, srl16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (srl16_roundv2hi, srl16_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (lshrv2hi3, v_srl16, RISCV_UV2HI_FTYPE_UV2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (srl16_roundv2hi, v_srl16_u, RISCV_UV2HI_FTYPE_UV2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (lshrv4hi3, srl16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (srl16_roundv4hi, srl16_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (lshrv4hi3, v64_srl16, RISCV_UV4HI_FTYPE_UV4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (srl16_roundv4hi, v64_srl16_u, RISCV_UV4HI_FTYPE_UV4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stasv2hi, stas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (stasv2hi, v_ustas16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (stasv2hi, v_sstas16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (stas16_64, stas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stas16_64, v64_ustas16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stas16_64, v64_sstas16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stsav2hi, stsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (stsav2hi, v_ustsa16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (stsav2hi, v_sstsa16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (stsa16_64, stsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stsa16_64, v64_ustsa16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stsa16_64, v64_sstsa16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv4qi3, sub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (subv4qi3, v_usub8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (subv4qi3, v_ssub8, RISCV_V4QI_FTYPE_V4QI_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (subv8qi3, sub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv8qi3, v64_usub8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv8qi3, v64_ssub8, RISCV_V8QI_FTYPE_V8QI_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv2hi3, sub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (subv2hi3, v_usub16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (subv2hi3, v_ssub16, RISCV_V2HI_FTYPE_V2HI_V2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (subv4hi3, sub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv4hi3, v64_usub16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv4hi3, v64_ssub16, RISCV_V4HI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd810, sunpkd810, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd820, sunpkd820, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd830, sunpkd830, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd831, sunpkd831, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd832, sunpkd832, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd810, v_sunpkd810, RISCV_V2HI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd820, v_sunpkd820, RISCV_V2HI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd830, v_sunpkd830, RISCV_V2HI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd831, v_sunpkd831, RISCV_V2HI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd832, v_sunpkd832, RISCV_V2HI_FTYPE_V4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd810_64, sunpkd810, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd820_64, sunpkd820, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd830_64, sunpkd830, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd831_64, sunpkd831, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd832_64, sunpkd832, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd810_64, v64_sunpkd810, RISCV_V4HI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd820_64, v64_sunpkd820, RISCV_V4HI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd830_64, v64_sunpkd830, RISCV_V4HI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd831_64, v64_sunpkd831, RISCV_V4HI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sunpkd832_64, v64_sunpkd832, RISCV_V4HI_FTYPE_V8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (bswap8, swap8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (bswap8, v_swap8, RISCV_UV4QI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (bswap8_64, swap8, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (bswap8_64, v64_swap8, RISCV_UV8QI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uclip8v4qi, uclip8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uclip8v4qi, v_uclip8, RISCV_UV4QI_FTYPE_V4QI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uclip8v8qi, uclip8, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uclip8v8qi, v64_uclip8, RISCV_UV8QI_FTYPE_V8QI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uclip16v2hi, uclip16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uclip16v2hi, v_uclip16, RISCV_UV2HI_FTYPE_V2HI_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uclip16v4hi, uclip16, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uclip16v4hi, v64_uclip16, RISCV_UV4HI_FTYPE_V4HI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uclip32sisi, uclip32, RISCV_UIXLEN_FTYPE_IXLEN_USI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uclip32v2sidi, uclip32, RISCV_UIXLEN_FTYPE_IXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uclip32v2sidi, v_uclip32, RISCV_UV2SI_FTYPE_V2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv4qi, ucmplt8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev4qi, ucmple8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv4qi, v_ucmplt8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev4qi, v_ucmple8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv8qi, ucmplt8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev8qi, ucmple8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv8qi, v64_ucmplt8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev8qi, v64_ucmple8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv2hi, ucmplt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev2hi, ucmple16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv2hi, v_ucmplt16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev2hi, v_ucmple16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv4hi, ucmplt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev4hi, ucmple16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmpltv4hi, v64_ucmplt16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ucmplev4hi, v64_ucmple16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv4qi3, ukadd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv4qi3, v_ukadd8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv8qi3, ukadd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv8qi3, v64_ukadd8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv2hi3, ukadd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv2hi3, v_ukadd16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv4hi3, ukadd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv4hi3, v64_ukadd16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (usaddhi3, ukaddh, RISCV_UHI_FTYPE_UHI_UHI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (usaddsi3, ukaddw, RISCV_USI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (ssaddsi3, kaddw, RISCV_SI_FTYPE_SI_SI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (ukcrasv2hi, ukcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukcrasv2hi, v_ukcras16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukcras16_64, ukcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcras16_64, v64_ukcras16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcrsav2hi, ukcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukcrsav2hi, v_ukcrsa16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukcrsa16_64, ukcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcrsa16_64, v64_ukcrsa16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstasv2hi, ukstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukstasv2hi, v_ukstas16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukstas16_64, ukstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstas16_64, v64_ukstas16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstsav2hi, ukstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukstsav2hi, v_ukstsa16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ukstsa16_64, ukstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstsa16_64, v64_ukstsa16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uksubv4qi3, uksub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uksubv4qi3, v_uksub8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uksubv8qi3, uksub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uksubv8qi3, v64_uksub8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uksubv2hi3, uksub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uksubv2hi3, v_uksub16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uksubv4hi3, uksub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uksubv4hi3, v64_uksub16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ussubhi3, uksubh, RISCV_UHI_FTYPE_UHI_UHI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (ussubsi3, uksubw, RISCV_USI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (umaqa, umaqa, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (umaqa, v_umaqa, RISCV_USI_FTYPE_USI_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (umaqa64, umaqa, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaqa64, v64_umaqa, RISCV_UV2SI_FTYPE_UV2SI_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaxv4qi3, umax8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (umaxv4qi3, v_umax8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (umaxv8qi3, umax8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaxv8qi3, v64_umax8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaxv2hi3, umax16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (umaxv2hi3, v_umax16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (umaxv4hi3, umax16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaxv4hi3, v64_umax16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uminv4qi3, umin8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uminv4qi3, v_umin8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uminv8qi3, umin8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uminv8qi3, v64_umin8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uminv2hi3, umin16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uminv2hi3, v_umin16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uminv4hi3, umin16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uminv4hi3, v64_umin16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddv4qi3, uradd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uraddv4qi3, v_uradd8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uraddv8qi3, uradd8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddv8qi3, v64_uradd8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddv2hi3, uradd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uraddv2hi3, v_uradd16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (uraddv4hi3, uradd16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddv4hi3, v64_uradd16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddsi3, uraddw, RISCV_USI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (urcrasv2hi, urcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urcrasv2hi, v_urcras16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urcras16_64, urcras16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcras16_64, v64_urcras16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcrsav2hi, urcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urcrsav2hi, v_urcrsa16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urcrsa16_64, urcrsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcrsa16_64, v64_urcrsa16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstasv2hi, urstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urstasv2hi, v_urstas16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urstas16_64, urstas16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstas16_64, v64_urstas16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstsav2hi, urstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urstsav2hi, v_urstsa16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (urstsa16_64, urstsa16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstsa16_64, v64_urstsa16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubv4qi3, ursub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ursubv4qi3, v_ursub8, RISCV_UV4QI_FTYPE_UV4QI_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ursubv8qi3, ursub8, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubv8qi3, v64_ursub8, RISCV_UV8QI_FTYPE_UV8QI_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubv2hi3, ursub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ursubv2hi3, v_ursub16, RISCV_UV2HI_FTYPE_UV2HI_UV2HI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (ursubv4hi3, ursub16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubv4hi3, v64_ursub16, RISCV_UV4HI_FTYPE_UV4HI_UV4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubsi3, ursubw, RISCV_USI_FTYPE_USI_USI, zpn),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd810, zunpkd810, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd820, zunpkd820, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd830, zunpkd830, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd831, zunpkd831, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd832, zunpkd832, RISCV_UIXLEN_FTYPE_UIXLEN, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd810, v_zunpkd810, RISCV_UV2HI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd820, v_zunpkd820, RISCV_UV2HI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd830, v_zunpkd830, RISCV_UV2HI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd831, v_zunpkd831, RISCV_UV2HI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd832, v_zunpkd832, RISCV_UV2HI_FTYPE_UV4QI, zpn32),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd810_64, zunpkd810, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd820_64, zunpkd820, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd830_64, zunpkd830, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd831_64, zunpkd831, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd832_64, zunpkd832, RISCV_UIXLEN_FTYPE_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd810_64, v64_zunpkd810, RISCV_UV4HI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd820_64, v64_zunpkd820, RISCV_UV4HI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd830_64, v64_zunpkd830, RISCV_UV4HI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd831_64, v64_zunpkd831, RISCV_UV4HI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (zunpkd832_64, v64_zunpkd832, RISCV_UV4HI_FTYPE_UV8QI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv2si3, add32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv2si3, v_uadd32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (addv2si3, v_sadd32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crasv2si, cras32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crasv2si, v_ucras32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crasv2si, v_scras32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crsav2si, crsa32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crsav2si, v_ucrsa32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (crsav2si, v_scrsa32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsv2si2, kabs32, RISCV_DI_FTYPE_DI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kabsv2si2, v_kabs32, RISCV_V2SI_FTYPE_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kaddv2si3, kadd32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kaddv2si3, v_kadd32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcrasv2si, kcras32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcrasv2si, v_kcras32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcrsav2si, kcrsa32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kcrsav2si, v_kcrsa32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmbb16, khmbb16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmbt16, khmbt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmtt16, khmtt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmbb16, kdmbb16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmbt16, kdmbt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmtt16, kdmtt16, RISCV_IXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmabb16, kdmabb16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmabt16, kdmabt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmatt16, kdmatt16, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmbb16, v_khmbb16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmbt16, v_khmbt16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (khmtt16, v_khmtt16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmbb16, v_kdmbb16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmbt16, v_kdmbt16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmtt16, v_kdmtt16, RISCV_V2SI_FTYPE_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmabb16, v_kdmabb16, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmabt16, v_kdmabt16, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kdmatt16, v_kdmatt16, RISCV_V2SI_FTYPE_V2SI_V4HI_V4HI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabb32, kmabb32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabt32, kmabt32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmatt32, kmatt32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabb32, v_kmabb32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmabt32, v_kmabt32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmatt32, v_kmatt32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxda32, kmaxda32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxda32, v_kmaxda32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmda32, kmda32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmxda32, kmxda32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmda32, v_kmda32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmxda32, v_kmxda32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmads32, kmads32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmadrs32, kmadrs32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxds32, kmaxds32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmads32, v_kmads32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmadrs32, v_kmadrs32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmaxds32, v_kmaxds32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsda32, kmsda32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsxda32, kmsxda32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsda32, v_kmsda32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kmsxda32, v_kmsxda32, RISCV_IXLEN_FTYPE_IXLEN_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksll32, ksll32, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksll32, v_ksll32, RISCV_V2SI_FTYPE_V2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2si, kslra32, RISCV_DI_FTYPE_DI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2si_round, kslra32_u, RISCV_DI_FTYPE_DI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2si, v_kslra32, RISCV_V2SI_FTYPE_V2SI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kslrav2si_round, v_kslra32_u, RISCV_V2SI_FTYPE_V2SI_SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstasv2si, kstas32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstasv2si, v_kstas32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstsav2si, kstsa32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (kstsav2si, v_kstsa32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksubv2si3, ksub32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ksubv2si3, v_ksub32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbbv2si, pkbb32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbtv2si, pkbt32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pktbv2si, pktb32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkttv2si, pktt32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbbv2si, v_pkbb32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkbtv2si, v_pkbt32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pktbv2si, v_pktb32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (pkttv2si, v_pktt32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddv2si3, radd32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (raddv2si3, v_radd32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcrasv2si, rcras32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcrasv2si, v_rcras32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcrsav2si, v_rcrsa32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rcrsav2si, rcrsa32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstasv2si, rstas32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstasv2si, v_rstas32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstsav2si, rstsa32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rstsav2si, v_rstsa32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubv2si3, rsub32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (rsubv2si3, v_rsub32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashlv2si3, sll32, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashlv2si3, v_sll32, RISCV_UV2SI_FTYPE_UV2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaxv2si3, smax32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smaxv2si3, v_smax32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbb32, smbb32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbt32, smbt32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smtt32, smtt32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbb32, v_smbb32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smbt32, v_smbt32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smtt32, v_smtt32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smds32, smds32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smdrs32, smdrs32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smxds32, smxds32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smds32, v_smds32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smdrs32, v_smdrs32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (smxds32, v_smxds32, RISCV_IXLEN_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sminv2si3, smin32, RISCV_IXLEN_FTYPE_IXLEN_IXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sminv2si3, v_smin32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashrv2si3, v_sra32, RISCV_V2SI_FTYPE_V2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sra32_round, v_sra32_u, RISCV_V2SI_FTYPE_V2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ashrv2si3, sra32, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sra32_round, sra32_u, RISCV_IXLEN_FTYPE_IXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (sraiw_u, sraw_u, RISCV_SI_FTYPE_SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (lshrv2si3, srl32, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (srl32_round, srl32_u, RISCV_UIXLEN_FTYPE_UIXLEN_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (lshrv2si3, v_srl32, RISCV_UV2SI_FTYPE_UV2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (srl32_round, v_srl32_u, RISCV_UV2SI_FTYPE_UV2SI_USI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stasv2si, stas32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stasv2si, v_ustas32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stasv2si, v_sstas32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stsav2si, stsa32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stsav2si, v_ustsa32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (stsav2si, v_sstsa32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv2si3, sub32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv2si3, v_usub32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (subv2si3, v_ssub32, RISCV_V2SI_FTYPE_V2SI_V2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv2si3, ukadd32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukaddv2si3, v_ukadd32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcrasv2si, ukcras32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcrsav2si, ukcrsa32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcrasv2si, v_ukcras32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukcrsav2si, v_ukcrsa32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstasv2si, ukstas32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstasv2si, v_ukstas32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstsav2si, ukstsa32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ukstsav2si, v_ukstsa32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uksubv2si3, uksub32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uksubv2si3, v_uksub32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaxv2si3, umax32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (umaxv2si3, v_umax32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uminv2si3, umin32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uminv2si3, v_umin32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddv2si3, uradd32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (uraddv2si3, v_uradd32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcrasv2si, urcras32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcrasv2si, v_urcras32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcrsav2si, urcrsa32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urcrsav2si, v_urcrsa32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstasv2si, urstas32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstasv2si, v_urstas32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstsav2si, urstsa32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (urstsav2si, v_urstsa32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubv2si3, ursub32, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN, zpn64),
+DIRECT_BUILTIN_NO_PREFIX (ursubv2si3, v_ursub32, RISCV_UV2SI_FTYPE_UV2SI_UV2SI, zpn64),
+
+/* Zpsf subset.  */
+DIRECT_BUILTIN_NO_PREFIX (smal, v_smal, RISCV_DI_FTYPE_DI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smal_64, v64_smal, RISCV_DI_FTYPE_DI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smal, smal, RISCV_DI_FTYPE_DI_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smal_64, smal, RISCV_DI_FTYPE_DI_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (adddi3, sadd64, RISCV_DI_FTYPE_DI_DI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (adddi3, uadd64, RISCV_UDI_FTYPE_UDI_UDI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (radddi3, radd64, RISCV_DI_FTYPE_DI_DI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (uradddi3, uradd64, RISCV_UDI_FTYPE_UDI_UDI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (rvp_kadddi3, kadd64, RISCV_DI_FTYPE_DI_DI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (rvp_ukadddi3, ukadd64, RISCV_UDI_FTYPE_UDI_UDI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (subdi3, ssub64, RISCV_DI_FTYPE_DI_DI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (subdi3, usub64, RISCV_UDI_FTYPE_UDI_UDI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (rsubdi3, rsub64, RISCV_DI_FTYPE_DI_DI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (ursubdi3, ursub64, RISCV_UDI_FTYPE_UDI_UDI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (rvp_ksubdi3, ksub64, RISCV_DI_FTYPE_DI_DI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (rvp_uksubdi3, uksub64, RISCV_UDI_FTYPE_UDI_UDI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (maddsidi4, smar64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vsmar64_1, smar64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vsmar64_1, v_smar64, RISCV_DI_FTYPE_DI_V2SI_V2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (msubsidi4, smsr64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vsmsr64, smsr64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vsmsr64, v_smsr64, RISCV_DI_FTYPE_DI_V2SI_V2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (umaddsidi4, umar64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vumar64_1, umar64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vumar64_1, v_umar64, RISCV_UDI_FTYPE_UDI_UV2SI_UV2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (umsubsidi4, umsr64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vumsr64, umsr64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vumsr64, v_umsr64, RISCV_UDI_FTYPE_UDI_UV2SI_UV2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (ssmaddsidi4, kmar64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vkmar64, kmar64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vkmar64, v_kmar64, RISCV_DI_FTYPE_DI_V2SI_V2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (usmaddsidi4, ukmar64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vukmar64, ukmar64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vukmar64, v_ukmar64, RISCV_UDI_FTYPE_UDI_UV2SI_UV2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (ssmsubsidi4, kmsr64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vkmsr64, kmsr64, RISCV_DI_FTYPE_DI_IXLEN_IXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vkmsr64, v_kmsr64, RISCV_DI_FTYPE_DI_V2SI_V2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (usmsubsidi4, ukmsr64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (vukmsr64, ukmsr64, RISCV_UDI_FTYPE_UDI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (vukmsr64, v_ukmsr64, RISCV_UDI_FTYPE_UDI_UV2SI_UV2SI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalbb, smalbb, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalbt, smalbt, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smaltt, smaltt, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalbb64, smalbb, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalbt64, smalbt, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smaltt64, smaltt, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalbb, v_smalbb, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalbt, v_smalbt, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smaltt, v_smaltt, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalbb64, v64_smalbb, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalbt64, v64_smalbt, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smaltt64, v64_smaltt, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalda1, smalda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalda64, smalda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalda1, v_smalda, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalda64, v64_smalda, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalxda1, smalxda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalxda64, smalxda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalxda1, v_smalxda, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalxda64, v64_smalxda, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalds1, smalds, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smaldrs3, smaldrs, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalxds1, smalxds, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalds64, smalds, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smaldrs64, smaldrs, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalxds64, smalxds, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalds1, v_smalds, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smaldrs3, v_smaldrs, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalxds1, v_smalxds, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smalds64, v64_smalds, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smaldrs64, v64_smaldrs, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smalxds64, v64_smalxds, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smslxda1, smslxda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smslxda64, smslxda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smslxda1, v_smslxda, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smslxda64, v64_smslxda, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smslda1, smslda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smslda64, smslda, RISCV_DI_FTYPE_DI_UIXLEN_UIXLEN, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smslda1, v_smslda, RISCV_DI_FTYPE_DI_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smslda64, v64_smslda, RISCV_DI_FTYPE_DI_V4HI_V4HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (umulsidi3, mulr64, RISCV_UDI_FTYPE_USI_USI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (mulsidi3, mulsr64, RISCV_DI_FTYPE_SI_SI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (umul8, umul8, RISCV_UDI_FTYPE_USI_USI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (umul8, v_umul8, RISCV_UV4HI_FTYPE_UV4QI_UV4QI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (umulx8, umulx8, RISCV_UDI_FTYPE_USI_USI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (umulx8, v_umulx8, RISCV_UV4HI_FTYPE_UV4QI_UV4QI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (umul16, umul16, RISCV_UDI_FTYPE_USI_USI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (umul16_64, umul16, RISCV_UDI_FTYPE_USI_USI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (umul16, v_umul16, RISCV_UV2SI_FTYPE_UV2HI_UV2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (umul16_64, v_umul16, RISCV_UV2SI_FTYPE_UV2HI_UV2HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (umulx16, umulx16, RISCV_UDI_FTYPE_USI_USI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (umulx16_64, umulx16, RISCV_UDI_FTYPE_USI_USI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (umulx16, v_umulx16, RISCV_UV2SI_FTYPE_UV2HI_UV2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (umulx16_64, v_umulx16, RISCV_UV2SI_FTYPE_UV2HI_UV2HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smul8, smul8, RISCV_UDI_FTYPE_USI_USI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (smul8, v_smul8, RISCV_V4HI_FTYPE_V4QI_V4QI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (smulx8, smulx8, RISCV_UDI_FTYPE_USI_USI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (smulx8, v_smulx8, RISCV_V4HI_FTYPE_V4QI_V4QI, zpsf),
+DIRECT_BUILTIN_NO_PREFIX (smul16, smul16, RISCV_DI_FTYPE_USI_USI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smul16_64, smul16, RISCV_DI_FTYPE_USI_USI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smul16, v_smul16, RISCV_V2SI_FTYPE_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smul16_64, v64_smul16, RISCV_V2SI_FTYPE_V2HI_V2HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smulx16, smulx16, RISCV_DI_FTYPE_USI_USI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smulx16_64, smulx16, RISCV_DI_FTYPE_USI_USI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (smulx16, v_smulx16, RISCV_V2SI_FTYPE_V2HI_V2HI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (smulx16_64, v_smulx16, RISCV_V2SI_FTYPE_V2HI_V2HI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (wext, wext, RISCV_IXLEN_FTYPE_UDI_USI, zpsf32),
+DIRECT_BUILTIN_NO_PREFIX (wext64, wext, RISCV_IXLEN_FTYPE_UDI_USI, zpsf64),
+DIRECT_BUILTIN_NO_PREFIX (revsi, rev, RISCV_UIXLEN_FTYPE_UIXLEN, zbpbo32),
+DIRECT_BUILTIN_NO_PREFIX (revdi, rev, RISCV_UIXLEN_FTYPE_UIXLEN, zbpbo64),
+DIRECT_BUILTIN_NO_PREFIX (fsrw, fsrw, RISCV_USI_FTYPE_USI_USI_USI, zbpbo64),
+DIRECT_BUILTIN_NO_PREFIX (fsr, fsr, RISCV_USI_FTYPE_USI_USI_USI, zbpbo32),
+DIRECT_BUILTIN_NO_PREFIX (cmixsi, cmix, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zbpbo32),
+DIRECT_BUILTIN_NO_PREFIX (cmixdi, cmix, RISCV_UIXLEN_FTYPE_UIXLEN_UIXLEN_UIXLEN, zbpbo64),
+DIRECT_BUILTIN_NO_PREFIX (smaxsi3, max, RISCV_SI_FTYPE_SI_SI, zbpbo32),
+DIRECT_BUILTIN_NO_PREFIX (sminsi3, min, RISCV_SI_FTYPE_SI_SI, zbpbo32),
+DIRECT_BUILTIN_NO_PREFIX (smaxdi3, max, RISCV_DI_FTYPE_DI_DI, zbpbo64),
+DIRECT_BUILTIN_NO_PREFIX (smindi3, min, RISCV_DI_FTYPE_DI_DI, zbpbo64),
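
Most entries above come in pairs: a scalar form that packs the lanes into an
XLEN-wide GPR value, and an explicit vector form carrying a "v_" (RV32) or
"v64_" (RV64) prefix.  A minimal usage sketch, assuming a compiler built with
this patch and a Zpn-enabled RV32 target; the vector typedef is illustrative,
chosen to match the UV4QI mode used by the table:

    typedef unsigned char uint8x4_t __attribute__ ((vector_size (4)));

    unsigned long scalar_form (unsigned long a, unsigned long b)
    {
      /* Four unsigned saturating byte adds, lanes packed in a GPR.  */
      return __builtin_riscv_ukadd8 (a, b);
    }

    uint8x4_t vector_form (uint8x4_t a, uint8x4_t b)
    {
      /* Same ukadd8 instruction, explicit vector operands.  */
      return __builtin_riscv_v_ukadd8 (a, b);
    }

Both forms should assemble to a single ukadd8.
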
diff --git a/gcc/config/riscv/riscv-builtins.c b/gcc/config/riscv/riscv-builtins.c
index d187d20e4653..0fe69e607462 100644
--- a/gcc/config/riscv/riscv-builtins.c
+++ b/gcc/config/riscv/riscv-builtins.c
@@ -36,11 +36,23 @@ along with GCC; see the file COPYING3. If not see
#include "stor-layout.h"
#include "expr.h"
#include "langhooks.h"
+#include "function.h"
+#include "emit-rtl.h"
+#include "explow.h"
/* Macros to create an enumeration identifier for a function prototype. */
#define RISCV_FTYPE_NAME0(A) RISCV_##A##_FTYPE
#define RISCV_FTYPE_NAME1(A, B) RISCV_##A##_FTYPE_##B
#define RISCV_FTYPE_NAME2(A, B, C) RISCV_##A##_FTYPE_##B##_##C
+#define RISCV_FTYPE_NAME3(A, B, C, D) RISCV_##A##_FTYPE_##B##_##C##_##D
+#define RISCV_FTYPE_NAME4(A, B, C, D, E) \
+ RISCV_##A##_FTYPE_##B##_##C##_##D##_##E
+#define RISCV_FTYPE_NAME5(A, B, C, D, E, F) \
+ RISCV_##A##_FTYPE_##B##_##C##_##D##_##E##_##F
+#define RISCV_FTYPE_NAME6(A, B, C, D, E, F, G) \
+ RISCV_##A##_FTYPE_##B##_##C##_##D##_##E##_##F##_##G
+#define RISCV_FTYPE_NAME7(A, B, C, D, E, F, G, H) \
+ RISCV_##A##_FTYPE_##B##_##C##_##D##_##E##_##F##_##G##_##H
/* Classifies the prototype of a built-in function. */
enum riscv_function_type {
@@ -103,6 +115,18 @@ AVAIL (crypto_zksed64, TARGET_ZKSED && TARGET_64BIT)
AVAIL (crypto_zkr32, TARGET_ZKR && !TARGET_64BIT)
AVAIL (crypto_zkr64, TARGET_ZKR && TARGET_64BIT)
+/* P extension.  */
+AVAIL (zpn, TARGET_ZPN)
+AVAIL (zpn64, TARGET_ZPN && TARGET_64BIT)
+AVAIL (zpn32, TARGET_ZPN && !TARGET_64BIT)
+
+AVAIL (zpsf, TARGET_ZPSF)
+AVAIL (zpsf32, TARGET_ZPSF && !TARGET_64BIT)
+AVAIL (zpsf64, TARGET_ZPSF && TARGET_64BIT)
+
+AVAIL (zbpbo32, TARGET_ZBPBO && !TARGET_64BIT)
+AVAIL (zbpbo64, TARGET_ZBPBO && TARGET_64BIT)
+
/* Construct a riscv_builtin_description from the given arguments.
INSN is the name of the associated instruction pattern, without the
@@ -132,11 +156,47 @@ AVAIL (crypto_zkr64, TARGET_ZKR && TARGET_64BIT)
RISCV_BUILTIN (INSN, #INSN, RISCV_BUILTIN_DIRECT_NO_TARGET, \
FUNCTION_TYPE, AVAIL)
+/* Define __builtin_riscv_<NAME>, which is a RISCV_BUILTIN_DIRECT function
+ mapped to instruction CODE_FOR_<INSN>.  FUNCTION_TYPE and AVAIL
+ are as for RISCV_BUILTIN.  */
+#define DIRECT_BUILTIN_NO_PREFIX(INSN, NAME, FUNCTION_TYPE, AVAIL) \
+ { CODE_FOR_ ## INSN, "__builtin_riscv_" # NAME, \
+ RISCV_BUILTIN_DIRECT, FUNCTION_TYPE, riscv_builtin_avail_ ## AVAIL }
+
+/* Define __builtin_riscv_<NAME>, which is a RISCV_BUILTIN_DIRECT_NO_TARGET
+ function mapped to instruction CODE_FOR_<INSN>.  FUNCTION_TYPE and AVAIL
+ are as for RISCV_BUILTIN.  */
+#define DIRECT_NO_TARGET_BUILTIN_NO_PREFIX(INSN, NAME, FUNCTION_TYPE, AVAIL) \
+ { CODE_FOR_ ## INSN, "__builtin_riscv_" # NAME, \
+ RISCV_BUILTIN_DIRECT_NO_TARGET, FUNCTION_TYPE, riscv_builtin_avail_ ## AVAIL }
+
+/* Type nodes for target-specific width support (xlen_t).  */
+tree uint_xlen_node;
+tree int_xlen_node;
+
/* Argument types. */
#define RISCV_ATYPE_VOID void_type_node
+#define RISCV_ATYPE_UHI unsigned_intHI_type_node
+#define RISCV_ATYPE_HI intHI_type_node
#define RISCV_ATYPE_USI unsigned_intSI_type_node
#define RISCV_ATYPE_SI intSI_type_node
#define RISCV_ATYPE_DI intDI_type_node
+#define RISCV_ATYPE_UDI unsigned_intDI_type_node
+#define RISCV_ATYPE_V4QI build_vector_type (intQI_type_node, 4)
+#define RISCV_ATYPE_UV4QI build_vector_type (unsigned_intQI_type_node, 4)
+#define RISCV_ATYPE_V8QI build_vector_type (intQI_type_node, 8)
+#define RISCV_ATYPE_UV8QI build_vector_type (unsigned_intQI_type_node, 8)
+#define RISCV_ATYPE_V2HI build_vector_type (intHI_type_node, 2)
+#define RISCV_ATYPE_UV2HI build_vector_type (unsigned_intHI_type_node, 2)
+#define RISCV_ATYPE_V4HI build_vector_type (intHI_type_node, 4)
+#define RISCV_ATYPE_UV4HI build_vector_type (unsigned_intHI_type_node, 4)
+#define RISCV_ATYPE_V2SI build_vector_type (intSI_type_node, 2)
+#define RISCV_ATYPE_UV2SI build_vector_type (unsigned_intSI_type_node, 2)
+#define RISCV_ATYPE_V8HI build_vector_type (intHI_type_node, 8)
+#define RISCV_ATYPE_UV8HI build_vector_type (unsigned_intHI_type_node, 8)
+
+#define RISCV_ATYPE_IXLEN int_xlen_node
+#define RISCV_ATYPE_UIXLEN uint_xlen_node
/* RISCV_FTYPE_ATYPESN takes N RISCV_FTYPES-like type codes and lists
their associated RISCV_ATYPEs. */
@@ -146,9 +206,24 @@ AVAIL (crypto_zkr64, TARGET_ZKR && TARGET_64BIT)
RISCV_ATYPE_##A, RISCV_ATYPE_##B
#define RISCV_FTYPE_ATYPES2(A, B, C) \
RISCV_ATYPE_##A, RISCV_ATYPE_##B, RISCV_ATYPE_##C
+#define RISCV_FTYPE_ATYPES3(A, B, C, D) \
+ RISCV_ATYPE_##A, RISCV_ATYPE_##B, RISCV_ATYPE_##C, RISCV_ATYPE_##D
+#define RISCV_FTYPE_ATYPES4(A, B, C, D, E) \
+ RISCV_ATYPE_##A, RISCV_ATYPE_##B, RISCV_ATYPE_##C, RISCV_ATYPE_##D, \
+ RISCV_ATYPE_##E
+#define RISCV_FTYPE_ATYPES5(A, B, C, D, E, F) \
+ RISCV_ATYPE_##A, RISCV_ATYPE_##B, RISCV_ATYPE_##C, RISCV_ATYPE_##D, \
+ RISCV_ATYPE_##E, RISCV_ATYPE_##F
+#define RISCV_FTYPE_ATYPES6(A, B, C, D, E, F, G) \
+ RISCV_ATYPE_##A, RISCV_ATYPE_##B, RISCV_ATYPE_##C, RISCV_ATYPE_##D, \
+ RISCV_ATYPE_##E, RISCV_ATYPE_##F, RISCV_ATYPE_##G
+#define RISCV_FTYPE_ATYPES7(A, B, C, D, E, F, G, H) \
+ RISCV_ATYPE_##A, RISCV_ATYPE_##B, RISCV_ATYPE_##C, RISCV_ATYPE_##D, \
+ RISCV_ATYPE_##E, RISCV_ATYPE_##F, RISCV_ATYPE_##G, RISCV_ATYPE_##H
static const struct riscv_builtin_description riscv_builtins[] = {
#include "riscv-builtins-crypto.def"
+ #include "riscv-builtins-rvp.def"
DIRECT_BUILTIN (frflags, RISCV_USI_FTYPE, hard_float),
DIRECT_NO_TARGET_BUILTIN (fsflags, RISCV_VOID_FTYPE_USI, hard_float)
@@ -195,6 +270,17 @@ riscv_build_function_type (enum riscv_function_type type)
void
riscv_init_builtins (void)
{
+ if (TARGET_64BIT)
+ {
+ int_xlen_node = intDI_type_node;
+ uint_xlen_node = unsigned_intDI_type_node;
+ }
+ else
+ {
+ int_xlen_node = intSI_type_node;
+ uint_xlen_node = unsigned_intSI_type_node;
+ }
+
for (size_t i = 0; i < ARRAY_SIZE (riscv_builtins); i++)
{
const struct riscv_builtin_description *d = &riscv_builtins[i];
@@ -222,10 +308,47 @@ riscv_builtin_decl (unsigned int code, bool initialize_p ATTRIBUTE_UNUSED)
an expand operand. Store the operand in *OP. */
static void
-riscv_prepare_builtin_arg (struct expand_operand *op, tree exp, unsigned argno)
+riscv_prepare_builtin_arg (struct expand_operand *op, tree exp, unsigned argno,
+ enum insn_code icode, bool has_target_p)
{
- tree arg = CALL_EXPR_ARG (exp, argno);
- create_input_operand (op, expand_normal (arg), TYPE_MODE (TREE_TYPE (arg)));
+ rtx arg_rtx = expand_normal (CALL_EXPR_ARG (exp, argno));
+ enum machine_mode mode = insn_data[icode].operand[argno + has_target_p].mode;
+
+ if (!(*insn_data[icode].operand[argno + has_target_p].predicate) (arg_rtx, mode))
+ {
+ rtx tmp_rtx = gen_reg_rtx (mode);
+ if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (arg_rtx)))
+ {
+ tmp_rtx = simplify_gen_subreg (mode, arg_rtx, GET_MODE (arg_rtx), 0);
+ arg_rtx = tmp_rtx;
+ }
+ else if (VECTOR_MODE_P (mode) && CONST_INT_P (arg_rtx))
+ {
+ /* Handle converting a CONST_INT into a CONST_VECTOR, lane by lane.  */
+ int nunits = GET_MODE_NUNITS (mode);
+ int i, shift = 0;
+ rtvec v = rtvec_alloc (nunits);
+ HOST_WIDE_INT val = INTVAL (arg_rtx);
+ enum machine_mode val_mode = GET_MODE_INNER (mode);
+ int shift_acc = GET_MODE_BITSIZE (val_mode);
+ unsigned HOST_WIDE_INT mask = GET_MODE_MASK (val_mode);
+ HOST_WIDE_INT tmp_val = val;
+ for (i = 0; i < nunits; i++)
+ {
+ tmp_val = (val >> shift) & mask;
+ RTVEC_ELT (v, i) = gen_int_mode (tmp_val, val_mode);
+ shift += shift_acc;
+ }
+
+ arg_rtx = copy_to_mode_reg (mode, gen_rtx_CONST_VECTOR (mode, v));
+ }
+ else
+ {
+ convert_move (tmp_rtx, arg_rtx, false);
+ arg_rtx = tmp_rtx;
+ }
+ }
+ create_input_operand (op, arg_rtx, mode);
}
/* Expand instruction ICODE as part of a built-in function sequence.
@@ -261,14 +384,28 @@ riscv_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
/* Map any target to operand 0. */
int opno = 0;
+ enum machine_mode insn_return_mode = insn_data[icode].operand[opno].mode;
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+
if (has_target_p)
- create_output_operand (&ops[opno++], target, TYPE_MODE (TREE_TYPE (exp)));
+ {
+ /* P extension vector and scalar mode conversion.  */
+ if (!target
+ || GET_MODE (target) != insn_return_mode
+ || !(*insn_data[icode].operand[opno].predicate) (target, insn_return_mode))
+ {
+ mode = insn_return_mode;
+ target = gen_reg_rtx (mode);
+ }
+
+ create_output_operand (&ops[opno++], target, mode);
+ }
/* Map the arguments to the other operands. */
gcc_assert (opno + call_expr_nargs (exp)
== insn_data[icode].n_generator_args);
for (int argno = 0; argno < call_expr_nargs (exp); argno++)
- riscv_prepare_builtin_arg (&ops[opno++], exp, argno);
+ riscv_prepare_builtin_arg (&ops[opno++], exp, argno, icode, has_target_p);
return riscv_expand_builtin_insn (icode, opno, ops, has_target_p);
}
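
The CONST_INT branch added to riscv_prepare_builtin_arg slices a scalar
immediate into vector lanes, lowest lane first, before loading it into a
register.  A standalone sketch of the same shift/mask arithmetic, separate
from the GCC internals above (lane widths of 8 or 16 bits, as used by the
V4QI/V2HI operand modes):

    #include <stdio.h>

    static void split_lanes (long long val, int nunits, int lane_bits)
    {
      unsigned long long mask = (1ULL << lane_bits) - 1;
      for (int i = 0; i < nunits; i++)
        printf ("lane %d = 0x%llx\n", i,
                ((unsigned long long) val >> (i * lane_bits)) & mask);
    }

    int main (void)
    {
      /* A V4QI operand built from the constant 0x04030201 gets the
         lanes 0x01, 0x02, 0x03, 0x04, matching the RTVEC built above.  */
      split_lanes (0x04030201, 4, 8);
      return 0;
    }
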
diff --git a/gcc/config/riscv/riscv-c.c b/gcc/config/riscv/riscv-c.c
index c600badb313b..92375d5dfe0d 100644
--- a/gcc/config/riscv/riscv-c.c
+++ b/gcc/config/riscv/riscv-c.c
@@ -52,6 +52,13 @@ riscv_cpu_cpp_builtins (cpp_reader *pfile)
if (TARGET_DIV && TARGET_MUL)
builtin_define ("__riscv_muldiv");
+ if (TARGET_ZPN)
+ builtin_define ("__riscv_zpn");
+ if (TARGET_ZPSF)
+ builtin_define ("__riscv_zpsf");
+ if (TARGET_ZBPBO)
+ builtin_define ("__riscv_zbpbo");
+
builtin_define_with_int_value ("__riscv_xlen", UNITS_PER_WORD * 8);
if (TARGET_HARD_FLOAT)
builtin_define_with_int_value ("__riscv_flen", UNITS_PER_FP_REG * 8);
diff --git a/gcc/config/riscv/riscv-ftypes.def b/gcc/config/riscv/riscv-ftypes.def
index 7d27530d2464..5dcb89e5ad6f 100644
--- a/gcc/config/riscv/riscv-ftypes.def
+++ b/gcc/config/riscv/riscv-ftypes.def
@@ -30,8 +30,131 @@ DEF_RISCV_FTYPE (0, (USI))
DEF_RISCV_FTYPE (0, (SI))
DEF_RISCV_FTYPE (0, (DI))
DEF_RISCV_FTYPE (1, (VOID, USI))
-DEF_RISCV_FTYPE (1, (SI, SI))
DEF_RISCV_FTYPE (1, (DI, DI))
-DEF_RISCV_FTYPE (2, (SI, SI, SI))
+DEF_RISCV_FTYPE (1, (SI, SI))
+DEF_RISCV_FTYPE (1, (UIXLEN, IXLEN))
+DEF_RISCV_FTYPE (1, (UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (1, (UIXLEN, VOID))
+DEF_RISCV_FTYPE (1, (UV2HI, UV2HI))
+DEF_RISCV_FTYPE (1, (UV2HI, UV4QI))
+DEF_RISCV_FTYPE (1, (UV2HI, V2HI))
+DEF_RISCV_FTYPE (1, (UV2SI, UV2SI))
+DEF_RISCV_FTYPE (1, (UV2SI, V2SI))
+DEF_RISCV_FTYPE (1, (UV4HI, UV4HI))
+DEF_RISCV_FTYPE (1, (UV4HI, UV8QI))
+DEF_RISCV_FTYPE (1, (UV4HI, V4HI))
+DEF_RISCV_FTYPE (1, (UV4QI, UV4QI))
+DEF_RISCV_FTYPE (1, (UV4QI, V4QI))
+DEF_RISCV_FTYPE (1, (UV8QI, UV8QI))
+DEF_RISCV_FTYPE (1, (UV8QI, V8QI))
+DEF_RISCV_FTYPE (1, (V2HI, V2HI))
+DEF_RISCV_FTYPE (1, (V2HI, V4QI))
+DEF_RISCV_FTYPE (1, (V2SI, V2SI))
+DEF_RISCV_FTYPE (1, (V4HI, V4HI))
+DEF_RISCV_FTYPE (1, (V4HI, V8QI))
+DEF_RISCV_FTYPE (1, (V4QI, V4QI))
+DEF_RISCV_FTYPE (1, (V8QI, V8QI))
+DEF_RISCV_FTYPE (1, (VOID, VOID))
DEF_RISCV_FTYPE (2, (DI, DI, DI))
DEF_RISCV_FTYPE (2, (DI, DI, SI))
+DEF_RISCV_FTYPE (2, (DI, DI, UIXLEN))
+DEF_RISCV_FTYPE (2, (DI, DI, USI))
+DEF_RISCV_FTYPE (2, (DI, DI, V2HI))
+DEF_RISCV_FTYPE (2, (DI, DI, V4HI))
+DEF_RISCV_FTYPE (2, (DI, USI, USI))
+DEF_RISCV_FTYPE (2, (DI, SI, SI))
+DEF_RISCV_FTYPE (2, (IXLEN, IXLEN, IXLEN))
+DEF_RISCV_FTYPE (2, (IXLEN, IXLEN, UIXLEN))
+DEF_RISCV_FTYPE (2, (IXLEN, IXLEN, USI))
+DEF_RISCV_FTYPE (2, (IXLEN, IXLEN, V2HI))
+DEF_RISCV_FTYPE (2, (IXLEN, SI, SI))
+DEF_RISCV_FTYPE (2, (IXLEN, UDI, USI))
+DEF_RISCV_FTYPE (2, (IXLEN, UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (2, (IXLEN, V2HI, V2HI))
+DEF_RISCV_FTYPE (2, (IXLEN, V2SI, V2SI))
+DEF_RISCV_FTYPE (2, (SI, SI, SI))
+DEF_RISCV_FTYPE (2, (SI, SI, USI))
+DEF_RISCV_FTYPE (2, (SI, SI, V2HI))
+DEF_RISCV_FTYPE (2, (SI, USI, USI))
+DEF_RISCV_FTYPE (2, (SI, V2HI, V2HI))
+DEF_RISCV_FTYPE (2, (UDI, UDI, UDI))
+DEF_RISCV_FTYPE (2, (UDI, USI, USI))
+DEF_RISCV_FTYPE (2, (UIXLEN, IXLEN, USI))
+DEF_RISCV_FTYPE (2, (UIXLEN, UIXLEN, SI))
+DEF_RISCV_FTYPE (2, (UIXLEN, UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (2, (UIXLEN, UIXLEN, USI))
+DEF_RISCV_FTYPE (2, (UIXLEN, UV8QI, UV8QI))
+DEF_RISCV_FTYPE (2, (USI, USI, USI))
+DEF_RISCV_FTYPE (2, (USI, UV4QI, UV4QI))
+DEF_RISCV_FTYPE (2, (UV2HI, UV2HI, USI))
+DEF_RISCV_FTYPE (2, (UV2HI, UV2HI, UV2HI))
+DEF_RISCV_FTYPE (2, (UV2HI, V2HI, USI))
+DEF_RISCV_FTYPE (2, (UV2HI, V2HI, V2HI))
+DEF_RISCV_FTYPE (2, (UV2SI, UV2HI, UV2HI))
+DEF_RISCV_FTYPE (2, (UV2SI, UV2SI, USI))
+DEF_RISCV_FTYPE (2, (UV2SI, UV2SI, UV2SI))
+DEF_RISCV_FTYPE (2, (UV2SI, V2SI, USI))
+DEF_RISCV_FTYPE (2, (UV4HI, UV4HI, USI))
+DEF_RISCV_FTYPE (2, (UV4HI, UV4HI, UV4HI))
+DEF_RISCV_FTYPE (2, (UV4HI, UV4QI, UV4QI))
+DEF_RISCV_FTYPE (2, (UV4HI, V4HI, USI))
+DEF_RISCV_FTYPE (2, (UV4HI, V4HI, V4HI))
+DEF_RISCV_FTYPE (2, (UV4QI, UV4QI, USI))
+DEF_RISCV_FTYPE (2, (UV4QI, UV4QI, UV4QI))
+DEF_RISCV_FTYPE (2, (UV4QI, V4QI, USI))
+DEF_RISCV_FTYPE (2, (UV4QI, V4QI, V4QI))
+DEF_RISCV_FTYPE (2, (UV8QI, UV8QI, USI))
+DEF_RISCV_FTYPE (2, (UV8QI, UV8QI, UV8QI))
+DEF_RISCV_FTYPE (2, (UV8QI, V8QI, USI))
+DEF_RISCV_FTYPE (2, (UV8QI, V8QI, V8QI))
+DEF_RISCV_FTYPE (2, (V2HI, V2HI, SI))
+DEF_RISCV_FTYPE (2, (V2HI, V2HI, USI))
+DEF_RISCV_FTYPE (2, (V2HI, V2HI, V2HI))
+DEF_RISCV_FTYPE (2, (V2SI, V2HI, V2HI))
+DEF_RISCV_FTYPE (2, (V2SI, V2SI, SI))
+DEF_RISCV_FTYPE (2, (V2SI, V2SI, USI))
+DEF_RISCV_FTYPE (2, (V2SI, V2SI, V2SI))
+DEF_RISCV_FTYPE (2, (V2SI, V2SI, V4HI))
+DEF_RISCV_FTYPE (2, (V2SI, V4HI, V4HI))
+DEF_RISCV_FTYPE (2, (V4HI, V4HI, SI))
+DEF_RISCV_FTYPE (2, (V4HI, V4HI, USI))
+DEF_RISCV_FTYPE (2, (V4HI, V4HI, V4HI))
+DEF_RISCV_FTYPE (2, (V4HI, V4QI, V4QI))
+DEF_RISCV_FTYPE (2, (V4QI, V4QI, SI))
+DEF_RISCV_FTYPE (2, (V4QI, V4QI, USI))
+DEF_RISCV_FTYPE (2, (V4QI, V4QI, V4QI))
+DEF_RISCV_FTYPE (2, (V8QI, V8QI, SI))
+DEF_RISCV_FTYPE (2, (V8QI, V8QI, USI))
+DEF_RISCV_FTYPE (2, (V8QI, V8QI, V8QI))
+DEF_RISCV_FTYPE (2, (HI, HI, HI))
+DEF_RISCV_FTYPE (2, (UHI, UHI, UHI))
+DEF_RISCV_FTYPE (3, (DI, DI, IXLEN, IXLEN))
+DEF_RISCV_FTYPE (3, (DI, DI, UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (3, (DI, DI, V2HI, V2HI))
+DEF_RISCV_FTYPE (3, (DI, DI, V2SI, V2SI))
+DEF_RISCV_FTYPE (3, (DI, DI, V4HI, V4HI))
+DEF_RISCV_FTYPE (3, (IXLEN, IXLEN, IXLEN, IXLEN))
+DEF_RISCV_FTYPE (3, (IXLEN, IXLEN, IXLEN, UIXLEN))
+DEF_RISCV_FTYPE (3, (IXLEN, IXLEN, UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (3, (IXLEN, IXLEN, V2HI, V2HI))
+DEF_RISCV_FTYPE (3, (IXLEN, IXLEN, V2SI, V2SI))
+DEF_RISCV_FTYPE (3, (SI, IXLEN, V2HI, V2HI))
+DEF_RISCV_FTYPE (3, (SI, SI, SI, SI))
+DEF_RISCV_FTYPE (3, (SI, SI, SI, V2HI))
+DEF_RISCV_FTYPE (3, (SI, SI, USI, USI))
+DEF_RISCV_FTYPE (3, (SI, SI, V2HI, V2HI))
+DEF_RISCV_FTYPE (3, (SI, SI, V4QI, UV4QI))
+DEF_RISCV_FTYPE (3, (SI, SI, V4QI, V4QI))
+DEF_RISCV_FTYPE (3, (UDI, UDI, UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (3, (UDI, UDI, UV2SI, UV2SI))
+DEF_RISCV_FTYPE (3, (UIXLEN, UIXLEN, UIXLEN, UIXLEN))
+DEF_RISCV_FTYPE (3, (UIXLEN, UIXLEN, UIXLEN, USI))
+DEF_RISCV_FTYPE (3, (UIXLEN, UIXLEN, UV8QI, UV8QI))
+DEF_RISCV_FTYPE (3, (USI, USI, USI, USI))
+DEF_RISCV_FTYPE (3, (USI, USI, UV4QI, UV4QI))
+DEF_RISCV_FTYPE (3, (UV2SI, UV2SI, UV8QI, UV8QI))
+DEF_RISCV_FTYPE (3, (V2SI, V2SI, V2SI, V2SI))
+DEF_RISCV_FTYPE (3, (V2SI, V2SI, V2SI, V4HI))
+DEF_RISCV_FTYPE (3, (V2SI, V2SI, V4HI, V4HI))
+DEF_RISCV_FTYPE (3, (V2SI, V2SI, V8QI, UV8QI))
+DEF_RISCV_FTYPE (3, (V2SI, V2SI, V8QI, V8QI))
\ No newline at end of file
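
Each DEF_RISCV_FTYPE entry is a return-type code followed by the argument
type codes; riscv_build_function_type turns a code such as
RISCV_UV4QI_FTYPE_UV4QI_UV4QI into a function type through the RISCV_ATYPE_*
macros added earlier.  A sketch of the expansion (illustrative, not the
literal generated code):

    /* DEF_RISCV_FTYPE (2, (UV4QI, UV4QI, UV4QI)) ends up doing roughly: */
    types[type]
      = build_function_type_list
          (build_vector_type (unsigned_intQI_type_node, 4),   /* return */
           build_vector_type (unsigned_intQI_type_node, 4),   /* arg 1 */
           build_vector_type (unsigned_intQI_type_node, 4),   /* arg 2 */
           NULL_TREE);
    /* i.e. the prototype of __builtin_riscv_v_ukadd8.  */
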
diff --git a/gcc/config/riscv/riscv-modes.def b/gcc/config/riscv/riscv-modes.def
index d66c198b66c8..605e44d6d777 100644
--- a/gcc/config/riscv/riscv-modes.def
+++ b/gcc/config/riscv/riscv-modes.def
@@ -20,3 +20,8 @@ along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
FLOAT_MODE (TF, 16, ieee_quad_format);
+
+/* Vector modes for the P extension.  */
+VECTOR_MODES (INT, 4); /* V4QI V2HI */
+VECTOR_MODES (INT, 8); /* V8QI V4HI V2SI */
+VECTOR_MODES (INT, 16); /* V16QI V8HI V4SI V2DI */
diff --git a/gcc/config/riscv/riscv-opts.h b/gcc/config/riscv/riscv-opts.h
index 6ee08bc206f0..3db664ba1898 100644
--- a/gcc/config/riscv/riscv-opts.h
+++ b/gcc/config/riscv/riscv-opts.h
@@ -69,4 +69,12 @@ enum riscv_align_data {
#define TARGET_ZKSED ((riscv_crypto_subext & MASK_ZKSED) != 0)
#define TARGET_ZKSH ((riscv_crypto_subext & MASK_ZKSH) != 0)
+/* P extension subset */
+#define MASK_ZPN (1 << 0)
+#define MASK_ZBPBO (1 << 1)
+#define MASK_ZPSF (1 << 2)
+
+#define TARGET_ZPN ((riscv_rvp_subext & MASK_ZPN) != 0)
+#define TARGET_ZBPBO ((riscv_rvp_subext & MASK_ZBPBO) != 0)
+#define TARGET_ZPSF ((riscv_rvp_subext & MASK_ZPSF) != 0)
#endif /* ! GCC_RISCV_OPTS_H */
diff --git a/gcc/config/riscv/riscv.c b/gcc/config/riscv/riscv.c
index d489717b2a51..05a4f407f2ae 100644
--- a/gcc/config/riscv/riscv.c
+++ b/gcc/config/riscv/riscv.c
@@ -4516,6 +4516,14 @@ riscv_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
!= call_used_or_fixed_reg_p (regno + i))
return false;
+ /* RV32 Zpsf keeps 64-bit units in even/odd GPR pairs; require an even regno.  */
+ if (TARGET_ZPSF && !TARGET_64BIT)
+ {
+ if ((GET_MODE_CLASS (mode) == MODE_INT
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_INT)
+ && GET_MODE_UNIT_SIZE (mode) == GET_MODE_SIZE (DImode))
+ return !(regno & 1);
+ }
return true;
}
@@ -5253,6 +5261,27 @@ riscv_new_address_profitable_p (rtx memref, rtx_insn *insn, rtx new_addr)
return new_cost <= old_cost;
}
+/* Return true if MODE is a vector mode supported by the P extension.  */
+static bool
+riscv_rvp_support_vector_mode_p (machine_mode mode)
+{
+ if (mode == V2HImode || mode == V4QImode)
+ return true;
+
+ if (TARGET_64BIT
+ && (mode == V8QImode || mode == V4HImode || mode == V2SImode))
+ return true;
+
+ return false;
+}
+
+/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
+static bool
+riscv_vector_mode_supported_p (machine_mode mode)
+{
+ return TARGET_ZPN && riscv_rvp_support_vector_mode_p (mode);
+}
+
/* Initialize the GCC target structure. */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
@@ -5436,6 +5465,10 @@ riscv_new_address_profitable_p (rtx memref, rtx_insn *insn, rtx new_addr)
#undef TARGET_NEW_ADDRESS_PROFITABLE_P
#define TARGET_NEW_ADDRESS_PROFITABLE_P riscv_new_address_profitable_p
+/* P extension (RVP).  */
+#undef TARGET_VECTOR_MODE_SUPPORTED_P
+#define TARGET_VECTOR_MODE_SUPPORTED_P riscv_vector_mode_supported_p
+
struct gcc_target targetm = TARGET_INITIALIZER;
#include "gt-riscv.h"
diff --git a/gcc/config/riscv/riscv.md b/gcc/config/riscv/riscv.md
index 7d2edb63195c..d97f08cfa385 100644
--- a/gcc/config/riscv/riscv.md
+++ b/gcc/config/riscv/riscv.md
@@ -45,6 +45,60 @@
;; Stack tie
UNSPEC_TIE
+
+ ;; rvp
+ UNSPEC_KABS
+ UNSPEC_KADDW
+ UNSPEC_KSUBW
+ UNSPEC_KADDH
+ UNSPEC_KSUBH
+ UNSPEC_UKADDW
+ UNSPEC_UKSUBW
+ UNSPEC_UKADDH
+ UNSPEC_UKSUBH
+ UNSPEC_BITREV
+ UNSPEC_VEC_COMPARE
+ UNSPEC_KDMABB
+ UNSPEC_KDMABT
+ UNSPEC_KDMATT
+ UNSPEC_KHMBB
+ UNSPEC_KHMBT
+ UNSPEC_KHMTT
+ UNSPEC_KDMTT
+ UNSPEC_KDMBT
+ UNSPEC_KDMBB
+ UNSPEC_KHM
+ UNSPEC_KHMX
+ UNSPEC_ROUND
+ UNSPEC_KMMWU
+ UNSPEC_KMMW
+ UNSPEC_KSLRAW
+ UNSPEC_KSLRAWU
+ UNSPEC_PBSAD
+ UNSPEC_PBSADA
+ UNSPEC_RDOV
+ UNSPEC_CLIPS
+ UNSPEC_CLIPS_OV
+ UNSPEC_SMUL8
+ UNSPEC_SMULX8
+ UNSPEC_UMUL8
+ UNSPEC_UMULX8
+ UNSPEC_SMUL16
+ UNSPEC_SMULX16
+ UNSPEC_UMUL16
+ UNSPEC_UMULX16
+ UNSPEC_ROUND64
+ UNSPEC_BSWAP
+ UNSPEC_UCLIP
+ UNSPEC_UCLIP_OV
+ UNSPEC_KDMBB16
+ UNSPEC_KDMBT16
+ UNSPEC_KDMTT16
+ UNSPEC_KHMBB16
+ UNSPEC_KHMBT16
+ UNSPEC_KHMTT16
+ UNSPEC_FSR
+ UNSPEC_FSRW
])
(define_c_enum "unspecv" [
@@ -65,6 +119,9 @@
UNSPECV_BLOCKAGE
UNSPECV_FENCE
UNSPECV_FENCE_I
+
+ ;; RVP
+ UNSPEC_CLROV
])
(define_constants
@@ -119,7 +176,7 @@
(const_string "unknown"))
;; Main data type used by the insn
-(define_attr "mode" "unknown,none,QI,HI,SI,DI,TI,SF,DF,TF"
+(define_attr "mode" "unknown,none,QI,HI,SI,DI,TI,SF,DF,TF,V2HI,V4HI,V8HI,V4QI,V8QI,V2SI,V4SI"
(const_string "unknown"))
;; True if the main data type is twice the size of a word.
@@ -162,10 +219,15 @@
;; multi multiword sequence (or user asm statements)
;; nop no operation
;; ghost an instruction that produces no real code
+;; simd SIMD instructions of the P extension
+;; psimd partial-SIMD data processing instructions
+;; dsp instructions that increase DSP processing capability
+;; dsp64 the same as dsp, but RV64P only
(define_attr "type"
"unknown,branch,jump,call,load,fpload,store,fpstore,
mtc,mfc,const,arith,logical,shift,slt,imul,idiv,move,fmove,fadd,fmul,
- fmadd,fdiv,fcmp,fcvt,fsqrt,multi,auipc,sfb_alu,nop,ghost"
+ fmadd,fdiv,fcmp,fcvt,fsqrt,multi,auipc,sfb_alu,nop,ghost,
+ simd,psimd,dsp,dsp64"
(cond [(eq_attr "got" "load") (const_string "load")
;; If a doubleword move uses these expensive instructions,
@@ -297,9 +359,9 @@
;; Iterator for floating-point modes that can be loaded into X registers.
(define_mode_iterator SOFTF [SF (DF "TARGET_64BIT")])
-;; This attribute gives the length suffix for a sign- or zero-extension
-;; instruction.
-(define_mode_attr size [(QI "b") (HI "h")])
+;; This attribute gives the length suffix for a sign- or zero-extension
+;; instruction, or for a kadd/ksub instruction in RVP.
+(define_mode_attr size [(QI "b") (HI "h") (SI "w")])
;; Mode attributes for loads.
(define_mode_attr load [(QI "lb") (HI "lh") (SI "lw") (DI "ld") (SF "flw") (DF "fld")])
@@ -453,7 +515,19 @@
[(set_attr "type" "arith")
(set_attr "mode" "SI")])
-(define_insn "adddi3"
+(define_expand "adddi3"
+ [(set (match_operand:DI 0 "register_operand" "=r,r")
+ (plus:DI (match_operand:DI 1 "register_operand" " r,r")
+ (match_operand:DI 2 "arith_operand" " r,I")))]
+ "TARGET_64BIT || TARGET_ZPSF"
+ {
+ if (!TARGET_64BIT)
+ operands[2] = force_reg (DImode, operands[2]);
+ }
+ [(set_attr "type" "arith")
+ (set_attr "mode" "DI")])
+
+(define_insn "*adddi_rv64"
[(set (match_operand:DI 0 "register_operand" "=r,r")
(plus:DI (match_operand:DI 1 "register_operand" " r,r")
(match_operand:DI 2 "arith_operand" " r,I")))]
@@ -504,8 +578,8 @@
[(set (match_operand:DI 0 "register_operand" "= r")
(minus:DI (match_operand:DI 1 "reg_or_0_operand" " rJ")
(match_operand:DI 2 "register_operand" " r")))]
- "TARGET_64BIT"
- "sub\t%0,%z1,%2"
+ "TARGET_64BIT || TARGET_ZPSF"
+ { return TARGET_64BIT ? "sub\t%0,%z1,%2" : "sub64\t%0,%z1,%2"; }
[(set_attr "type" "arith")
(set_attr "mode" "DI")])
@@ -709,11 +783,18 @@
(match_operand:SI 2 "register_operand" " r"))))]
"TARGET_MUL && !TARGET_64BIT"
{
- rtx temp = gen_reg_rtx (SImode);
- emit_insn (gen_mulsi3 (temp, operands[1], operands[2]));
- emit_insn (gen_mulsi3_highpart (riscv_subword (operands[0], true),
- operands[1], operands[2]));
- emit_insn (gen_movsi (riscv_subword (operands[0], false), temp));
+ if (TARGET_ZPN)
+ {
+ emit_insn (gen_rvp_mulsidi3 (operands[0], operands[1], operands[2]));
+ }
+ else
+ {
+ rtx temp = gen_reg_rtx (SImode);
+ emit_insn (gen_mulsi3 (temp, operands[1], operands[2]));
+ emit_insn (gen_mulsi3_highpart (riscv_subword (operands[0], true),
+ operands[1], operands[2]));
+ emit_insn (gen_movsi (riscv_subword (operands[0], false), temp));
+ }
DONE;
})
@@ -2499,6 +2580,7 @@
)
(include "crypto.md")
+(include "rvp.md")
;; This fixes a failure with gcc.c-torture/execute/pr64242.c at -O2 for a
;; 32-bit target when using -mtune=sifive-7-series. The first sched pass
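
The mulsidi3 change above lets a 32x32->64 widening multiply expand to a
single Zpn instruction instead of the previous mul/mulh/mv sequence on RV32.
A source-level sketch of code that exercises the expander (assuming an RV32
target with Zpn enabled):

    long long widen_mul (int a, int b)
    {
      return (long long) a * b;  /* routed through mulsidi3 -> rvp_mulsidi3 */
    }
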
diff --git a/gcc/config/riscv/riscv.opt b/gcc/config/riscv/riscv.opt
index cd4807c1d27d..93957db09e07 100644
--- a/gcc/config/riscv/riscv.opt
+++ b/gcc/config/riscv/riscv.opt
@@ -158,3 +158,6 @@ Enum(riscv_align_data) String(natural) Value(riscv_align_data_type_natural)
TargetVariable
int riscv_crypto_subext
+
+TargetVariable
+int riscv_rvp_subext
diff --git a/gcc/config/riscv/rvp.md b/gcc/config/riscv/rvp.md
new file mode 100644
index 000000000000..0b552f9bb141
--- /dev/null
+++ b/gcc/config/riscv/rvp.md
@@ -0,0 +1,6912 @@
+;; Machine description for the RISC-V P extension.
+;; Copyright (C) 2021 Free Software Foundation, Inc.
+
+;; This file is part of GCC.
+
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 3, or (at your option)
+;; any later version.
+
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING3. If not see
+;; <http://www.gnu.org/licenses/>.
+
+;; A list of vector modes that are at most one word long.
+(define_mode_iterator VECI [(V4QI "!TARGET_64BIT") (V2HI "!TARGET_64BIT")
+ (V8QI "TARGET_64BIT") (V4HI "TARGET_64BIT")
+ (V2SI "TARGET_64BIT")])
+;; V2HI and V4QI are also used in RV64P.
+(define_mode_iterator VPMOVE [(V4QI "") (V2HI "")
+ (V8QI "TARGET_64BIT") (V4HI "TARGET_64BIT")])
+(define_mode_iterator VQIHI [(V4QI "!TARGET_64BIT") (V2HI "!TARGET_64BIT")
+ (V8QI "TARGET_64BIT") (V4HI "TARGET_64BIT")])
+(define_mode_iterator VSHI [(V2HI "!TARGET_64BIT") (V2SI "TARGET_64BIT")])
+(define_mode_iterator VHI [(V2HI "!TARGET_64BIT") (V4HI "TARGET_64BIT")])
+(define_mode_iterator VQI [(V4QI "!TARGET_64BIT") (V8QI "TARGET_64BIT")])
+(define_mode_iterator VD_SI [(SI "!TARGET_64BIT") (V2SI "TARGET_64BIT")])
+
+;; Element bit width used in the mnemonic of a 'simd' type instruction.
+(define_mode_attr bits [(V8QI "8") (V4QI "8") (QI "8") (V4HI "16") (V2HI "16")
+ (HI "16") (V2SI "32") (DI "64")])
+
+(define_mode_attr VNHALF [(V2SI "SI") (V2HI "HI")])
+(define_mode_attr VSH_EXT [(V2SI "DI") (V2HI "HI")])
+(define_mode_attr VEXT [(V4QI "V4HI") (V2HI "V2SI") (V8QI "V8HI") (V4HI "V4SI")
+ (V2SI "V2DI")])
+
+;; clz, clrs
+(define_code_iterator unop [clrsb clz])
+
+;; add/sub iterator
+(define_code_iterator all_plus [plus ss_plus us_plus])
+(define_code_iterator all_minus [minus ss_minus us_minus])
+(define_code_iterator saturation_plus [ss_plus us_plus])
+(define_code_iterator saturation_minus [ss_minus us_minus])
+(define_code_iterator ssat_op [ss_plus ss_minus])
+(define_code_iterator usat_op [us_plus us_minus])
+(define_code_iterator plus_minus [plus minus])
+
+;; smax[8|16] and umax[8|16]
+(define_code_iterator sumax [smax umax])
+(define_code_iterator sumin [smin umin])
+
+;; rvp shift
+(define_code_attr shift [(ashift "ashl") (ashiftrt "ashr") (lshiftrt "lshr")])
+
+;; smalxd[s|a] smald[s|a]
+(define_code_attr add_sub [(plus "a")
+ (ss_plus "a")
+ (us_plus "a")
+ (ss_minus "s")
+ (us_minus "s")
+ (minus "s")])
+
+;; (un)signed unpacking patterns
+(define_code_attr zs [(sign_extend "s") (zero_extend "z")])
+
+(define_code_attr opcode [(plus "add")
+ (minus "sub")
+ (ss_plus "add")
+ (us_plus "add")
+ (ss_minus "sub")
+ (us_minus "sub")
+ (smax "smax")
+ (umax "umax")
+ (smin "smin")
+ (umin "umin")])
+
+;; expands to (un)signed (saturating) arithmetic operations
+(define_code_attr uk
+ [(plus "") (ss_plus "k") (us_plus "uk")
+ (minus "") (ss_minus "k") (us_minus "uk")])
+
+;; expands to the name of the optab for a particular code.
+(define_code_attr rvp_optab [(clrsb "clrsb")
+ (clz "clz")
+ (ashift "ashl")
+ (ashiftrt "ashr")
+ (lshiftrt "lshr")])
+
+;; expands to the name of the insn that implements a particular code.
+(define_code_attr rvp_insn [(clrsb "clrs")
+ (clz "clz")
+ (ashift "sll")
+ (ashiftrt "sra")
+ (lshiftrt "srl")])
+
+;; kabs
+(define_insn "kabsw"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")] UNSPEC_KABS))]
+ "TARGET_ZPN"
+ "kabsw\t%0, %1"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+(define_insn "kabs2"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (ss_abs:VECI (match_operand:VECI 1 "register_operand" " r")))]
+ "TARGET_ZPN"
+ "kabs\t%0, %1"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+;; add<bits>, kadd<bits> (signed saturating), ukadd<bits> (unsigned saturating)
+(define_insn "add3"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (all_plus:VECI (match_operand:VECI 1 "register_operand" " r")
+ (match_operand:VECI 2 "register_operand" " r")))]
+ "TARGET_ZPN"
+ "add\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+;; add64/sub64
+(define_insn "*add64_rvp"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (plus:DI (match_operand:DI 1 "register_operand" " r")
+ (match_operand:DI 2 "register_operand" " r")))]
+ "!TARGET_64BIT && TARGET_ZPSF"
+ "add64\t%0,%1,%2"
+ [(set_attr "type" "arith")
+ (set_attr "mode" "DI")])
+
+;; rv64
+(define_insn "rvp_adddi3"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (saturation_plus:DI (match_operand:DI 1 "register_operand" " r")
+ (match_operand:DI 2 "register_operand" " r")))]
+ "TARGET_ZPSF"
+ "add64 %0, %1, %2"
+ [(set_attr "type" "dsp64")
+ (set_attr "mode" "DI")])
+
+;; sub<bits>, ksub<bits> (signed saturating), uksub<bits> (unsigned saturating)
+(define_insn "sub3"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (all_minus:VECI (match_operand:VECI 1 "register_operand" " r")
+ (match_operand:VECI 2 "register_operand" " r")))]
+ "TARGET_ZPN"
+ "sub %0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+;; rv64
+(define_insn "rvp_subdi3"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (saturation_minus:DI (match_operand:DI 1 "register_operand" " r")
+ (match_operand:DI 2 "register_operand" " r")))]
+ "TARGET_ZPSF"
+ "sub64 %0, %1, %2"
+ [(set_attr "type" "dsp64")
+ (set_attr "mode" "DI")])
+
+;; kaddw/ukaddw, ksubw/uksubw and kaddh/ukaddh, ksubh/uksubh
+(define_code_iterator sat_op [ss_plus us_plus ss_minus us_minus])
+(define_code_attr us [(ss_plus "s") (us_plus "u") (ss_minus "s") (us_minus "u")])
+
+(define_insn "s3"
+ [(set (match_operand:HISI 0 "register_operand" "=r")
+ (sat_op:HISI (match_operand:HISI 1 "register_operand" " r")
+ (match_operand:HISI 2 "register_operand" " r")))]
+ "TARGET_ZPN"
+ "\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "")])
+
+;; ave
+(define_insn "ave"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (truncate:SI
+ (ashiftrt:DI
+ (plus:DI
+ (plus:DI
+ (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
+ (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
+ (const_int 1))
+ (const_int 1))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "ave\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+(define_insn "avedi"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (truncate:DI
+ (ashiftrt:TI
+ (plus:TI
+ (plus:TI
+ (sign_extend:TI (match_operand:DI 1 "register_operand" "r"))
+ (sign_extend:TI (match_operand:DI 2 "register_operand" "r")))
+ (const_int 1))
+ (const_int 1))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "ave\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "DI")])
+
+;; bitrev
+(define_insn "bitrevsi"
+ [(set (match_operand:SI 0 "register_operand" "=r, r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" " r, r")
+ (match_operand:SI 2 "rimm5u_operand" " r, u05")]
+ UNSPEC_BITREV))]
+ "TARGET_ZPN && !TARGET_64BIT && !TARGET_ZBPBO"
+ "@
+ bitrev\t%0, %1, %2
+ bitrevi\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+(define_insn "bitrevdi"
+ [(set (match_operand:DI 0 "register_operand" "=r, r")
+ (unspec:DI [(match_operand:DI 1 "register_operand" " r, r")
+ (match_operand:SI 2 "rimm6u_operand" " r, u06")]
+ UNSPEC_BITREV))]
+ "TARGET_ZPN && TARGET_64BIT && !TARGET_ZBPBO"
+ "@
+ bitrev\t%0, %1, %2
+ bitrevi\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+;; cmix
+(define_insn "cmix"
+ [(set (match_operand:X 0 "register_operand" "=r")
+ (ior:X
+ (and:X
+ (match_operand:X 1 "register_operand" " r")
+ (match_operand:X 3 "register_operand" " r"))
+ (and:X
+ (match_operand:X 2 "register_operand" " r")
+ (not:X (match_dup 3)))))]
+ "TARGET_ZBPBO"
+ "cmix\t%0, %3, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "")])
+
+;; clrov
+(define_insn "clrov"
+ [(unspec_volatile:X [(const_int 0)] UNSPEC_CLROV)]
+ "TARGET_ZPN"
+ "csrrci zero, vxsat, 1"
+ [(set_attr "mode" "")])
+
+;; clrs, clz
+(define_insn "2"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (unop:VECI (match_operand:VECI 1 "register_operand" "r")))]
+ "TARGET_ZPN"
+ "\t%0, %1"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+;; clrs32, clz32
+(define_insn "clrsbsi2"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (clrsb:SI (match_operand:SI 1 "register_operand" "r")))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "clrs32\t%0, %1"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "SI")])
+
+(define_insn "clzsi2"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (clz:SI (match_operand:SI 1 "register_operand" "r")))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ { return TARGET_ZBPBO ? "clz\t%0, %1" : "clz32\t%0, %1"; }
+ [(set_attr "type" "simd")
+ (set_attr "mode" "SI")])
+
+;; simd integer compare equal
+(define_insn "cmpeq"
+ [(set (match_operand:VQIHI 0 "register_operand" "=r")
+ (unspec:VQIHI [(eq:VQIHI (match_operand:VQIHI 1 "register_operand" " r")
+ (match_operand:VQIHI 2 "register_operand" " r"))]
+ UNSPEC_VEC_COMPARE))]
+ "TARGET_ZPN"
+ "cmpeq\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+;; cras, crsa
+(define_expand "cras"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_cras<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "cras_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (vec_duplicate:VSHI
+ (plus:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "cras\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
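+
+;; Cross add/sub, e.g. cras16 on V2HI (little-endian element order):
+;;   %0[0] = %1[0] - %2[1]
+;;   %0[1] = %1[1] + %2[0]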
+
+(define_expand "cras16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_cras16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "cras16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (minus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (vec_concat:V2HI
+ (minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)])))
+ (plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "cras16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "crsa"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_crsa<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "crsa_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 1)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (vec_duplicate:VSHI
+ (plus:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))))
+ (const_int 2)))]
+ "TARGET_ZPN"
+ "crsa\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "crsa16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_crsa16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "crsa16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (plus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (vec_concat:V2HI
+ (plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)])))
+ (minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "crsa16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+;; kcras, kcrsa
+(define_expand "kcras"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_kcras<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kcras_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (ss_minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (vec_duplicate:VSHI
+ (ss_plus:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "kcras\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "kcras16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kcras16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kcras16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (ss_minus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (ss_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (vec_concat:V2HI
+ (ss_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)])))
+ (ss_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kcras16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "kcrsa"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_kcrsa<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kcrsa_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (ss_minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 1)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (vec_duplicate:VSHI
+ (ss_plus:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))))
+ (const_int 2)))]
+ "TARGET_ZPN"
+ "kcrsa\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "kcrsa16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kcrsa16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kcrsa16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (ss_plus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (ss_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (vec_concat:V2HI
+ (ss_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)])))
+ (ss_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kcrsa16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+;; ukcras ukcrsa
+(define_expand "ukcras"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_ukcras<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "ukcras_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (us_minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (vec_duplicate:VSHI
+ (us_plus:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "ukcras\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "ukcras16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_ukcras16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "ukcras16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (us_minus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (us_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (vec_concat:V2HI
+ (us_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)])))
+ (us_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "ukcras16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "ukcrsa"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_ukcrsa<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "ukcrsa_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (us_minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 1)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (vec_duplicate:VSHI
+ (us_plus:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))))
+ (const_int 2)))]
+ "TARGET_ZPN"
+ "ukcrsa\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "ukcrsa16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_ukcrsa16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "ukcrsa16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (us_plus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (us_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (vec_concat:V2HI
+ (us_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)])))
+ (us_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "ukcrsa16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+;; rcras rcrsa
+(define_expand "rcras"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_rcras<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "rcras_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (truncate:
+ (ashiftrt:
+ (minus:
+ (sign_extend:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (sign_extend:
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (const_int 1))))
+ (vec_duplicate:VSHI
+ (truncate:
+ (ashiftrt:
+ (plus:
+ (sign_extend:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))))
+ (const_int 1))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "rcras\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
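+
+;; The r-prefixed (halving) forms widen each element, add or subtract,
+;; then arithmetic-shift right by one, so the result always fits without
+;; saturation, e.g. rcras16: %0[0] = (%1[0] - %2[1]) >> 1.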
+
+(define_expand "rcras16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_rcras16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "rcras16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (truncate:HI
+ (ashiftrt:SI
+ (minus:SI
+ (sign_extend:SI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (const_int 1)))
+ (truncate:HI
+ (ashiftrt:SI
+ (plus:SI
+ (sign_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (const_int 1))))
+ (vec_concat:V2HI
+ (truncate:HI
+ (ashiftrt:SI
+ (minus:SI
+ (sign_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 2)])))
+ (sign_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 3)]))))
+ (const_int 1)))
+ (truncate:HI
+ (ashiftrt:SI
+ (plus:SI
+ (sign_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 3)])))
+ (sign_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))
+ (const_int 1))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "rcras16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "rcrsa"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_rcrsa<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "rcrsa_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (truncate:
+ (ashiftrt:
+ (minus:
+ (sign_extend:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (const_int 1))))
+ (vec_duplicate:VSHI
+ (truncate:
+ (ashiftrt:
+ (plus:
+ (sign_extend:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 0)])))
+ (sign_extend:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))))
+ (const_int 1))))
+ (const_int 2)))]
+ "TARGET_ZPN"
+ "rcrsa\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "rcrsa16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_rcrsa16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "rcrsa16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (truncate:HI
+ (ashiftrt:SI
+ (plus:SI
+ (sign_extend:SI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (const_int 1)))
+ (truncate:HI
+ (ashiftrt:SI
+ (minus:SI
+ (sign_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (const_int 1))))
+ (vec_concat:V2HI
+ (truncate:HI
+ (ashiftrt:SI
+ (plus:SI
+ (sign_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 2)])))
+ (sign_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 3)]))))
+ (const_int 1)))
+ (truncate:HI
+ (ashiftrt:SI
+ (minus:SI
+ (sign_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 3)])))
+ (sign_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))
+ (const_int 1))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "rcrsa16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+;; urcras urcrsa
+(define_expand "urcras"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_urcras<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "urcras_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (truncate:
+ (lshiftrt:
+ (minus:
+ (zero_extend:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (zero_extend:
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (const_int 1))))
+ (vec_duplicate:VSHI
+ (truncate:
+ (lshiftrt:
+ (plus:
+ (zero_extend:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (zero_extend:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))))
+ (const_int 1))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "urcras\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "urcras16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_urcras16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "urcras16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (truncate:HI
+ (lshiftrt:SI
+ (minus:SI
+ (zero_extend:SI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (zero_extend:SI (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (const_int 1)))
+ (truncate:HI
+ (lshiftrt:SI
+ (plus:SI
+ (zero_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 1)])))
+ (zero_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (const_int 1))))
+ (vec_concat:V2HI
+ (truncate:HI
+ (lshiftrt:SI
+ (minus:SI
+ (zero_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 2)])))
+ (zero_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 3)]))))
+ (const_int 1)))
+ (truncate:HI
+ (lshiftrt:SI
+ (plus:SI
+ (zero_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 3)])))
+ (zero_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))
+ (const_int 1))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "urcras16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "urcrsa"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+  emit_insn (gen_urcrsa<bits>_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "urcrsa_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (truncate:
+ (lshiftrt:
+ (minus:
+ (zero_extend:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (zero_extend:
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (const_int 1))))
+ (vec_duplicate:VSHI
+ (truncate:
+ (lshiftrt:
+ (plus:
+ (zero_extend:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 0)])))
+ (zero_extend:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))))
+ (const_int 1))))
+ (const_int 2)))]
+ "TARGET_ZPN"
+ "urcrsa\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "urcrsa16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_urcrsa16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "urcrsa16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (truncate:HI
+ (lshiftrt:SI
+ (plus:SI
+ (zero_extend:SI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (zero_extend:SI (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (const_int 1)))
+ (truncate:HI
+ (lshiftrt:SI
+ (minus:SI
+ (zero_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 1)])))
+ (zero_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 0)]))))
+ (const_int 1))))
+ (vec_concat:V2HI
+ (truncate:HI
+ (lshiftrt:SI
+ (plus:SI
+ (zero_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 2)])))
+ (zero_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 3)]))))
+ (const_int 1)))
+ (truncate:HI
+ (lshiftrt:SI
+ (minus:SI
+ (zero_extend:SI (vec_select:HI (match_dup 1) (parallel [(const_int 3)])))
+ (zero_extend:SI (vec_select:HI (match_dup 2) (parallel [(const_int 2)]))))
+ (const_int 1))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "urcrsa16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+;; insb
+(define_expand "insb"
+ [(match_operand:V4QI 0 "register_operand" "")
+ (match_operand:V4QI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:SI 3 "const_int_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ if (INTVAL (operands[3]) > 3 || INTVAL (operands[3]) < 0)
+ gcc_unreachable ();
+
+ rtx src = gen_reg_rtx (QImode);
+
+ convert_move (src, operands[2], false);
+
+ HOST_WIDE_INT selector_index;
+ selector_index = INTVAL (operands[3]);
+ rtx selector = gen_int_mode (1 << selector_index, SImode);
+ emit_insn (gen_vec_setv4qi_internal (operands[0], src,
+ operands[1], selector));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_insn "vec_setv4qi_internal"
+ [(set (match_operand:V4QI 0 "register_operand" "= r, r, r, r")
+ (vec_merge:V4QI
+ (vec_duplicate:V4QI
+ (match_operand:QI 1 "register_operand" " r, r, r, r"))
+ (match_operand:V4QI 2 "register_operand" " 0, 0, 0, 0")
+ (match_operand:SI 3 "imm_1_2_4_8_operand" " C01, C02, C04, C08")))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "@
+ insb\t%0, %1, 0
+ insb\t%0, %1, 1
+ insb\t%0, %1, 2
+ insb\t%0, %1, 3"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "V4QI")])
+
+(define_expand "insb64"
+ [(match_operand:V8QI 0 "register_operand" "")
+ (match_operand:V8QI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:SI 3 "const_insb64_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ if (INTVAL (operands[3]) > 7 || INTVAL (operands[3]) < 0)
+ gcc_unreachable ();
+
+ rtx src = gen_reg_rtx (QImode);
+
+ convert_move (src, operands[2], false);
+
+ HOST_WIDE_INT selector_index;
+ selector_index = INTVAL (operands[3]);
+ rtx selector = gen_int_mode (1 << selector_index, SImode);
+ emit_insn (gen_vec_setv8qi_internal (operands[0], src,
+ operands[1], selector));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_insn "vec_setv8qi_internal"
+ [(set (match_operand:V8QI 0 "register_operand" "=r")
+ (vec_merge:V8QI
+ (vec_duplicate:V8QI
+ (match_operand:QI 1 "register_operand" "r"))
+ (match_operand:V8QI 2 "register_operand" "0")
+ (match_operand:SI 3 "pwr_7_operand" " D07")))]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ operands[3] = GEN_INT (exact_log2 (INTVAL (operands[3])));
+ return "insb\t%0, %1, %3";
+}
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "V8QI")])
+
+;; KDMBB, KDMBT, KDMTT
+(define_expand "kdmbb"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:V2HI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kdm_internal (operands[0], operands[1], operands[2],
+ GEN_INT (0), GEN_INT (0)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "kdmbt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:V2HI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kdm_internal (operands[0], operands[1], operands[2],
+ GEN_INT (0), GEN_INT (1)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "kdmtt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:V2HI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kdm_internal (operands[0], operands[1], operands[2],
+ GEN_INT (1), GEN_INT (1)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_insn "kdm_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r, r, r")
+ (ashift:SI
+ (mult:SI
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 1 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C00, C01, C01")])))
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 4 "imm_0_1_operand" " C00, C01, C01, C00")]))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "@
+ kdmbb\t%0, %1, %2
+ kdmbt\t%0, %1, %2
+ kdmtt\t%0, %1, %2
+ kdmbt\t%0, %2, %1"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+;; KDMABB, KDMABT, KDMATT
+(define_expand "kdmabb"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kdma_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (0), operands[1]));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "kdmabt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kdma_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (1), operands[1]));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "kdmatt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kdma_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (1), operands[1]));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_insn "kdma_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r, r, r")
+ (ss_plus:SI
+ (ashift:SI
+ (mult:SI
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 1 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C00, C01, C01")])))
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 4 "imm_0_1_operand" " C00, C01, C01, C00")]))))
+ (const_int 1))
+ (match_operand:SI 5 "register_operand" " 0, 0, 0, 0")))]
+ "TARGET_ZPN"
+ "@
+ kdmabb\t%0, %1, %2
+ kdmabt\t%0, %1, %2
+ kdmatt\t%0, %1, %2
+ kdmabt\t%0, %2, %1"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+;; KHMBB, KHMBT, KHMTT
+(define_expand "khmbbsi"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:V2HI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_khmsi_internal (operands[0], operands[1], operands[2],
+ GEN_INT (0), GEN_INT (0)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "khmbtsi"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:V2HI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_khmsi_internal (operands[0], operands[1], operands[2],
+ GEN_INT (0), GEN_INT (1)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "khmttsi"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:V2HI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_khmsi_internal (operands[0], operands[1], operands[2],
+ GEN_INT (1), GEN_INT (1)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_insn "khmsi_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r, r, r")
+ (sign_extend:SI
+      (ss_truncate:HI
+ (ashiftrt:SI
+ (mult:SI
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 1 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C00, C01, C01")])))
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 4 "imm_0_1_operand" " C00, C01, C01, C00")]))))
+ (const_int 15)))))]
+ "TARGET_ZPN"
+ "@
+ khmbb\t%0, %1, %2
+ khmbt\t%0, %1, %2
+ khmtt\t%0, %1, %2
+ khmbt\t%0, %2, %1"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+;; KMABB, KMABT, KMATT
+(define_expand "kmabb"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kma_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (0),
+ operands[1]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmabt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kma_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (1),
+ operands[1]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmatt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kma_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (1),
+ operands[1]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kma_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r, r, r")
+ (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 1 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C00, C01, C01")])))
+ (sign_extend:SI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 4 "imm_0_1_operand" " C00, C01, C01, C00")]))))
+ (match_operand:SI 5 "register_operand" " 0, 0, 0, 0")))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "@
+ kmabb\t%0, %1, %2
+ kmabt\t%0, %1, %2
+ kmatt\t%0, %1, %2
+ kmabt\t%0, %2, %1"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "SI")])
+
+(define_expand "kmabb64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kma64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (2), GEN_INT (0),
+ GEN_INT (2), operands[1]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmabt64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kma64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (2), GEN_INT (1),
+ GEN_INT (3), operands[1]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmatt64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kma64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (3), GEN_INT (1),
+ GEN_INT (3), operands[1]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kma64_internal"
+ [(set (match_operand:V2SI 0 "register_operand" "= r, r, r, r")
+ (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI
+ (vec_select:V2HI
+ (match_operand:V4HI 1 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C00, C01, C01")
+ (match_operand:SI 4 "imm_2_3_operand" " C02, C02, C03, C03")])))
+ (sign_extend:V2SI
+ (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 5 "imm_0_1_operand" " C00, C01, C01, C00")
+ (match_operand:SI 6 "imm_2_3_operand" " C02, C03, C03, C02")]))))
+ (match_operand:V2SI 7 "register_operand" " 0, 0, 0, 0")))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "@
+ kmabb\t%0, %1, %2
+ kmabt\t%0, %1, %2
+ kmatt\t%0, %1, %2
+ kmabt\t%0, %2, %1"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V2SI")])
+
+;; KHM8, KHMX8, KHM16, KHMX16
+(define_insn "khm8"
+ [(set (match_operand:VQI 0 "register_operand" "=r")
+ (unspec:VQI [(match_operand:VQI 1 "register_operand" " r")
+ (match_operand:VQI 2 "register_operand" " r")]
+ UNSPEC_KHM))]
+ "TARGET_ZPN"
+ "khm8\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+(define_insn "khmx8"
+ [(set (match_operand:VQI 0 "register_operand" "=r")
+ (unspec:VQI [(match_operand:VQI 1 "register_operand" " r")
+ (match_operand:VQI 2 "register_operand" " r")]
+ UNSPEC_KHMX))]
+ "TARGET_ZPN"
+ "khmx8\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+(define_insn "khm16"
+ [(set (match_operand:VHI 0 "register_operand" "=r")
+ (unspec:VHI [(match_operand:VHI 1 "register_operand" " r")
+ (match_operand:VHI 2 "register_operand" " r")]
+ UNSPEC_KHM))]
+ "TARGET_ZPN"
+ "khm16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+(define_insn "khmx16"
+ [(set (match_operand:VHI 0 "register_operand" "=r")
+ (unspec:VHI [(match_operand:VHI 1 "register_operand" " r")
+ (match_operand:VHI 2 "register_operand" " r")]
+ UNSPEC_KHMX))]
+ "TARGET_ZPN"
+ "khmx16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "")])
+
+;; KMADA, KMAXDA
+(define_insn "kmada"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+ (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 0)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmada\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmada64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+ (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 0) (const_int 2)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmada\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmaxda"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+ (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 1)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmaxda\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmaxda64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+ (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 0) (const_int 2)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 1) (const_int 3)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmaxda\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+;; KMADS, KMADRS, KMAXDS
+(define_insn "kmads"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+ (ss_minus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 0)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmads\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmads64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+ (ss_minus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 0) (const_int 2)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmads\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmadrs"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+ (ss_minus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 1)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmadrs\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmadrs64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+ (ss_minus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 0) (const_int 2)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 1) (const_int 3)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmadrs\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmaxds"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+ (ss_minus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 1)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmaxds\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmaxds64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+ (ss_minus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 0) (const_int 2)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 1) (const_int 3)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmaxds\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+;; RV64P KMAR64
+(define_insn "vkmar64"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ss_plus:DI (match_operand:DI 1 "register_operand" " 0")
+ (plus:DI
+ (mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 2 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 3 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (mult:DI
+ (sign_extend:DI
+ (vec_select:SI (match_dup 2) (parallel [(const_int 1)])))
+ (sign_extend:DI
+ (vec_select:SI (match_dup 3) (parallel [(const_int 1)])))))))]
+ "TARGET_ZPSF && TARGET_64BIT"
+ "kmar64\t%0, %2, %3"
+ [(set_attr "type" "dsp64")
+ (set_attr "mode" "DI")])
+
+;; KMDA, KMXDA
+(define_insn "kmda"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 1 "register_operand" "r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" "r")
+ (parallel [(const_int 1)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 1)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)]))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmda\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmxda"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 1 "register_operand" "r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" "r")
+ (parallel [(const_int 0)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 1)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 1)]))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmxda\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmxda64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 1 "register_operand" "r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" "r")
+ (parallel [(const_int 0) (const_int 2)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 1)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 1) (const_int 3)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmxda\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmda64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 1 "register_operand" "r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" "r")
+ (parallel [(const_int 1) (const_int 3)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 1)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmda\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+;; KMMAC, KMMAC.u
+(define_insn "kmmac"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI (match_operand:SI 1 "register_operand" " 0")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 2 "register_operand" " r"))
+ (sign_extend:DI (match_operand:SI 3 "register_operand" " r")))
+ (const_int 32)))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmmac\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmmac_64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI (match_operand:V2SI 1 "register_operand" " 0")
+ (truncate:V2SI
+ (lshiftrt:V2DI
+ (mult:V2DI
+ (sign_extend:V2DI (match_operand:V2SI 2 "register_operand" " r"))
+ (sign_extend:V2DI (match_operand:V2SI 3 "register_operand" " r")))
+ (const_int 32)))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmmac\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmmac_round"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_plus:SI (match_operand:SI 1 "register_operand" " 0")
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI [(mult:DI
+ (sign_extend:DI (match_operand:SI 2 "register_operand" " r"))
+ (sign_extend:DI (match_operand:SI 3 "register_operand" " r")))]
+ UNSPEC_ROUND)
+ (const_int 32)))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmmac.u\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmmac64_round"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_plus:V2SI (match_operand:V2SI 1 "register_operand" " 0")
+ (truncate:V2SI
+ (lshiftrt:V2DI
+ (unspec:V2DI [(mult:V2DI
+ (sign_extend:V2DI (match_operand:V2SI 2 "register_operand" " r"))
+ (sign_extend:V2DI (match_operand:V2SI 3 "register_operand" " r")))]
+ UNSPEC_ROUND)
+ (const_int 32)))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmmac.u\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+;; KMMAWB, KMMAWB.u, KMMAWB2, KMMAWB2.u, KMMAWT2, KMMAWT2.u
+(define_insn "kmmaw_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r, r, r")
+ (ss_plus:SI
+ (match_operand:SI 4 "register_operand" " 0, 0, 0, 0")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 1 "register_operand" " r, r, r, r"))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C01, C00, C01")]))))
+ (match_operand:SI 5 "imm_15_16_operand" " C16, C16, C15, C15" )))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "@
+ kmmawb\t%0, %1, %2
+ kmmawt\t%0, %1, %2
+ kmmawb2\t%0, %1, %2
+ kmmawt2\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_expand "kmmawb"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), operands[1], GEN_INT (16)));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmmawt"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), operands[1], GEN_INT (16)));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmmawb2"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), operands[1], GEN_INT (15)));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_expand "kmmawt2"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), operands[1], GEN_INT (15)));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kmmaw_round_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r, r, r")
+ (ss_plus:SI
+ (match_operand:SI 4 "register_operand" " 0, 0, 0, 0")
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI
+ [(mult:DI
+ (sign_extend:DI (match_operand:SI 1 "register_operand" " r, r, r, r"))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C01, C00, C01")]))))]
+ UNSPEC_ROUND)
+ (match_operand:SI 5 "imm_15_16_operand" " C16, C16, C15, C15")))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "@
+ kmmawb.u\t%0, %1, %2
+ kmmawt.u\t%0, %1, %2
+ kmmawb2.u\t%0, %1, %2
+ kmmawt2.u\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_expand "kmmawb_round"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), operands[1], GEN_INT (16)));
+ DONE;
+})
+
+(define_expand "kmmawt_round"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), operands[1], GEN_INT (16)));
+ DONE;
+})
+
+(define_expand "kmmawb2_round"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), operands[1], GEN_INT (15)));
+ DONE;
+})
+
+(define_expand "kmmawt2_round"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")
+ (match_operand:V2HI 3 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), operands[1], GEN_INT (15)));
+ DONE;
+})
+
+(define_insn "kmmaw64_internal"
+  [(set (match_operand:V2SI 0 "register_operand" "=r, r, r, r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 5 "register_operand" "0, 0, 0, 0")
+ (vec_concat:V2SI
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 1 "register_operand" "r, r, r, r")
+ (parallel [(const_int 0)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V4HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" "C00, C01, C00, C01")]))))
+ (match_operand:SI 6 "imm_15_16_operand" "C16, C16, C15, C15")))
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_dup 1)
+ (parallel [(const_int 1)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_dup 2)
+ (parallel [(match_operand:SI 4 "imm_2_3_operand" "C02, C03, C02, C03")]))))
+ (match_dup 6))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "@
+ kmmawb\t%0, %1, %2
+ kmmawt\t%0, %1, %2
+ kmmawb2\t%0, %1, %2
+ kmmawt2\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_expand "kmmawb64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (2), operands[1],
+ GEN_INT (16)));
+ DONE;
+})
+
+(define_expand "kmmawt64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (3), operands[1],
+ GEN_INT (16)));
+ DONE;
+})
+
+(define_expand "kmmawb2_64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (2), operands[1],
+ GEN_INT (15)));
+ DONE;
+})
+
+(define_expand "kmmawt2_64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (3), operands[1],
+ GEN_INT (15)));
+ DONE;
+})
+
+(define_insn "kmmaw64_round_internal"
+  [(set (match_operand:V2SI 0 "register_operand" "=r, r, r, r")
+ (ss_plus:V2SI
+ (match_operand:V2SI 5 "register_operand" " 0, 0, 0, 0")
+ (vec_concat:V2SI
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI
+ [(mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 1 "register_operand" " r, r, r, r")
+ (parallel [(const_int 0)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V4HI 2 "register_operand" " r, r, r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" "C00, C01, C00, C01")]))))]
+ UNSPEC_ROUND)
+          (match_operand:SI 6 "imm_15_16_operand" "C16, C16, C15, C15")))
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI
+ [(mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_dup 1)
+ (parallel [(const_int 1)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_dup 2)
+ (parallel [(match_operand:SI 4 "imm_2_3_operand" "C02, C03, C02, C03")]))))]
+ UNSPEC_ROUND)
+          (match_dup 6))))))]
+"TARGET_ZPN && TARGET_64BIT"
+ "@
+ kmmawb.u\t%0, %1, %2
+ kmmawt.u\t%0, %1, %2
+ kmmawb2.u\t%0, %1, %2
+ kmmawt2.u\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_expand "kmmawb64_round"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (2), operands[1],
+ GEN_INT (16)));
+ DONE;
+})
+
+(define_expand "kmmawt64_round"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (3), operands[1],
+ GEN_INT (16)));
+ DONE;
+})
+
+(define_expand "kmmawb2_64_round"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (0), GEN_INT (2), operands[1],
+ GEN_INT (15)));
+ DONE;
+})
+
+(define_expand "kmmawt2_64_round"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V2SI 2 "register_operand" "")
+ (match_operand:V4HI 3 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmaw64_round_internal (operands[0], operands[2], operands[3],
+ GEN_INT (1), GEN_INT (3), operands[1],
+ GEN_INT (15)));
+ DONE;
+})
+
+;; KMMSB, KMMSB.u
+(define_insn "kmmsb"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_minus:SI (match_operand:SI 1 "register_operand" " 0")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI
+ (sign_extend:DI (match_operand:SI 2 "register_operand" " r"))
+ (sign_extend:DI (match_operand:SI 3 "register_operand" " r")))
+ (const_int 32)))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmmsb\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmmsb_64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_minus:V2SI (match_operand:V2SI 1 "register_operand" " 0")
+ (truncate:V2SI
+ (lshiftrt:V2DI
+ (mult:V2DI
+ (sign_extend:V2DI (match_operand:V2SI 2 "register_operand" " r"))
+ (sign_extend:V2DI (match_operand:V2SI 3 "register_operand" " r")))
+ (const_int 32)))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmmsb\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmmsb_round"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_minus:SI (match_operand:SI 1 "register_operand" " 0")
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI [(mult:DI
+ (sign_extend:DI (match_operand:SI 2 "register_operand" " r"))
+ (sign_extend:DI (match_operand:SI 3 "register_operand" " r")))]
+ UNSPEC_ROUND)
+ (const_int 32)))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmmsb.u\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmmsb64_round"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_minus:V2SI (match_operand:V2SI 1 "register_operand" " 0")
+ (truncate:V2SI
+ (lshiftrt:V2DI
+ (unspec:V2DI [(mult:V2DI
+ (sign_extend:V2DI (match_operand:V2SI 2 "register_operand" " r"))
+ (sign_extend:V2DI (match_operand:V2SI 3 "register_operand" " r")))]
+ UNSPEC_ROUND)
+ (const_int 32)))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmmsb.u\t%0, %2, %3"
+ [(set_attr "type" "simd")])
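+
+;; KMMSB[.u] sketch: rd = SAT.Q31 (rd - (int32_t) (((int64_t) rs1 * rs2) >> 32)),
+;; where the .u form rounds the product by adding 1 << 31 before the shift.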
+
+;; KMMWB2, KMMWB2.u, KMMWT2, KMMWT2.u
+(define_insn "kmmw2_round_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r")
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI
+ [(ss_mult:DI
+ (sign_extend:DI (match_operand:SI 1 "register_operand" " r, r"))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C01")]))))]
+ UNSPEC_KMMWU)
+ (const_int 15))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "@
+ kmmwb2.u\t%0, %1, %2
+ kmmwt2.u\t%0, %1, %2"
+ [(set_attr "type" "simd")])
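+
+;; KMMWB2.u/KMMWT2.u sketch: a doubled Q31 x Q15 product with rounding,
+;; roughly rd = SAT.Q31 (((int64_t) rs1 * half + (1 << 14)) >> 15), where
+;; "half" is the 16-bit element selected by operand 3.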
+
+(define_expand "kmmwb2_round"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmw2_round_internal (operands[0], operands[1],
+ operands[2], GEN_INT (0)));
+ DONE;
+})
+
+(define_expand "kmmwt2_round"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmw2_round_internal (operands[0], operands[1],
+ operands[2], GEN_INT (1)));
+ DONE;
+})
+
+(define_insn "kmmw64_round"
+ [(set (match_operand:V2SI 0 "register_operand" "=r, r")
+ (vec_concat:V2SI
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI
+ [(mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 1 "register_operand" " r, r")
+ (parallel [(const_int 0)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V4HI 2 "register_operand" " r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" "C00, C01")]))))]
+ UNSPEC_KMMWU)
+ (const_int 15)))
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI
+ [(mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_dup 1)
+ (parallel [(const_int 1)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_dup 2)
+ (parallel [(match_operand:SI 4 "imm_2_3_operand" "C02, C03")]))))]
+ UNSPEC_KMMWU)
+ (const_int 15)))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "@
+ kmmwb2.u\t%0, %1, %2
+ kmmwt2.u\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_expand "kmmwb64_round"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmw64_round (operands[0], operands[1], operands[2],
+ GEN_INT (0), GEN_INT (2)));
+ DONE;
+})
+
+(define_expand "kmmwt64_round"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmw64_round (operands[0], operands[1], operands[2],
+ GEN_INT (1), GEN_INT (3)));
+ DONE;
+})
+
+;; KMSDA, KMSXDA
+(define_insn "kmsda"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_minus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+    (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 1)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 0)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmsda\t%0, %2, %3"
+ [(set_attr "type" "simd")])
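+
+;; KMSDA/KMSXDA subtract both 16x16 products from the accumulator, e.g.
+;;   kmsda: rd = SAT.Q31 (rd - a.H[1] * b.H[1] - a.H[0] * b.H[0])
+;; (KMSXDA uses the crossed pairs a.H[1]*b.H[0] and a.H[0]*b.H[1]).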
+
+(define_insn "kmsda64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_minus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+    (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 0) (const_int 2)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmsda\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmsxda"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ss_minus:SI
+ (match_operand:SI 1 "register_operand" " 0")
+    (ss_plus:SI
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r")
+ (parallel [(const_int 1)])))
+ (sign_extend:SI (vec_select:HI
+ (match_operand:V2HI 3 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (mult:SI
+ (sign_extend:SI (vec_select:HI
+ (match_dup 2)
+ (parallel [(const_int 0)])))
+ (sign_extend:SI (vec_select:HI
+ (match_dup 3)
+ (parallel [(const_int 1)])))))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kmsxda\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+(define_insn "kmsxda64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (ss_minus:V2SI
+ (match_operand:V2SI 1 "register_operand" " 0")
+    (ss_plus:V2SI
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 1) (const_int 3)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_operand:V4HI 3 "register_operand" " r")
+ (parallel [(const_int 0) (const_int 2)]))))
+ (mult:V2SI
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 2)
+ (parallel [(const_int 0) (const_int 2)])))
+ (sign_extend:V2SI (vec_select:V2HI
+ (match_dup 3)
+ (parallel [(const_int 1) (const_int 3)])))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmsxda\t%0, %2, %3"
+ [(set_attr "type" "simd")])
+
+;; RV64P KMSR64
+(define_insn "vkmsr64"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ss_minus:DI
+ (minus:DI
+ (match_operand:DI 1 "register_operand" " 0")
+ (mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 2 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 3 "register_operand" " r")
+ (parallel [(const_int 0)])))))
+ (mult:DI
+ (sign_extend:DI
+ (vec_select:SI (match_dup 2) (parallel [(const_int 1)])))
+ (sign_extend:DI
+ (vec_select:SI (match_dup 3) (parallel [(const_int 1)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kmsr64\t%0, %2, %3"
+ [(set_attr "type" "dsp64")
+ (set_attr "mode" "DI")])
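+
+;; KMSR64 sketch (only the final subtraction saturates in this model):
+;;   rd = SAT.Q63 (rd - (int64_t) a.W[0] * b.W[0] - (int64_t) a.W[1] * b.W[1])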
+
+(define_expand "kmmwb64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmw64 (operands[0], operands[1], operands[2],
+ GEN_INT (0), GEN_INT (2)));
+ DONE;
+})
+
+(define_expand "kmmwt64"
+ [(match_operand:V2SI 0 "register_operand" "")
+ (match_operand:V2SI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kmmw64 (operands[0], operands[1], operands[2],
+ GEN_INT (1), GEN_INT (3)));
+ DONE;
+})
+
+(define_insn "kmmw64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r, r")
+ (vec_concat:V2SI
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI [(mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_operand:V2SI 1 "register_operand" " r, r")
+ (parallel [(const_int 0)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V4HI 2 "register_operand" " r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" "C00, C01")]))))]
+ UNSPEC_KMMW)
+ (const_int 15)))
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI [(mult:DI
+ (sign_extend:DI
+ (vec_select:SI
+ (match_dup 1)
+ (parallel [(const_int 1)])))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_dup 2)
+ (parallel [(match_operand:SI 4 "imm_2_3_operand" "C02, C03")]))))]
+ UNSPEC_KMMW)
+ (const_int 15)))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "@
+ kmmwb2\t%0, %1, %2
+ kmmwt2\t%0, %1, %2"
+[(set_attr "type" "simd")])
+
+(define_expand "kmmwb2"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmw2_internal (operands[0], operands[1], operands[2], GEN_INT (0)));
+ DONE;
+})
+
+(define_expand "kmmwt2"
+ [(match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" "")
+ (match_operand:V2HI 2 "register_operand" "")]
+ "TARGET_ZPN && !TARGET_64BIT"
+{
+ emit_insn (gen_kmmw2_internal (operands[0], operands[1], operands[2], GEN_INT (1)));
+ DONE;
+})
+
+(define_insn "kmmw2_internal"
+ [(set (match_operand:SI 0 "register_operand" "= r, r")
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI [(mult:DI
+ (sign_extend:DI (match_operand:SI 1 "register_operand" " r, r"))
+ (sign_extend:DI
+ (vec_select:HI
+ (match_operand:V2HI 2 "register_operand" " r, r")
+ (parallel [(match_operand:SI 3 "imm_0_1_operand" " C00, C01")]))))]
+ UNSPEC_KMMW)
+ (const_int 15))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "@
+ kmmwb2\t%0, %1, %2
+ kmmwt2\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+;; KSLLW
+(define_insn "ksll"
+ [(set (match_operand:SI 0 "register_operand" "= r, r")
+ (ss_ashift:SI (match_operand:SI 1 "register_operand" " r, r")
+                  (match_operand:SI 2 "rimm5u_operand" " u05, r")))]
+ "TARGET_ZPN"
+ "@
+ kslliw\t%0, %1, %2
+ ksllw\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
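+
+;; KSLLW/KSLLIW sketch: a left shift that saturates instead of wrapping,
+;; roughly rd = SAT.Q31 ((int64_t) rs1 << sa).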
+
+;; KSLL8
+(define_insn "kslli8"
+ [(set (match_operand:VQI 0 "register_operand" "= r, r")
+ (ss_ashift:VQI (match_operand:VQI 1 "register_operand" " r, r")
+                   (match_operand:SI 2 "rimm3u_operand" " u03, r")))]
+ "TARGET_ZPN"
+ "@
+ kslli8\t%0, %1, %2
+ ksll8\t%0, %1, %2"
+ [(set_attr "type" "simd, simd")
+   (set_attr "mode" "<MODE>, <MODE>")])
+
+;; KSLL16
+(define_insn "kslli16"
+ [(set (match_operand:VHI 0 "register_operand" "= r, r")
+ (ss_ashift:VHI (match_operand:VHI 1 "register_operand" " r, r")
+ (match_operand:SI 2 "rimm4u_operand" " u04, r")))]
+ "TARGET_ZPN"
+ "@
+ kslli16\t%0, %1, %2
+ ksll16\t%0, %1, %2"
+ [(set_attr "type" "simd, simd")
+   (set_attr "mode" "<MODE>, <MODE>")])
+
+;; KSLL32, KSLLI32
+(define_insn "ksll32"
+ [(set (match_operand:V2SI 0 "register_operand" "= r, r")
+ (ss_ashift:V2SI (match_operand:V2SI 1 "register_operand" " r, r")
+ (match_operand:SI 2 "rimm5u_operand" " u05, r")))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "@
+ kslli32\t%0, %1, %2
+ ksll32\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V2SI")])
+
+;; KSLRA 8|16|32
+(define_expand "kslra"
+ [(match_operand:VECI 0 "register_operand" "")
+ (match_operand:VECI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ unsigned int extract_bits;
+ switch ()
+ {
+ case 8: extract_bits = 4; break;
+ case 16: extract_bits = 5; break;
+ case 32: extract_bits = 6; break;
+ default: gcc_unreachable();
+ }
+ emit_insn (gen_kslra_internal (operands[0],
+ operands[1], operands[2], GEN_INT (extract_bits)));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kslra_internal"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (if_then_else:VECI
+ (lt:SI
+ (sign_extract:SI
+ (match_operand:SI 2 "register_operand" " r")
+ (match_operand:SI 3 "imm3u_operand" " I")
+ (const_int 0))
+ (const_int 0))
+ (ashiftrt:VECI (match_operand:VECI 1 "register_operand" " r")
+ (neg:SI (sign_extract:SI (match_dup 2) (match_dup 3) (const_int 0))))
+ (ss_ashift:VECI (match_dup 1)
+ (sign_extract:SI (match_dup 2) (match_dup 3) (const_int 0)))))]
+ "TARGET_ZPN"
+  "kslra<bits>\t%0, %1, %2"
+  [(set_attr "type" "simd")
+   (set_attr "mode" "<MODE>")])
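+
+;; KSLRA treats the sign-extracted low bits of operand 2 as a signed shift
+;; amount: negative values shift right arithmetically, non-negative values
+;; do a saturating left shift, roughly
+;;   rd.E[i] = sa < 0 ? a.E[i] >> -sa : SAT (a.E[i] << sa)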
+
+;; KSLRA[8|16|32].u
+(define_expand "kslra_round"
+ [(match_operand:VECI 0 "register_operand" "")
+ (match_operand:VECI 1 "register_operand" "")
+ (match_operand:SI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ unsigned int extract_bits;
+ switch ()
+ {
+ case 8: extract_bits = 4; break;
+ case 16: extract_bits = 5; break;
+ case 32: extract_bits = 6; break;
+ default: gcc_unreachable();
+ }
+ emit_insn (gen_kslra_round_internal (operands[0],
+ operands[1], operands[2], GEN_INT (extract_bits)));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kslra_round_internal"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (if_then_else:VECI
+ (lt:SI
+ (sign_extract:SI
+ (match_operand:SI 2 "register_operand" " r")
+ (match_operand:SI 3 "imm3u_operand" " I")
+ (const_int 0))
+ (const_int 0))
+ (unspec:VECI [(ashiftrt:VECI (match_operand:VECI 1 "register_operand" " r")
+ (neg:SI (sign_extract:SI (match_dup 2) (match_dup 3) (const_int 0))))]
+ UNSPEC_ROUND)
+ (ss_ashift:VECI (match_dup 1)
+ (sign_extract:SI (match_dup 2) (match_dup 3) (const_int 0)))))]
+ "TARGET_ZPN"
+  "kslra<bits>.u\t%0, %1, %2"
+  [(set_attr "type" "simd")
+   (set_attr "mode" "<MODE>")])
+
+;; KSLRAW, KSLRAW.u
+(define_insn "kslraw"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")] UNSPEC_KSLRAW))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kslraw\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+(define_insn "kslraw64"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (sign_extend:DI
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")] UNSPEC_KSLRAW)))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kslraw\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "DI")])
+
+(define_insn "kslrawu"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")] UNSPEC_KSLRAWU))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kslraw.u\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+(define_insn "kslrawu64"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (sign_extend:DI
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")] UNSPEC_KSLRAWU)))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kslraw.u\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "DI")])
+
+;; KSTAS16|32, KSTSA16|32
+(define_expand "kstas"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kstas_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kstas_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (ss_minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (vec_duplicate:VSHI
+ (ss_plus:
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))))
+ (const_int 1)))]
+ "TARGET_ZPN"
+ "kstas\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
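+
+;; KSTAS sketch (little-endian element order): saturating add on the top
+;; pair, saturating subtract on the bottom pair:
+;;   rd.E[1] = SAT (a.E[1] + b.E[1]); rd.E[0] = SAT (a.E[0] - b.E[0])
+;; KSTSA below swaps which pair adds and which subtracts.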
+
+;; RV64 KSTAS16
+(define_expand "kstas16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kstas16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+})
+
+(define_insn "kstas16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (ss_minus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (ss_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 1)]))))
+ (vec_concat:V2HI
+ (ss_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)])))
+ (ss_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kstas16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "kstsa"
+ [(match_operand:VSHI 0 "register_operand" "")
+ (match_operand:VSHI 1 "register_operand" "")
+ (match_operand:VSHI 2 "register_operand" "")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_kstsa_le (operands[0], operands[1], operands[2]));
+ DONE;
+}
+[(set_attr "type" "simd")])
+
+(define_insn "kstsa_le"
+ [(set (match_operand:VSHI 0 "register_operand" "=r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+ (ss_minus:
+ (vec_select:
+ (match_operand:VSHI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:
+ (match_operand:VSHI 2 "register_operand" " r")
+ (parallel [(const_int 0)]))))
+ (vec_duplicate:VSHI
+ (ss_plus:
+ (vec_select:
+ (match_dup 1)
+ (parallel [(const_int 1)]))
+ (vec_select:
+ (match_dup 2)
+ (parallel [(const_int 1)]))))
+ (const_int 2)))]
+ "TARGET_ZPN"
+ "kstsa\t%0, %1, %2"
+ [(set_attr "type" "simd")]
+)
+
+(define_expand "kstsa16_64"
+ [(match_operand:V4HI 0 "register_operand" "")
+ (match_operand:V4HI 1 "register_operand" "")
+ (match_operand:V4HI 2 "register_operand" "")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_kstsa16_64_le (operands[0], operands[1], operands[2]));
+ DONE;
+})
+
+(define_insn "kstsa16_64_le"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_concat:V4HI
+ (vec_concat:V2HI
+ (ss_plus:HI (vec_select:HI (match_operand:V4HI 1 "register_operand" " r")
+ (parallel [(const_int 0)]))
+ (vec_select:HI (match_operand:V4HI 2 "register_operand" " r")
+ (parallel [(const_int 0)])))
+ (ss_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 1)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 1)]))))
+ (vec_concat:V2HI
+ (ss_plus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 2)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 2)])))
+ (ss_minus:HI (vec_select:HI (match_dup 1) (parallel [(const_int 3)]))
+ (vec_select:HI (match_dup 2) (parallel [(const_int 3)]))))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kstsa16\t%0, %1, %2"
+ [(set_attr "type" "simd")
+ (set_attr "mode" "V4HI")])
+
+;; KWMMUL[.u]
+(define_insn "kwmmul"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (truncate:SI
+ (lshiftrt:DI
+ (ss_mult:DI
+ (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" " r")) (const_int 2))
+        (sign_extend:DI (match_operand:SI 2 "register_operand" " r")))
+ (const_int 32))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kwmmul\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_insn "kwmmul_64"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (truncate:V2SI
+ (lshiftrt:V2DI
+ (ss_mult:V2DI
+ (mult:V2DI (sign_extend:V2DI (match_operand:V2SI 1 "register_operand" " r")) (const_int 2))
+        (sign_extend:V2DI (match_operand:V2SI 2 "register_operand" " r")))
+ (const_int 32))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kwmmul\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_insn "kwmmul_round"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (truncate:SI
+ (lshiftrt:DI
+ (unspec:DI [
+ (ss_mult:DI
+ (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" " r")) (const_int 2))
+          (sign_extend:DI (match_operand:SI 2 "register_operand" " r")))]
+ UNSPEC_ROUND)
+ (const_int 32))))]
+ "TARGET_ZPN && !TARGET_64BIT"
+ "kwmmul.u\t%0, %1, %2"
+ [(set_attr "type" "simd")])
+
+(define_insn "kwmmul64_round"
+ [(set (match_operand:V2SI 0 "register_operand" "=r")
+ (truncate:V2SI
+ (lshiftrt:V2DI
+ (unspec:V2DI [
+ (ss_mult:V2DI
+ (mult:V2DI (sign_extend:V2DI (match_operand:V2SI 1 "register_operand" " r")) (const_int 2))
+          (sign_extend:V2DI (match_operand:V2SI 2 "register_operand" " r")))]
+ UNSPEC_ROUND)
+ (const_int 32))))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "kwmmul.u\t%0, %1, %2"
+ [(set_attr "type" "simd")])
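+
+;; KWMMUL[.u] sketch: Q31 fractional multiply, roughly
+;;   rd = SAT.Q31 (((int64_t) rs1 * rs2) >> 31);
+;; the const_int 2 factor expresses the "double" step so the one overflowing
+;; case (0x80000000 * 0x80000000) is visible to the saturating ss_mult.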
+
+;; MADDR32, MSUBR32
+(define_insn "maddr32"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (plus:SI (mult:SI (match_operand:SI 1 "register_operand" " r")
+ (match_operand:SI 2 "register_operand" " r"))
+ (match_operand:SI 3 "register_operand" " 0")))]
+ "TARGET_ZPN"
+ "maddr32\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
+
+(define_insn "msubr32"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (minus:SI (match_operand:SI 3 "register_operand" " 0")
+ (mult:SI (match_operand:SI 1 "register_operand" " r")
+ (match_operand:SI 2 "register_operand" " r"))))]
+ "TARGET_ZPN"
+ "msubr32\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "SI")])
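+
+;; MADDR32/MSUBR32 sketch: plain modular (non-saturating) multiply-accumulate,
+;;   rd = (rd +/- rs1 * rs2) mod 2^32.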
+
+;; MULR64, MULSR64
+(define_insn "rvp_umulsidi3"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" " r"))
+ (zero_extend:DI (match_operand:SI 2 "register_operand" " r"))))]
+ "TARGET_ZPSF"
+ "mulr64\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "DI")])
+
+(define_insn "rvp_mulsidi3"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" " r"))
+ (sign_extend:DI (match_operand:SI 2 "register_operand" " r"))))]
+ "TARGET_ZPSF"
+ "mulsr64\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "DI")])
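+
+;; MULR64/MULSR64 return the full 64-bit unsigned/signed product of two
+;; 32-bit sources; the rvp_ prefix presumably keeps these names distinct
+;; from the generic [u]mulsidi3 patterns.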
+
+;; MAX, MIN
+(define_insn "smax3"
+ [(set (match_operand:X 0 "register_operand" "=r")
+ (smax:X (match_operand:X 1 "register_operand" " r")
+ (match_operand:X 2 "register_operand" " r")))]
+ "TARGET_ZBPBO"
+ "max\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+   (set_attr "mode" "<MODE>")])
+
+(define_insn "smin3"
+ [(set (match_operand:X 0 "register_operand" "=r")
+ (smin:X (match_operand:X 1 "register_operand" " r")
+ (match_operand:X 2 "register_operand" " r")))]
+ "TARGET_ZBPBO"
+ "min\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+   (set_attr "mode" "<MODE>")])
+
+;; PBSAD, PBSADA
+(define_insn "pbsad"
+ [(set (match_operand:X 0 "register_operand" "=r")
+ (unspec:X [(match_operand:X 1 "register_operand" " r")
+ (match_operand:X 2 "register_operand" " r")] UNSPEC_PBSAD))]
+ "TARGET_ZPN"
+ "pbsad\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+   (set_attr "mode" "<MODE>")])
+
+(define_insn "pbsada"
+ [(set (match_operand:X 0 "register_operand" "=r")
+ (unspec:X [(match_operand:X 1 "register_operand" " 0")
+ (match_operand:X 2 "register_operand" " r")
+ (match_operand:X 3 "register_operand" " r")] UNSPEC_PBSADA))]
+ "TARGET_ZPN"
+ "pbsada\t%0, %2, %3"
+ [(set_attr "type" "dsp")
+   (set_attr "mode" "<MODE>")])
+
+;; PKBB[16|32], PKBT[16|32], PKTT[16|32], PKTB[16|32]
+(define_insn "vec_merge"
+ [(set (match_operand:VSHI 0 "register_operand" "= r, r, r, r, r, r, r, r")
+ (vec_merge:VSHI
+ (vec_duplicate:VSHI
+        (vec_select:<VEL>
+ (match_operand:VSHI 1 "register_operand" " r, r, r, r, r, r, r, r")
+ (parallel [(match_operand:SI 4 "imm_0_1_operand" "C00, C00, C01, C01, C00, C00, C01, C01")])))
+ (vec_duplicate:VSHI
+        (vec_select:<VEL>
+ (match_operand:VSHI 2 "register_operand" " r, r, r, r, r, r, r, r")
+ (parallel [(match_operand:SI 5 "imm_0_1_operand" "C00, C01, C01, C00, C00, C01, C01, C00")])))
+ (match_operand:SI 3 "imm_1_2_operand" "C01, C01, C01, C01, C02, C02, C02, C02")))]
+ "TARGET_ZPN"
+ {
+ const char *pats[] = {
+ TARGET_ZBPBO ? "pack\t%0, %2, %1" : "pkbb\t%0, %2, %1",
+ "pktb\t%0, %2, %1",
+ TARGET_ZBPBO ? "packu\t%0, %2, %1" : "pktt\t%0, %2, %1",
+ "pkbt\t%0, %2, %1",
+ TARGET_ZBPBO ? "pack\t%0, %1, %2" : "pkbb\t%0, %1, %2",
+ "pkbt\t%0, %1, %2",
+ TARGET_ZBPBO ? "packu\t%0, %1, %2" : "pktt\t%0, %1, %2",
+ "pktb\t%0, %1, %2" };
+ return pats[which_alternative];
+ }
+ [(set_attr "type" "dsp")
+   (set_attr "mode" "<MODE>")])
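+
+;; The eight alternatives cover both operand orders of the four pack
+;; combinations; with Zbpbo, PKBB/PKTT can instead be emitted as the
+;; bit-manipulation PACK/PACKU forms.  E.g. pkbb16 is roughly
+;;   rd = { hi: a.H[0], lo: b.H[0] }.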
+
+(define_expand "pkbb"
+ [(match_operand:VSHI 0 "register_operand")
+ (match_operand:VSHI 1 "register_operand")
+ (match_operand:VSHI 2 "register_operand")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_vec_merge (operands[0], operands[1], operands[2],
+ GEN_INT (2), GEN_INT (0), GEN_INT (0)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "pkbt"
+ [(match_operand:VSHI 0 "register_operand")
+ (match_operand:VSHI 1 "register_operand")
+ (match_operand:VSHI 2 "register_operand")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_vec_merge (operands[0], operands[1], operands[2],
+ GEN_INT (2), GEN_INT (0), GEN_INT (1)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "pktt"
+ [(match_operand:VSHI 0 "register_operand")
+ (match_operand:VSHI 1 "register_operand")
+ (match_operand:VSHI 2 "register_operand")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_vec_merge (operands[0], operands[1], operands[2],
+ GEN_INT (2), GEN_INT (1), GEN_INT (1)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+(define_expand "pktb"
+ [(match_operand:VSHI 0 "register_operand")
+ (match_operand:VSHI 1 "register_operand")
+ (match_operand:VSHI 2 "register_operand")]
+ "TARGET_ZPN"
+{
+ emit_insn (gen_vec_merge (operands[0], operands[1], operands[2],
+ GEN_INT (2), GEN_INT (1), GEN_INT (0)));
+ DONE;
+}
+[(set_attr "type" "dsp")])
+
+;; RV64 PKBB16, PKBT16, PKTT16, PKTB16
+(define_expand "pkbb64"
+ [(match_operand:V4HI 0 "register_operand")
+ (match_operand:V4HI 1 "register_operand")
+ (match_operand:V4HI 2 "register_operand")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_vec_pkbb64 (operands[0], operands[1], operands[2]));
+ DONE;
+})
+
+(define_insn "vec_pkbb64"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_select:V4HI
+ (vec_concat:V8HI (match_operand:V4HI 1 "register_operand" "r")
+ (match_operand:V4HI 2 "register_operand" "r"))
+ (parallel [(const_int 0) (const_int 4)
+ (const_int 2) (const_int 6)])))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "pkbb16\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "pkbt64"
+ [(match_operand:V4HI 0 "register_operand")
+ (match_operand:V4HI 1 "register_operand")
+ (match_operand:V4HI 2 "register_operand")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_vec_pkbt64 (operands[0], operands[1], operands[2]));
+ DONE;
+})
+
+(define_insn "vec_pkbt64"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_select:V4HI
+ (vec_concat:V8HI (match_operand:V4HI 1 "register_operand" "r")
+ (match_operand:V4HI 2 "register_operand" "r"))
+ (parallel [(const_int 0) (const_int 5)
+ (const_int 2) (const_int 7)])))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "pkbt16\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "pktt64"
+ [(match_operand:V4HI 0 "register_operand")
+ (match_operand:V4HI 1 "register_operand")
+ (match_operand:V4HI 2 "register_operand")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_vec_pktt64 (operands[0], operands[1], operands[2]));
+ DONE;
+})
+
+(define_insn "vec_pktt64"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_select:V4HI
+ (vec_concat:V8HI (match_operand:V4HI 1 "register_operand" "r")
+ (match_operand:V4HI 2 "register_operand" "r"))
+ (parallel [(const_int 1) (const_int 5)
+ (const_int 3) (const_int 7)])))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "pktt16\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "V4HI")])
+
+(define_expand "pktb64"
+ [(match_operand:V4HI 0 "register_operand")
+ (match_operand:V4HI 1 "register_operand")
+ (match_operand:V4HI 2 "register_operand")]
+ "TARGET_ZPN && TARGET_64BIT"
+{
+ emit_insn (gen_vec_pktb64 (operands[0], operands[1], operands[2]));
+ DONE;
+})
+
+(define_insn "vec_pktb64"
+ [(set (match_operand:V4HI 0 "register_operand" "=r")
+ (vec_select:V4HI
+ (vec_concat:V8HI (match_operand:V4HI 1 "register_operand" "r")
+ (match_operand:V4HI 2 "register_operand" "r"))
+ (parallel [(const_int 1) (const_int 4)
+ (const_int 3) (const_int 6)])))]
+ "TARGET_ZPN && TARGET_64BIT"
+ "pktb16\t%0, %1, %2"
+ [(set_attr "type" "dsp")
+ (set_attr "mode" "V4HI")])
+
+;; [U]RADD[8|16|32|64|W], [U]RSUB[8|16|32|64|W]
+(define_insn "radd3"
+ [(set (match_operand:VECI 0 "register_operand" "=r")
+ (truncate:VECI
+ (ashiftrt: