From c671b5de16267b73fdb1b0455ce09046a640b768 Mon Sep 17 00:00:00 2001 From: Kiva Date: Fri, 26 Jan 2024 14:17:41 +0800 Subject: [PATCH] [Clang][XTHeadVector] Implement `vadd/vsub/vrsub/vneg` intrinsics (#58) * [Clang][XTHeadVector] Implement `vadd/vsub/vrsub` * [Clang][XTHeadVector] Test some handwritten `vadd` cases * [Clang][XTHeadVector] Add tests * [Clang][XTHeadVector] Add wrappers and tests * [Clang][XTHeadVector] Add `vneg` --- .../clang/Basic/riscv_vector_xtheadv.td | 41 ++ .../Basic/riscv_vector_xtheadv_wrappers.td | 205 ++++++ .../vector-single-width-add/thead/vadd.c | 646 ++++++++++++++++++ .../vector-single-width-add/thead/vneg.c | 166 +++++ .../vector-single-width-add/thead/vrsub.c | 326 +++++++++ .../vector-single-width-add/thead/vsub.c | 646 ++++++++++++++++++ .../vector-single-width-add/wrappers/vadd.c | 646 ++++++++++++++++++ .../vector-single-width-add/wrappers/vneg.c | 166 +++++ .../vector-single-width-add/wrappers/vrsub.c | 326 +++++++++ .../vector-single-width-add/wrappers/vsub.c | 646 ++++++++++++++++++ 10 files changed, 3814 insertions(+) create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c diff --git 
a/clang/include/clang/Basic/riscv_vector_xtheadv.td b/clang/include/clang/Basic/riscv_vector_xtheadv.td index 89c372942a02ec6..2e717e43f3c29c8 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv.td @@ -600,6 +600,7 @@ multiclass RVVUSSegLoad types if (IsMasked) Operands.push_back(Ops[0]); Operands.push_back(Ops[Offset + 1]); // VL + // TODO: no policy in LLVM side for masked intrinsics. if (IsMasked) Operands.push_back(ConstantInt::get(Ops.back()->getType(), PolicyAttrs)); @@ -712,6 +713,7 @@ multiclass RVVUSSegLoadFF typ if (IsMasked) Operands.push_back(Ops[0]); Operands.push_back(Ops[Offset + 2]); // vl + // TODO: no policy in LLVM side for masked intrinsics. if (IsMasked) Operands.push_back(ConstantInt::get(Ops.back()->getType(), PolicyAttrs)); @@ -779,8 +781,47 @@ let UnMaskedPolicyScheme = NonePolicy, // 12. Vector Integer Arithmetic Operations //===----------------------------------------------------------------------===// +multiclass RVVPseudoUnaryBuiltin<string IR, string type_range> { + let Name = NAME, + IRName = IR, + MaskedIRName = IR # "_mask", + UnMaskedPolicyScheme = HasPassthruOperand, + ManualCodegen = [{ + { + if (IsMasked) { + std::rotate(Ops.begin(), Ops.begin() + 1, Ops.end() - 1); + if ((PolicyAttrs & RVV_VTA) && (PolicyAttrs & RVV_VMA)) + Ops.insert(Ops.begin(), llvm::PoisonValue::get(ResultType)); + } else { + if (PolicyAttrs & RVV_VTA) + Ops.insert(Ops.begin(), llvm::PoisonValue::get(ResultType)); + } + auto ElemTy = cast<llvm::VectorType>(ResultType)->getElementType(); + Ops.insert(Ops.begin() + 2, llvm::Constant::getNullValue(ElemTy)); + + if (IsMasked) { + // TODO: no policy in LLVM side for masked intrinsics. 
+ Ops.push_back(ConstantInt::get(Ops.back()->getType(), PolicyAttrs)); + // maskedoff, op1, op2, mask, vl, policy + IntrinsicTypes = {ResultType, ElemTy, Ops[4]->getType()}; + } else { + // passthru, op1, op2, vl + IntrinsicTypes = {ResultType, ElemTy, Ops[3]->getType()}; + } + break; + } + }] in { + def : RVVBuiltin<"v", "vv", type_range>; + } +} + let UnMaskedPolicyScheme = HasPassthruOperand in { defm th_vadd : RVVIntBinBuiltinSet; + defm th_vsub : RVVIntBinBuiltinSet; + defm th_vrsub : RVVOutOp1BuiltinSet<"th_vrsub", "csil", + [["vx", "v", "vve"], + ["vx", "Uv", "UvUvUe"]]>; } +defm th_vneg_v : RVVPseudoUnaryBuiltin<"th_vrsub", "csil">; include "riscv_vector_xtheadv_wrappers.td" diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td b/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td index c26268342e6abb2..41a39e9f20e8215 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td @@ -843,3 +843,208 @@ let HeaderCode = #define __riscv_vsxw_v_u64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u64m8(dst_ptr, indexed, value, vl) }] in def th_indexed_wrapper_macros: RVVHeader; + + +let HeaderCode = +[{ +// Vector Single-Width Integer Add and Subtract +#define __riscv_vadd_vv_i8m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_i8m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i8m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_i8m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i8m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_i8m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i8m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_i8m8(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i16m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_i16m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i16m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_i16m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i16m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_i16m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i16m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_i16m8(op1_v, op2_v, 
vl) +#define __riscv_vadd_vv_i32m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_i32m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i32m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_i32m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i32m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_i32m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i32m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_i32m8(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i64m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_i64m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i64m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_i64m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i64m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_i64m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_i64m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_i64m8(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u8m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_u8m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u8m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_u8m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u8m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_u8m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u8m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_u8m8(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u16m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_u16m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u16m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_u16m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u16m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_u16m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u16m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_u16m8(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u32m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_u32m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u32m2(op1_v, op2_v, vl) __riscv_th_vadd_vv_u32m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u32m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_u32m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u32m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_u32m8(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u64m1(op1_v, op2_v, vl) __riscv_th_vadd_vv_u64m1(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u64m2(op1_v, op2_v, vl) 
__riscv_th_vadd_vv_u64m2(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u64m4(op1_v, op2_v, vl) __riscv_th_vadd_vv_u64m4(op1_v, op2_v, vl) +#define __riscv_vadd_vv_u64m8(op1_v, op2_v, vl) __riscv_th_vadd_vv_u64m8(op1_v, op2_v, vl) + +#define __riscv_vadd_vx_i8m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_i8m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i8m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_i8m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i8m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_i8m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i8m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_i8m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i16m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_i16m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i16m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_i16m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i16m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_i16m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i16m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_i16m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i32m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_i32m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i32m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_i32m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i32m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_i32m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i32m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_i32m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i64m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_i64m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i64m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_i64m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i64m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_i64m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_i64m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_i64m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u8m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_u8m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u8m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_u8m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u8m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_u8m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u8m8(op1_v, 
op2_x, vl) __riscv_th_vadd_vx_u8m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u16m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_u16m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u16m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_u16m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u16m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_u16m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u16m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_u16m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u32m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_u32m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u32m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_u32m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u32m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_u32m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u32m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_u32m8(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u64m1(op1_v, op2_x, vl) __riscv_th_vadd_vx_u64m1(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u64m2(op1_v, op2_x, vl) __riscv_th_vadd_vx_u64m2(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u64m4(op1_v, op2_x, vl) __riscv_th_vadd_vx_u64m4(op1_v, op2_x, vl) +#define __riscv_vadd_vx_u64m8(op1_v, op2_x, vl) __riscv_th_vadd_vx_u64m8(op1_v, op2_x, vl) + +#define __riscv_vsub_vv_i8m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_i8m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i8m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_i8m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i8m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_i8m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i8m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_i8m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i16m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_i16m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i16m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_i16m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i16m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_i16m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i16m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_i16m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i32m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_i32m1(op1_v, op2_v, vl) +#define 
__riscv_vsub_vv_i32m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_i32m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i32m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_i32m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i32m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_i32m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i64m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_i64m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i64m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_i64m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i64m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_i64m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_i64m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_i64m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u8m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_u8m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u8m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_u8m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u8m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_u8m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u8m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_u8m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u16m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_u16m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u16m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_u16m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u16m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_u16m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u16m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_u16m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u32m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_u32m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u32m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_u32m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u32m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_u32m4(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u32m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_u32m8(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u64m1(op1_v, op2_v, vl) __riscv_th_vsub_vv_u64m1(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u64m2(op1_v, op2_v, vl) __riscv_th_vsub_vv_u64m2(op1_v, op2_v, vl) +#define __riscv_vsub_vv_u64m4(op1_v, op2_v, vl) __riscv_th_vsub_vv_u64m4(op1_v, 
op2_v, vl) +#define __riscv_vsub_vv_u64m8(op1_v, op2_v, vl) __riscv_th_vsub_vv_u64m8(op1_v, op2_v, vl) + +#define __riscv_vsub_vx_i8m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_i8m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i8m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_i8m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i8m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_i8m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i8m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_i8m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i16m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_i16m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i16m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_i16m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i16m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_i16m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i16m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_i16m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i32m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_i32m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i32m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_i32m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i32m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_i32m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i32m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_i32m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i64m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_i64m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i64m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_i64m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i64m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_i64m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_i64m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_i64m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u8m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_u8m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u8m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_u8m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u8m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_u8m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u8m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_u8m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u16m1(op1_v, op2_x, vl) 
__riscv_th_vsub_vx_u16m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u16m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_u16m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u16m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_u16m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u16m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_u16m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u32m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_u32m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u32m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_u32m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u32m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_u32m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u32m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_u32m8(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u64m1(op1_v, op2_x, vl) __riscv_th_vsub_vx_u64m1(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u64m2(op1_v, op2_x, vl) __riscv_th_vsub_vx_u64m2(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u64m4(op1_v, op2_x, vl) __riscv_th_vsub_vx_u64m4(op1_v, op2_x, vl) +#define __riscv_vsub_vx_u64m8(op1_v, op2_x, vl) __riscv_th_vsub_vx_u64m8(op1_v, op2_x, vl) + +#define __riscv_vrsub_vx_i8m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i8m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i8m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i8m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i8m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i8m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i8m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i8m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i16m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i16m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i16m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i16m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i16m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i16m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i16m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i16m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i32m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i32m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i32m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i32m2(op1_v, op2_x, vl) +#define 
__riscv_vrsub_vx_i32m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i32m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i32m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i32m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i64m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i64m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i64m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i64m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i64m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i64m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_i64m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_i64m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u8m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u8m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u8m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u8m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u8m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u8m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u8m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u8m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u16m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u16m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u16m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u16m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u16m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u16m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u16m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u16m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u32m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u32m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u32m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u32m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u32m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u32m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u32m8(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u32m8(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u64m1(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u64m1(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u64m2(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u64m2(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u64m4(op1_v, op2_x, vl) __riscv_th_vrsub_vx_u64m4(op1_v, op2_x, vl) +#define __riscv_vrsub_vx_u64m8(op1_v, 
op2_x, vl) __riscv_th_vrsub_vx_u64m8(op1_v, op2_x, vl) + +#define __riscv_vneg_v_i8m1(op1_v, vl) __riscv_th_vneg_v_i8m1(op1_v, vl) +#define __riscv_vneg_v_i8m2(op1_v, vl) __riscv_th_vneg_v_i8m2(op1_v, vl) +#define __riscv_vneg_v_i8m4(op1_v, vl) __riscv_th_vneg_v_i8m4(op1_v, vl) +#define __riscv_vneg_v_i8m8(op1_v, vl) __riscv_th_vneg_v_i8m8(op1_v, vl) +#define __riscv_vneg_v_i16m1(op1_v, vl) __riscv_th_vneg_v_i16m1(op1_v, vl) +#define __riscv_vneg_v_i16m2(op1_v, vl) __riscv_th_vneg_v_i16m2(op1_v, vl) +#define __riscv_vneg_v_i16m4(op1_v, vl) __riscv_th_vneg_v_i16m4(op1_v, vl) +#define __riscv_vneg_v_i16m8(op1_v, vl) __riscv_th_vneg_v_i16m8(op1_v, vl) +#define __riscv_vneg_v_i32m1(op1_v, vl) __riscv_th_vneg_v_i32m1(op1_v, vl) +#define __riscv_vneg_v_i32m2(op1_v, vl) __riscv_th_vneg_v_i32m2(op1_v, vl) +#define __riscv_vneg_v_i32m4(op1_v, vl) __riscv_th_vneg_v_i32m4(op1_v, vl) +#define __riscv_vneg_v_i32m8(op1_v, vl) __riscv_th_vneg_v_i32m8(op1_v, vl) +#define __riscv_vneg_v_i64m1(op1_v, vl) __riscv_th_vneg_v_i64m1(op1_v, vl) +#define __riscv_vneg_v_i64m2(op1_v, vl) __riscv_th_vneg_v_i64m2(op1_v, vl) +#define __riscv_vneg_v_i64m4(op1_v, vl) __riscv_th_vneg_v_i64m4(op1_v, vl) +#define __riscv_vneg_v_i64m8(op1_v, vl) __riscv_th_vneg_v_i64m8(op1_v, vl) +#define __riscv_vneg_v_u8m1(op1_v, vl) __riscv_th_vneg_v_u8m1(op1_v, vl) +#define __riscv_vneg_v_u8m2(op1_v, vl) __riscv_th_vneg_v_u8m2(op1_v, vl) +#define __riscv_vneg_v_u8m4(op1_v, vl) __riscv_th_vneg_v_u8m4(op1_v, vl) +#define __riscv_vneg_v_u8m8(op1_v, vl) __riscv_th_vneg_v_u8m8(op1_v, vl) +#define __riscv_vneg_v_u16m1(op1_v, vl) __riscv_th_vneg_v_u16m1(op1_v, vl) +#define __riscv_vneg_v_u16m2(op1_v, vl) __riscv_th_vneg_v_u16m2(op1_v, vl) +#define __riscv_vneg_v_u16m4(op1_v, vl) __riscv_th_vneg_v_u16m4(op1_v, vl) +#define __riscv_vneg_v_u16m8(op1_v, vl) __riscv_th_vneg_v_u16m8(op1_v, vl) +#define __riscv_vneg_v_u32m1(op1_v, vl) __riscv_th_vneg_v_u32m1(op1_v, vl) +#define __riscv_vneg_v_u32m2(op1_v, vl) 
__riscv_th_vneg_v_u32m2(op1_v, vl) +#define __riscv_vneg_v_u32m4(op1_v, vl) __riscv_th_vneg_v_u32m4(op1_v, vl) +#define __riscv_vneg_v_u32m8(op1_v, vl) __riscv_th_vneg_v_u32m8(op1_v, vl) +#define __riscv_vneg_v_u64m1(op1_v, vl) __riscv_th_vneg_v_u64m1(op1_v, vl) +#define __riscv_vneg_v_u64m2(op1_v, vl) __riscv_th_vneg_v_u64m2(op1_v, vl) +#define __riscv_vneg_v_u64m4(op1_v, vl) __riscv_th_vneg_v_u64m4(op1_v, vl) +#define __riscv_vneg_v_u64m8(op1_v, vl) __riscv_th_vneg_v_u64m8(op1_v, vl) + +}] in +def th_single_width_integer_add_wrapper_macros: RVVHeader diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c new file mode 100644 index 000000000000000..10279c7487a7b4a --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vadd.c @@ -0,0 +1,646 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vadd_vv_i8m1 +// CHECK-RV64-SAME: (<vscale x 8 x i8> [[OP1:%.*]], <vscale x 8 x i8> [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.th.vadd.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[OP1]], <vscale x 8 x i8> [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]] +// +vint8m1_t test_vadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vadd_vx_i8m1 +// CHECK-RV64-SAME: (<vscale x 8 x i8> [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.th.vadd.nxv8i8.i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]] +// +vint8m1_t 
test_vadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vadd_vx_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vadd_vx_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vadd_vx_i8m4(op1, op2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vadd_vx_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vadd_vx_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vadd_vx_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vadd_vx_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vadd_vx_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vadd_vx_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vadd_vx_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vadd_vx_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vadd_vv_i32m8(vint32m8_t 
op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vadd_vx_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vadd_vx_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_i64m2(op1, op2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vadd_vx_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vadd_vx_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vadd_vx_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vadd_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vadd_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vadd_vx_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vadd_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vadd_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vadd_vx_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vadd_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vadd_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vadd_vx_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vadd_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vadd_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vadd_vx_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vadd_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vadd_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vadd_vx_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vadd_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vadd_vx_u16m2(vuint16m2_t op1, 
uint16_t op2, size_t vl) { + return __riscv_th_vadd_vx_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vadd_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vadd_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vadd_vx_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vadd_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vadd_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vadd_vx_u16m8(op1, op2, vl); 
+} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vadd_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vadd_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vadd_vx_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vadd_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vadd_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vadd_vx_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m4 +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vadd_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vadd_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vadd_vx_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vadd_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vadd_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vadd_vx_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vadd_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vadd_vv_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vadd_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vadd_vx_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vadd_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vadd_vv_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vadd_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vadd_vx_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vadd_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vadd_vv_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vadd_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vadd_vx_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vadd_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vadd_vv_u64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vadd_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vadd_vx_u64m8(op1, op2, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c new file mode 100644 index 000000000000000..8f9d36a0c0a6ef8 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vneg.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vneg_v_i8m1(vint8m1_t op1, size_t vl) { + return __riscv_th_vneg_v_i8m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vneg_v_i8m2(vint8m2_t op1, size_t vl) { + return __riscv_th_vneg_v_i8m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vneg_v_i8m4(vint8m4_t op1, size_t vl) { + return __riscv_th_vneg_v_i8m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vneg_v_i8m8(vint8m8_t op1, size_t vl) { + return __riscv_th_vneg_v_i8m8(op1, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vneg_v_i16m1(vint16m1_t op1, size_t vl) { + return __riscv_th_vneg_v_i16m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vneg_v_i16m2(vint16m2_t op1, size_t vl) { + return __riscv_th_vneg_v_i16m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vneg_v_i16m4(vint16m4_t op1, size_t vl) { + return __riscv_th_vneg_v_i16m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vneg_v_i16m8(vint16m8_t op1, size_t vl) { + return __riscv_th_vneg_v_i16m8(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) 
+// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vneg_v_i32m1(vint32m1_t op1, size_t vl) { + return __riscv_th_vneg_v_i32m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vneg_v_i32m2(vint32m2_t op1, size_t vl) { + return __riscv_th_vneg_v_i32m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vneg_v_i32m4(vint32m4_t op1, size_t vl) { + return __riscv_th_vneg_v_i32m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vneg_v_i32m8(vint32m8_t op1, size_t vl) { + return __riscv_th_vneg_v_i32m8(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vneg_v_i64m1(vint64m1_t op1, size_t vl) { + return __riscv_th_vneg_v_i64m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vneg_v_i64m2(vint64m2_t op1, size_t vl) { + return __riscv_th_vneg_v_i64m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vneg_v_i64m4(vint64m4_t op1, size_t vl) { + return __riscv_th_vneg_v_i64m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vneg_v_i64m8(vint64m8_t op1, size_t vl) { + return __riscv_th_vneg_v_i64m8(op1, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c new file mode 100644 index 000000000000000..ac65dcae869a734 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vrsub.c @@ -0,0 +1,326 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vrsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vrsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vrsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vrsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vrsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vrsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vrsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vrsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] 
+// +vint32m1_t test_vrsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vrsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vrsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vrsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vrsub_vx_i64m1(vint64m1_t 
op1, int64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vrsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vrsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vrsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u8m1(op1, 
op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vrsub_vx_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m2 +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 
noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrsub_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vrsub_vx_u64m8(op1, op2, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c new file mode 100644 index 000000000000000..b2577fec84c2ff6 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/thead/vsub.c @@ -0,0 +1,646 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local 
@test_vsub_vv_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vsub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vsub_vx_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vsub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vsub_vx_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vsub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vsub_vx_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vsub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vsub_vx_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vsub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vsub_vx_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vsub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vsub_vx_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t 
test_vsub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vsub_vx_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vsub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vsub_vx_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vsub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return 
__riscv_th_vsub_vv_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vsub_vx_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vsub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vsub_vx_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vsub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vsub_vx_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vsub_vx_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vsub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vsub_vx_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vsub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vsub_vx_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vsub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vsub_vx_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vsub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vsub_vx_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vsub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vsub_vx_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vsub_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint8m1_t test_vsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vsub_vx_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vsub_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vsub_vx_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vsub_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { + return 
__riscv_th_vsub_vx_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vsub_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vsub_vx_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vsub_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vsub_vx_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vsub_vv_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vsub_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vsub_vx_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vsub_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vsub_vx_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vsub_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vsub_vx_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsub_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vsub_vx_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsub_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vsub_vx_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsub_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vsub_vx_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( 
poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsub_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vsub_vv_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vsub_vx_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsub_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vsub_vv_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vsub_vx_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t 
test_vsub_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vsub_vv_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vsub_vx_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsub_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vsub_vv_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vsub_vx_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsub_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return 
__riscv_th_vsub_vv_u64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsub_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vsub_vx_u64m8(op1, op2, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c new file mode 100644 index 000000000000000..572aa571b4ef6b7 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vadd.c @@ -0,0 +1,646 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_vadd_vv_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vadd_vx_i8m1(vint8m1_t op1, 
int8_t op2, size_t vl) { + return __riscv_vadd_vx_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_vadd_vv_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_vadd_vx_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_vadd_vv_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_vadd_vx_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i8m8 
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_vadd_vv_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_vadd_vx_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_vadd_vv_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_vadd_vx_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_vadd_vv_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_vadd_vx_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_vadd_vv_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_vadd_vx_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, 
[[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_vadd_vv_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_vadd_vx_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_vadd_vv_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_vadd_vx_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t 
test_vadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_vadd_vv_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_vadd_vx_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_vadd_vv_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_vadd_vx_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_vadd_vv_i32m8(op1, 
op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_vadd_vx_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_vadd_vv_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_vadd_vx_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_vadd_vv_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_vadd_vx_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_vadd_vv_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_vadd_vx_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_vadd_vv_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_vadd_vx_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vadd_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_vadd_vv_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vadd_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_vadd_vx_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vadd_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_vadd_vv_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint8m2_t test_vadd_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_vadd_vx_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vadd_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_vadd_vv_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vadd_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_vadd_vx_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vadd_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_vadd_vv_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vadd_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_vadd_vx_u8m8(op1, 
op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vadd_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_vadd_vv_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vadd_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_vadd_vx_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vadd_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_vadd_vv_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vadd_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_vadd_vx_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m4 +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vadd_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_vadd_vv_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vadd_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_vadd_vx_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vadd_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_vadd_vv_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vadd_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_vadd_vx_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] 
{ +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vadd_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_vadd_vv_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vadd_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_vadd_vx_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vadd_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_vadd_vv_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vadd_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_vadd_vx_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vadd_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_vadd_vv_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vadd_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_vadd_vx_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vadd_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_vadd_vv_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vadd_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_vadd_vx_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vadd_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_vadd_vv_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vadd_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_vadd_vx_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vadd_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_vadd_vv_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vadd_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_vadd_vx_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vadd_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t 
vl) { + return __riscv_vadd_vv_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vadd_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_vadd_vx_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vv_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vadd_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_vadd_vv_u64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vadd_vx_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vadd_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_vadd_vx_u64m8(op1, op2, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c new file mode 100644 index 000000000000000..7a8be59ae85b00b --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vneg.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ 
+// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vneg_v_i8m1(vint8m1_t op1, size_t vl) { + return __riscv_vneg_v_i8m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vneg_v_i8m2(vint8m2_t op1, size_t vl) { + return __riscv_vneg_v_i8m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vneg_v_i8m4(vint8m4_t op1, size_t vl) { + return __riscv_vneg_v_i8m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vneg_v_i8m8(vint8m8_t op1, size_t vl) { + return __riscv_vneg_v_i8m8(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vneg_v_i16m1(vint16m1_t op1, size_t vl) { + return __riscv_vneg_v_i16m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vneg_v_i16m2(vint16m2_t op1, size_t vl) { + return __riscv_vneg_v_i16m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vneg_v_i16m4(vint16m4_t op1, size_t vl) { + return __riscv_vneg_v_i16m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vneg_v_i16m8(vint16m8_t op1, size_t vl) { + return __riscv_vneg_v_i16m8(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vneg_v_i32m1(vint32m1_t op1, size_t vl) { + return __riscv_vneg_v_i32m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m2 +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vneg_v_i32m2(vint32m2_t op1, size_t vl) { + return __riscv_vneg_v_i32m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vneg_v_i32m4(vint32m4_t op1, size_t vl) { + return __riscv_vneg_v_i32m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vneg_v_i32m8(vint32m8_t op1, size_t vl) { + return __riscv_vneg_v_i32m8(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vneg_v_i64m1(vint64m1_t op1, size_t vl) { + return __riscv_vneg_v_i64m1(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vneg_v_i64m2(vint64m2_t op1, size_t vl) { + return 
__riscv_vneg_v_i64m2(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vneg_v_i64m4(vint64m4_t op1, size_t vl) { + return __riscv_vneg_v_i64m4(op1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vneg_v_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vneg_v_i64m8(vint64m8_t op1, size_t vl) { + return __riscv_vneg_v_i64m8(op1, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c new file mode 100644 index 000000000000000..c4afe312ba9a116 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vrsub.c @@ -0,0 +1,326 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vrsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_vrsub_vx_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_vrsub_vx_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vrsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_vrsub_vx_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vrsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_vrsub_vx_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vrsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_vrsub_vx_i8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vrsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_vrsub_vx_i16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vrsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_vrsub_vx_i16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vrsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_vrsub_vx_i16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vrsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_vrsub_vx_i16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vrsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_vrsub_vx_i32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vrsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_vrsub_vx_i32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vrsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_vrsub_vx_i32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vrsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_vrsub_vx_i32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vrsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_vrsub_vx_i64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vrsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_vrsub_vx_i64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vrsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_vrsub_vx_i64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vrsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_vrsub_vx_i64m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_vrsub_vx_u8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i8.i8.i64( poison, [[OP1]], i8 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_vrsub_vx_u8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_vrsub_vx_u8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_vrsub_vx_u8m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_vrsub_vx_u16m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint16m2_t test_vrsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_vrsub_vx_u16m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_vrsub_vx_u16m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_vrsub_vx_u16m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_vrsub_vx_u32m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t 
test_vrsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_vrsub_vx_u32m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_vrsub_vx_u32m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_vrsub_vx_u32m8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_vrsub_vx_u64m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t 
vl) { + return __riscv_vrsub_vx_u64m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_vrsub_vx_u64m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrsub_vx_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vrsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrsub_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_vrsub_vx_u64m8(op1, op2, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c new file mode 100644 index 000000000000000..ac55e5f88b354d8 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-single-width-add/wrappers/vsub.c @@ -0,0 +1,646 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t 
test_vsub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_vsub_vv_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vsub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_vsub_vx_i8m1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vsub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_vsub_vv_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vsub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_vsub_vx_i8m2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vsub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_vsub_vv_i8m4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_vsub_vx_i8m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint8m4_t test_vsub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) {
+ return __riscv_vsub_vx_i8m4(op1, op2, vl);
+}
+
+// NOTE(review): the CHECK lines in this file look machine-generated (update_cc_test_checks.py style);
+// do not hand-edit them -- regenerate instead. Each test below only verifies that the
+// __riscv_vsub_* wrapper lowers to the corresponding @llvm.riscv.th.vsub.* intrinsic call
+// with a poison passthru operand.
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i8m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint8m8_t test_vsub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_i8m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i8m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint8m8_t test_vsub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) {
+ return __riscv_vsub_vx_i8m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m1_t test_vsub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_i16m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m1_t test_vsub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) {
+ return __riscv_vsub_vx_i16m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m2_t test_vsub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_i16m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m2_t test_vsub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) {
+ return __riscv_vsub_vx_i16m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m4_t test_vsub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_i16m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m4_t test_vsub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) {
+ return __riscv_vsub_vx_i16m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i16m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m8_t test_vsub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_i16m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i16m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint16m8_t test_vsub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) {
+ return __riscv_vsub_vx_i16m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m1_t test_vsub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_i32m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m1_t test_vsub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) {
+ return __riscv_vsub_vx_i32m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m2_t test_vsub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_i32m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m2_t test_vsub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) {
+ return __riscv_vsub_vx_i32m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m4_t test_vsub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_i32m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m4_t test_vsub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) {
+ return __riscv_vsub_vx_i32m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i32m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m8_t test_vsub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_i32m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i32m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint32m8_t test_vsub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) {
+ return __riscv_vsub_vx_i32m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m1_t test_vsub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_i64m1(op1, op2, vl);
+}
+
+// NOTE(review): 64-bit scalar operands below carry no signext/zeroext attribute,
+// unlike the 8/16/32-bit cases above -- consistent with what these CHECK lines record.
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m1_t test_vsub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) {
+ return __riscv_vsub_vx_i64m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m2_t test_vsub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_i64m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m2_t test_vsub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) {
+ return __riscv_vsub_vx_i64m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m4_t test_vsub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_i64m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m4_t test_vsub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) {
+ return __riscv_vsub_vx_i64m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_i64m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m8_t test_vsub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_i64m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_i64m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vint64m8_t test_vsub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) {
+ return __riscv_vsub_vx_i64m8(op1, op2, vl);
+}
+
+// NOTE(review): unsigned variants lower to the same @llvm.riscv.th.vsub.* intrinsics as the
+// signed ones above; per the CHECK lines, scalar operands are zeroext for u8/u16,
+// signext for u32, and unannotated for u64.
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m1_t test_vsub_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_u8m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m1_t test_vsub_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) {
+ return __riscv_vsub_vx_u8m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m2_t test_vsub_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_u8m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m2_t test_vsub_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) {
+ return __riscv_vsub_vx_u8m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m4_t test_vsub_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_u8m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m4_t test_vsub_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) {
+ return __riscv_vsub_vx_u8m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u8m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m8_t test_vsub_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_u8m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u8m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint8m8_t test_vsub_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) {
+ return __riscv_vsub_vx_u8m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m1_t test_vsub_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_u16m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m1_t test_vsub_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) {
+ return __riscv_vsub_vx_u16m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m2_t test_vsub_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_u16m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m2_t test_vsub_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) {
+ return __riscv_vsub_vx_u16m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m4_t test_vsub_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_u16m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m4_t test_vsub_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) {
+ return __riscv_vsub_vx_u16m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u16m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m8_t test_vsub_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_u16m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u16m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint16m8_t test_vsub_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) {
+ return __riscv_vsub_vx_u16m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m1_t test_vsub_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_u32m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m1_t test_vsub_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) {
+ return __riscv_vsub_vx_u32m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m2_t test_vsub_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_u32m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m2_t test_vsub_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) {
+ return __riscv_vsub_vx_u32m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m4_t test_vsub_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_u32m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m4_t test_vsub_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) {
+ return __riscv_vsub_vx_u32m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u32m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m8_t test_vsub_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_u32m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u32m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint32m8_t test_vsub_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) {
+ return __riscv_vsub_vx_u32m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m1_t test_vsub_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) {
+ return __riscv_vsub_vv_u64m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m1
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m1_t test_vsub_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) {
+ return __riscv_vsub_vx_u64m1(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m2_t test_vsub_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) {
+ return __riscv_vsub_vv_u64m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m2
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m2_t test_vsub_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) {
+ return __riscv_vsub_vx_u64m2(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m4_t test_vsub_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) {
+ return __riscv_vsub_vv_u64m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m4
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m4_t test_vsub_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) {
+ return __riscv_vsub_vx_u64m4(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vv_u64m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m8_t test_vsub_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) {
+ return __riscv_vsub_vv_u64m8(op1, op2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vsub_vx_u64m8
+// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vsub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m8_t test_vsub_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) {
+ return __riscv_vsub_vx_u64m8(op1, op2, vl);
+}