; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+d,+experimental-zvlsseg,+experimental-zfh \
; RUN:   -verify-machineinstrs < %s | FileCheck %s

declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.nxv16i16(i16*, i64, i64)
declare {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, i64, <vscale x 16 x i1>, i64)

define <vscale x 16 x i16> @test_vlsseg2_nxv16i16(i16* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg2_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e16,m4,ta,mu
; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1
; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.nxv16i16(i16* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %0, 1
  ret <vscale x 16 x i16> %1
}

define <vscale x 16 x i16> @test_vlsseg2_mask_nxv16i16(i16* %base, i64 %offset, i64 %vl, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv16i16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e16,m4,ta,mu
; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1
; CHECK-NEXT:    vmv4r.v v8, v4
; CHECK-NEXT:    vsetvli a2, a2, e16,m4,tu,mu
; CHECK-NEXT:    vlsseg2e16.v v4, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.nxv16i16(i16* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %0, 0
  %2 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>} @llvm.riscv.vlsseg2.mask.nxv16i16(<vscale x 16 x i16> %1,<vscale x 16 x i16> %1, i16* %base, i64 %offset, <vscale x 16 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>} %2, 1
  ret <vscale x 16 x i16> %3
}

declare {<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg2.nxv4i32(i32*, i64, i64)
declare {<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg2.mask.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, i64, <vscale x 4 x i1>, i64)

define <vscale x 4 x i32> @test_vlsseg2_nxv4i32(i32* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg2_nxv4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg2e32.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg2.nxv4i32(i32* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>} %0, 1
  ret <vscale x 4 x i32> %1
}

define <vscale x 4 x i32> @test_vlsseg2_mask_nxv4i32(i32* %base, i64 %offset, i64 %vl, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg2e32.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,tu,mu
; CHECK-NEXT:    vlsseg2e32.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg2.nxv4i32(i32* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>} %0, 0
  %2 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg2.mask.nxv4i32(<vscale x 4 x i32> %1,<vscale x 4 x i32> %1, i32* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>} %2, 1
  ret <vscale x 4 x i32> %3
}

declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg3.nxv4i32(i32*, i64, i64)
declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg3.mask.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, i64, <vscale x 4 x i1>, i64)

define <vscale x 4 x i32> @test_vlsseg3_nxv4i32(i32* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg3_nxv4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg3e32.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg3.nxv4i32(i32* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} %0, 1
  ret <vscale x 4 x i32> %1
}

define <vscale x 4 x i32> @test_vlsseg3_mask_nxv4i32(i32* %base, i64 %offset, i64 %vl, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: test_vlsseg3_mask_nxv4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg3e32.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vmv2r.v v10, v6
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,tu,mu
; CHECK-NEXT:    vlsseg3e32.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg3.nxv4i32(i32* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} %0, 0
  %2 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg3.mask.nxv4i32(<vscale x 4 x i32> %1,<vscale x 4 x i32> %1,<vscale x 4 x i32> %1, i32* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} %2, 1
  ret <vscale x 4 x i32> %3
}

declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg4.nxv4i32(i32*, i64, i64)
declare {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg4.mask.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, i64, <vscale x 4 x i1>, i64)

define <vscale x 4 x i32> @test_vlsseg4_nxv4i32(i32* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg4_nxv4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg4e32.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg4.nxv4i32(i32* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} %0, 1
  ret <vscale x 4 x i32> %1
}

define <vscale x 4 x i32> @test_vlsseg4_mask_nxv4i32(i32* %base, i64 %offset, i64 %vl, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: test_vlsseg4_mask_nxv4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg4e32.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vmv2r.v v10, v6
; CHECK-NEXT:    vmv2r.v v12, v6
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,tu,mu
; CHECK-NEXT:    vlsseg4e32.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg4.nxv4i32(i32* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} %0, 0
  %2 = tail call {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} @llvm.riscv.vlsseg4.mask.nxv4i32(<vscale x 4 x i32> %1,<vscale x 4 x i32> %1,<vscale x 4 x i32> %1,<vscale x 4 x i32> %1, i32* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>} %2, 1
  ret <vscale x 4 x i32> %3
}

declare {<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg2.nxv16i8(i8*, i64, i64)
declare {<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg2.mask.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, i64, <vscale x 16 x i1>, i64)

define <vscale x 16 x i8> @test_vlsseg2_nxv16i8(i8* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg2_nxv16i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e8,m2,ta,mu
; CHECK-NEXT:    vlsseg2e8.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg2.nxv16i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>} %0, 1
  ret <vscale x 16 x i8> %1
}

define <vscale x 16 x i8> @test_vlsseg2_mask_nxv16i8(i8* %base, i64 %offset, i64 %vl, <vscale x 16 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv16i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e8,m2,ta,mu
; CHECK-NEXT:    vlsseg2e8.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vsetvli a2, a2, e8,m2,tu,mu
; CHECK-NEXT:    vlsseg2e8.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg2.nxv16i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>} %0, 0
  %2 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg2.mask.nxv16i8(<vscale x 16 x i8> %1,<vscale x 16 x i8> %1, i8* %base, i64 %offset, <vscale x 16 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>} %2, 1
  ret <vscale x 16 x i8> %3
}

declare {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg3.nxv16i8(i8*, i64, i64)
declare {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg3.mask.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, i64, <vscale x 16 x i1>, i64)

define <vscale x 16 x i8> @test_vlsseg3_nxv16i8(i8* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg3_nxv16i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e8,m2,ta,mu
; CHECK-NEXT:    vlsseg3e8.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>} @llvm.riscv.vlsseg3.nxv16i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>} %0, 1
  ret <vscale x 16 x i8> %1
}

define <vscale x 16 x i8> @test_vlsseg3_mask_nxv16i8(i8* %base, i64 %offset, i64 %vl, <vscale x 16 x i1>
%mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m2,ta,mu ; CHECK-NEXT: vlsseg3e8.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli a2, a2, e8,m2,tu,mu ; CHECK-NEXT: vlsseg3e8.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv16i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv16i8( %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv16i8(i8*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv16i8(,,,, i8*, i64, , i64) define @test_vlsseg4_nxv16i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,m2,ta,mu ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv16i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv16i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv16i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m2,ta,mu ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli a2, a2, e8,m2,tu,mu ; CHECK-NEXT: vlsseg4e8.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv16i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv16i8( %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv1i64(i64*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv1i64(,, i64*, i64, , i64) define @test_vlsseg2_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg2e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv1i64( %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv1i64(i64*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv1i64(,,, i64*, i64, , i64) define @test_vlsseg3_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed 
$v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg3e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv1i64( %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv1i64(i64*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv1i64(,,,, i64*, i64, , i64) define @test_vlsseg4_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg4e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv1i64( %1, %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv1i64(i64*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i64(,,,,, i64*, i64, , i64) define @test_vlsseg5_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg5e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i64( %1, %1, %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare 
{,,,,,} @llvm.riscv.vlsseg6.nxv1i64(i64*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i64(,,,,,, i64*, i64, , i64) define @test_vlsseg6_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg6e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i64( %1, %1, %1, %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv1i64(i64*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i64(,,,,,,, i64*, i64, , i64) define @test_vlsseg7_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i64( %1, %1, %1, %1, %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i64(i64*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i64(,,,,,,,, i64*, i64, , i64) define @test_vlsseg8_nxv1i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: 
test_vlsseg8_mask_nxv1i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i64( %1, %1, %1, %1, %1, %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv1i32(i32*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv1i32(,, i32*, i64, , i64) define @test_vlsseg2_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv1i32( %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv1i32(i32*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv1i32(,,, i32*, i64, , i64) define @test_vlsseg3_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv1i32( %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv1i32(i32*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv1i32(,,,, i32*, i64, , i64) define @test_vlsseg4_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv1i32: ; CHECK: # %bb.0: # 
%entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv1i32( %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv1i32(i32*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i32(,,,,, i32*, i64, , i64) define @test_vlsseg5_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i32( %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv1i32(i32*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i32(,,,,,, i32*, i64, , i64) define @test_vlsseg6_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = 
tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i32( %1, %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32(i32*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i32(,,,,,,, i32*, i64, , i64) define @test_vlsseg7_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i32( %1, %1, %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32(i32*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i32(,,,,,,,, i32*, i64, , i64) define @test_vlsseg8_nxv1i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv1i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i32( %1, %1, %1, %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv8i16(i16*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv8i16(,, i16*, i64, , i64) define @test_vlsseg2_nxv8i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m2,ta,mu ; 
CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv8i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vsetvli a2, a2, e16,m2,tu,mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv8i16( %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv8i16(i16*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv8i16(,,, i16*, i64, , i64) define @test_vlsseg3_nxv8i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv8i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli a2, a2, e16,m2,tu,mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv8i16( %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv8i16(i16*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv8i16(,,,, i16*, i64, , i64) define @test_vlsseg4_nxv8i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv8i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv8i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli a2, a2, e16,m2,tu,mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv8i16( %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,} 
@llvm.riscv.vlsseg2.nxv4i8(i8*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv4i8(,, i8*, i64, , i64) define @test_vlsseg2_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv4i8( %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv4i8(i8*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv4i8(,,, i8*, i64, , i64) define @test_vlsseg3_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv4i8( %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv4i8(i8*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv4i8(,,,, i8*, i64, , i64) define @test_vlsseg4_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = 
tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv4i8( %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv4i8(i8*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv4i8(,,,,, i8*, i64, , i64) define @test_vlsseg5_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv4i8( %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv4i8(i8*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv4i8(,,,,,, i8*, i64, , i64) define @test_vlsseg6_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv4i8( %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv4i8(i8*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv4i8(,,,,,,, i8*, i64, , i64) define @test_vlsseg7_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv4i8: ; 
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv4i8( %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8(i8*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv4i8(,,,,,,,, i8*, i64, , i64) define @test_vlsseg8_nxv4i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv4i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv4i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf2,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf2,tu,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv4i8( %1, %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv1i16(i16*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv1i16(,, i16*, i64, , i64) define @test_vlsseg2_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv1i16( %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv1i16(i16*, i64, i64) declare {,,} 
@llvm.riscv.vlsseg3.mask.nxv1i16(,,, i16*, i64, , i64) define @test_vlsseg3_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv1i16( %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv1i16(i16*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv1i16(,,,, i16*, i64, , i64) define @test_vlsseg4_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv1i16( %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv1i16(i16*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i16(,,,,, i16*, i64, , i64) define @test_vlsseg5_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i16( %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv1i16(i16*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i16(,,,,,, i16*, i64, , i64) define @test_vlsseg6_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i16( %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16(i16*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i16(,,,,,,, i16*, i64, , i64) define @test_vlsseg7_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i16( %1, %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16(i16*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i16(,,,,,,,, i16*, i64, , i64) define @test_vlsseg8_nxv1i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: 
# kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv1i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i16( %1, %1, %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2i32(i32*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2i32(,, i32*, i64, , i64) define @test_vlsseg2_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2i32( %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2i32(i32*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2i32(,,, i32*, i64, , i64) define @test_vlsseg3_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2i32( %1, %1, %1, i32* %base, i64 %offset, 
%mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2i32(i32*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv2i32(,,,, i32*, i64, , i64) define @test_vlsseg4_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2i32( %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv2i32(i32*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv2i32(,,,,, i32*, i64, , i64) define @test_vlsseg5_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv2i32( %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv2i32(i32*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2i32(,,,,,, i32*, i64, , i64) define @test_vlsseg6_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; 
CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2i32( %1, %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32(i32*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2i32(,,,,,,, i32*, i64, , i64) define @test_vlsseg7_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2i32( %1, %1, %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32(i32*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2i32(,,,,,,,, i32*, i64, , i64) define @test_vlsseg8_nxv2i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv2i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv2i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2i32( %1, %1, %1, %1, %1, %1, %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } 
declare {<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg2.nxv8i8(i8*, i64, i64)
declare {<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg2.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>, i8*, i64, <vscale x 8 x i1>, i64)

define <vscale x 8 x i8> @test_vlsseg2_nxv8i8(i8* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg2_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e8,m1,ta,mu
; CHECK-NEXT:    vlsseg2e8.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg2.nxv8i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>} %0, 1
  ret <vscale x 8 x i8> %1
}

define <vscale x 8 x i8> @test_vlsseg2_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e8,m1,ta,mu
; CHECK-NEXT:    vlsseg2e8.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vsetvli a2, a2, e8,m1,tu,mu
; CHECK-NEXT:    vlsseg2e8.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg2.nxv8i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>} %0, 0
  %2 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg2.mask.nxv8i8(<vscale x 8 x i8> %1,<vscale x 8 x i8> %1, i8* %base, i64 %offset, <vscale x 8 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>} %2, 1
  ret <vscale x 8 x i8> %3
}

declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg3.nxv8i8(i8*, i64, i64)
declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg3.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i8*, i64, <vscale x 8 x i1>, i64)

define <vscale x 8 x i8> @test_vlsseg3_nxv8i8(i8* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg3_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e8,m1,ta,mu
; CHECK-NEXT:    vlsseg3e8.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg3.nxv8i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} %0, 1
  ret <vscale x 8 x i8> %1
}

define <vscale x 8 x i8> @test_vlsseg3_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: test_vlsseg3_mask_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e8,m1,ta,mu
; CHECK-NEXT:    vlsseg3e8.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vsetvli a2, a2, e8,m1,tu,mu
; CHECK-NEXT:    vlsseg3e8.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg3.nxv8i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} %0, 0
  %2 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg3.mask.nxv8i8(<vscale x 8 x i8> %1,<vscale x 8 x i8> %1,<vscale x 8 x i8> %1, i8* %base, i64 %offset, <vscale x 8 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} %2, 1
  ret <vscale x 8 x i8> %3
}

declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg4.nxv8i8(i8*, i64, i64)
declare {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg4.mask.nxv8i8(<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>, i8*, i64, <vscale x 8 x i1>, i64)

define <vscale x 8 x i8> @test_vlsseg4_nxv8i8(i8* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg4_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e8,m1,ta,mu
; CHECK-NEXT:    vlsseg4e8.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg4.nxv8i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} %0, 1
  ret <vscale x 8 x i8> %1
}

define <vscale x 8 x i8> @test_vlsseg4_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: test_vlsseg4_mask_nxv8i8:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e8,m1,ta,mu
; CHECK-NEXT:    vlsseg4e8.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vmv1r.v v10, v7
; CHECK-NEXT:    vsetvli a2, a2, e8,m1,tu,mu
; CHECK-NEXT:    vlsseg4e8.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} @llvm.riscv.vlsseg4.nxv8i8(i8* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>,<vscale x 8 x i8>} %0, 0
%2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv8i8( %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv8i8(i8*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv8i8(,,,,, i8*, i64, , i64) define @test_vlsseg5_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,m1,tu,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv8i8( %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv8i8(i8*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv8i8(,,,,,, i8*, i64, , i64) define @test_vlsseg6_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,m1,tu,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv8i8( %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8(i8*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv8i8(,,,,,,, i8*, i64, , i64) define @test_vlsseg7_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv8i8: ; 
CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,m1,tu,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv8i8( %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8(i8*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv8i8(,,,,,,,, i8*, i64, , i64) define @test_vlsseg8_nxv8i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv8i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv8i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m1,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,m1,tu,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv8i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv8i8( %1, %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv4i64(i64*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv4i64(,, i64*, i64, , i64) define @test_vlsseg2_nxv4i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m4,ta,mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv4i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv4i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m4,ta,mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vsetvli a2, a2, e64,m4,tu,mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv4i64( %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv4i16(i16*, i64, i64) declare {,} 
@llvm.riscv.vlsseg2.mask.nxv4i16(,, i16*, i64, , i64) define @test_vlsseg2_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv4i16( %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv4i16(i16*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv4i16(,,, i16*, i64, , i64) define @test_vlsseg3_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv4i16( %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv4i16(i16*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv4i16(,,,, i16*, i64, , i64) define @test_vlsseg4_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = 
tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv4i16( %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv4i16(i16*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv4i16(,,,,, i16*, i64, , i64) define @test_vlsseg5_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv4i16( %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv4i16(i16*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv4i16(,,,,,, i16*, i64, , i64) define @test_vlsseg6_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv4i16( %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16(i16*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv4i16(,,,,,,, i16*, i64, , i64) define @test_vlsseg7_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, 
%mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv4i16( %1, %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16(i16*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv4i16(,,,,,,,, i16*, i64, , i64) define @test_vlsseg8_nxv4i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv4i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv4i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv4i16( %1, %1, %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv1i8(i8*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv1i8(,, i8*, i64, , i64) define @test_vlsseg2_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv1i8( %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} 
@llvm.riscv.vlsseg3.nxv1i8(i8*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv1i8(,,, i8*, i64, , i64) define @test_vlsseg3_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv1i8( %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv1i8(i8*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv1i8(,,,, i8*, i64, , i64) define @test_vlsseg4_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv1i8( %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv1i8(i8*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i8(,,,,, i8*, i64, , i64) define @test_vlsseg5_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 
; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv1i8( %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv1i8(i8*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i8(,,,,,, i8*, i64, , i64) define @test_vlsseg6_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1i8( %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8(i8*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i8(,,,,,,, i8*, i64, , i64) define @test_vlsseg7_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1i8( %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8(i8*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i8(,,,,,,,, i8*, i64, , i64) define @test_vlsseg8_nxv1i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; 
CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv1i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf8,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf8,tu,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1i8( %1, %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2i8(i8*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2i8(,, i8*, i64, , i64) define @test_vlsseg2_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: vlsseg2e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2i8( %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2i8(i8*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2i8(,,, i8*, i64, , i64) define @test_vlsseg3_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: vlsseg3e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2i8( %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2i8(i8*, i64, i64) declare 
{,,,} @llvm.riscv.vlsseg4.mask.nxv2i8(,,,, i8*, i64, , i64) define @test_vlsseg4_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: vlsseg4e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2i8( %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv2i8(i8*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv2i8(,,,,, i8*, i64, , i64) define @test_vlsseg5_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: vlsseg5e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv2i8( %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv2i8(i8*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2i8(,,,,,, i8*, i64, , i64) define @test_vlsseg6_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg6e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: 
vlsseg6e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2i8( %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8(i8*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2i8(,,,,,,, i8*, i64, , i64) define @test_vlsseg7_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: vlsseg7e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2i8( %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8(i8*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2i8(,,,,,,,, i8*, i64, , i64) define @test_vlsseg8_nxv2i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv2i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv2i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,mf4,ta,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e8,mf4,tu,mu ; CHECK-NEXT: vlsseg8e8.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2i8( %1, %1, %1, %1, %1, %1, %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv8i32(i32*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv8i32(,, i32*, i64, , i64) define @test_vlsseg2_nxv8i32(i32* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: 
test_vlsseg2_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m4,ta,mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv8i32(i32* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv8i32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m4,ta,mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vsetvli a2, a2, e32,m4,tu,mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8i32(i32* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv8i32( %1, %1, i32* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv32i8(i8*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv32i8(,, i8*, i64, , i64) define @test_vlsseg2_nxv32i8(i8* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv32i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e8,m4,ta,mu ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1 ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv32i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv32i8(i8* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv32i8: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e8,m4,ta,mu ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vsetvli a2, a2, e8,m4,tu,mu ; CHECK-NEXT: vlsseg2e8.v v4, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv32i8(i8* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv32i8( %1, %1, i8* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2i16(i16*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2i16(,, i16*, i64, , i64) define @test_vlsseg2_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2i16( %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2i16(i16*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2i16(,,, i16*, 
i64, , i64) define @test_vlsseg3_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2i16( %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2i16(i16*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv2i16(,,,, i16*, i64, , i64) define @test_vlsseg4_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2i16( %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv2i16(i16*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv2i16(,,,,, i16*, i64, , i64) define @test_vlsseg5_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail 
call {,,,,} @llvm.riscv.vlsseg5.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv2i16( %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv2i16(i16*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2i16(,,,,,, i16*, i64, , i64) define @test_vlsseg6_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2i16( %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16(i16*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2i16(,,,,,,, i16*, i64, , i64) define @test_vlsseg7_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2i16( %1, %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16(i16*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2i16(,,,,,,,, i16*, i64, , i64) define @test_vlsseg8_nxv2i16(i16* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed 
$v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv2i16(i16* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv2i16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2i16(i16* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2i16( %1, %1, %1, %1, %1, %1, %1, %1, i16* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2i64(i64*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2i64(,, i64*, i64, , i64) define @test_vlsseg2_nxv2i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vsetvli a2, a2, e64,m2,tu,mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2i64( %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2i64(i64*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2i64(,,, i64*, i64, , i64) define @test_vlsseg3_nxv2i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli a2, a2, e64,m2,tu,mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2i64( %1, %1, %1, i64* %base, i64 
%offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2i64(i64*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv2i64(,,,, i64*, i64, , i64) define @test_vlsseg4_nxv2i64(i64* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv2i64(i64* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2i64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli a2, a2, e64,m2,tu,mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2i64(i64* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2i64( %1, %1, %1, %1, i64* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv16f16(half*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv16f16(,, half*, i64, , i64) define @test_vlsseg2_nxv16f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv16f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m4,ta,mu ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv16f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv16f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m4,ta,mu ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vsetvli a2, a2, e16,m4,tu,mu ; CHECK-NEXT: vlsseg2e16.v v4, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv16f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv16f16( %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv4f64(double*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv4f64(,, double*, i64, , i64) define @test_vlsseg2_nxv4f64(double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv4f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m4,ta,mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv4f64(double* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv4f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m4,ta,mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vsetvli a2, a2, e64,m4,tu,mu ; CHECK-NEXT: vlsseg2e64.v v4, (a0), a1, v0.t ; CHECK-NEXT: # 
kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x double>,<vscale x 4 x double>} @llvm.riscv.vlsseg2.nxv4f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x double>,<vscale x 4 x double>} %0, 0
  %2 = tail call {<vscale x 4 x double>,<vscale x 4 x double>} @llvm.riscv.vlsseg2.mask.nxv4f64(<vscale x 4 x double> %1,<vscale x 4 x double> %1, double* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x double>,<vscale x 4 x double>} %2, 1
  ret <vscale x 4 x double> %3
}

declare {<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg2.nxv1f64(double*, i64, i64)
declare {<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg2.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>, double*, i64, <vscale x 1 x i1>, i64)

define <vscale x 1 x double> @test_vlsseg2_nxv1f64(double* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg2_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg2e64.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg2.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>} %0, 1
  ret <vscale x 1 x double> %1
}

define <vscale x 1 x double> @test_vlsseg2_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, <vscale x 1 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg2e64.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,tu,mu
; CHECK-NEXT:    vlsseg2e64.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg2.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>} %0, 0
  %2 = tail call {<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg2.mask.nxv1f64(<vscale x 1 x double> %1,<vscale x 1 x double> %1, double* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>} %2, 1
  ret <vscale x 1 x double> %3
}

declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg3.nxv1f64(double*, i64, i64)
declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg3.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, double*, i64, <vscale x 1 x i1>, i64)

define <vscale x 1 x double> @test_vlsseg3_nxv1f64(double* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg3_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg3e64.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg3.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 1
  ret <vscale x 1 x double> %1
}

define <vscale x 1 x double> @test_vlsseg3_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, <vscale x 1 x i1> %mask) {
; CHECK-LABEL: test_vlsseg3_mask_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg3e64.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,tu,mu
; CHECK-NEXT:    vlsseg3e64.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg3.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 0
  %2 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg3.mask.nxv1f64(<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1, double* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %2, 1
  ret <vscale x 1 x double> %3
}

declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg4.nxv1f64(double*, i64, i64)
declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg4.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, double*, i64, <vscale x 1 x i1>, i64)

define <vscale x 1 x double> @test_vlsseg4_nxv1f64(double* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg4_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg4e64.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg4.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 1
  ret <vscale x 1 x double> %1
}

define <vscale x 1 x double> @test_vlsseg4_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, <vscale x 1 x i1> %mask) {
; CHECK-LABEL: test_vlsseg4_mask_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg4e64.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vmv1r.v v10, v7
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,tu,mu
; CHECK-NEXT:    vlsseg4e64.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg4.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 0
  %2 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg4.mask.nxv1f64(<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1, double* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %2, 1
  ret <vscale x 1 x double> %3
}

declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg5.nxv1f64(double*, i64, i64)
declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg5.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, double*, i64, <vscale x 1 x i1>, i64)

define <vscale x 1 x double> @test_vlsseg5_nxv1f64(double* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg5_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg5e64.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg5.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 1
  ret <vscale x 1 x double> %1
}

define <vscale x 1 x double> @test_vlsseg5_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, <vscale x 1 x i1> %mask) {
; CHECK-LABEL: test_vlsseg5_mask_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg5e64.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vmv1r.v v10, v7
; CHECK-NEXT:    vmv1r.v v11, v7
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,tu,mu
; CHECK-NEXT:    vlsseg5e64.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg5.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 0
  %2 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg5.mask.nxv1f64(<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1, double* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %2, 1
  ret <vscale x 1 x double> %3
}

declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg6.nxv1f64(double*, i64, i64)
declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg6.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, double*, i64, <vscale x 1 x i1>, i64)

define <vscale x 1 x double> @test_vlsseg6_nxv1f64(double* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg6_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg6e64.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg6.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 1
  ret <vscale x 1 x double> %1
}

define <vscale x 1 x double> @test_vlsseg6_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, <vscale x 1 x i1> %mask) {
; CHECK-LABEL: test_vlsseg6_mask_nxv1f64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e64,m1,ta,mu
; CHECK-NEXT:    vlsseg6e64.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vmv1r.v v10, v7
; CHECK-NEXT:    vmv1r.v v11, v7
; CHECK-NEXT:    vmv1r.v v12, v7
; CHECK-NEXT:    vsetvli a2, a2, e64,m1,tu,mu
; CHECK-NEXT:    vlsseg6e64.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg6.nxv1f64(double* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %0, 0
  %2 = tail call {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg6.mask.nxv1f64(<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1,<vscale x 1 x double> %1, double* %base, i64 %offset, <vscale x 1 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} %2, 1
  ret <vscale x 1 x double> %3
}

declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg7.nxv1f64(double*, i64, i64)
declare {<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>} @llvm.riscv.vlsseg7.mask.nxv1f64(<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>,<vscale x 1 x double>, double*, i64, <vscale x 1 x i1>, i64)
define @test_vlsseg7_nxv1f64(double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg7e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1f64( %1, %1, %1, %1, %1, %1, %1, double* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64(double*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1f64(,,,,,,,, double*, i64, , i64) define @test_vlsseg8_nxv1f64(double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1f64(double* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv1f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m1,ta,mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e64,m1,tu,mu ; CHECK-NEXT: vlsseg8e64.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1f64( %1, %1, %1, %1, %1, %1, %1, %1, double* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2f32(float*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2f32(,, float*, i64, , i64) define @test_vlsseg2_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2f32: ; CHECK: # %bb.0: # 
%entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2f32( %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2f32(float*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2f32(,,, float*, i64, , i64) define @test_vlsseg3_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2f32( %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2f32(float*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv2f32(,,,, float*, i64, , i64) define @test_vlsseg4_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2f32( %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv2f32(float*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv2f32(,,,,, float*, i64, , i64) define @test_vlsseg5_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; 
CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv2f32( %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv2f32(float*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2f32(,,,,,, float*, i64, , i64) define @test_vlsseg6_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2f32( %1, %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32(float*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2f32(,,,,,,, float*, i64, , i64) define @test_vlsseg7_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret 
entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv2f32( %1, %1, %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32(float*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2f32(,,,,,,,, float*, i64, , i64) define @test_vlsseg8_nxv2f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv2f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv2f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m1,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,m1,tu,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv2f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv2f32( %1, %1, %1, %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv1f16(half*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv1f16(,, half*, i64, , i64) define @test_vlsseg2_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv1f16( %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv1f16(half*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv1f16(,,, half*, i64, , i64) define @test_vlsseg3_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = 
extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv1f16( %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv1f16(half*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv1f16(,,,, half*, i64, , i64) define @test_vlsseg4_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv1f16( %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv1f16(half*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv1f16(,,,,, half*, i64, , i64) define @test_vlsseg5_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv1f16( %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv1f16(half*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1f16(,,,,,, 
half*, i64, , i64) define @test_vlsseg6_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1f16( %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16(half*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1f16(,,,,,,, half*, i64, , i64) define @test_vlsseg7_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1f16( %1, %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16(half*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1f16(,,,,,,,, half*, i64, , i64) define @test_vlsseg8_nxv1f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv1f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf4,ta,mu ; 
CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf4,tu,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1f16( %1, %1, %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv1f32(float*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv1f32(,, float*, i64, , i64) define @test_vlsseg2_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg2e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv1f32( %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv1f32(float*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv1f32(,,, float*, i64, , i64) define @test_vlsseg3_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg3e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv1f32( %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv1f32(float*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv1f32(,,,, float*, i64, , i64) define @test_vlsseg4_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: 
vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg4e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv1f32( %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv1f32(float*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv1f32(,,,,, float*, i64, , i64) define @test_vlsseg5_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg5e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv1f32( %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv1f32(float*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1f32(,,,,,, float*, i64, , i64) define @test_vlsseg6_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg6e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} 
@llvm.riscv.vlsseg6.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv1f32( %1, %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32(float*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1f32(,,,,,,, float*, i64, , i64) define @test_vlsseg7_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg7e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv1f32( %1, %1, %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32(float*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1f32(,,,,,,,, float*, i64, , i64) define @test_vlsseg8_nxv1f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv1f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv1f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,mf2,ta,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, v7 ; CHECK-NEXT: vsetvli a2, a2, e32,mf2,tu,mu ; CHECK-NEXT: vlsseg8e32.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv1f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv1f32( %1, %1, %1, %1, %1, %1, %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv8f16(half*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv8f16(,, half*, i64, , i64) define @test_vlsseg2_nxv8f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, 
e16,m2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv8f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vsetvli a2, a2, e16,m2,tu,mu ; CHECK-NEXT: vlsseg2e16.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv8f16( %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv8f16(half*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv8f16(,,, half*, i64, , i64) define @test_vlsseg3_nxv8f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv8f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli a2, a2, e16,m2,tu,mu ; CHECK-NEXT: vlsseg3e16.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv8f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv8f16( %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv8f16(half*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv8f16(,,,, half*, i64, , i64) define @test_vlsseg4_nxv8f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv8f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv8f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli a2, a2, e16,m2,tu,mu ; CHECK-NEXT: vlsseg4e16.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv8f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv8f16( %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue 
{,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv8f32(float*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv8f32(,, float*, i64, , i64) define @test_vlsseg2_nxv8f32(float* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv8f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e32,m4,ta,mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv8f32(float* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv8f32: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e32,m4,ta,mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1 ; CHECK-NEXT: vmv4r.v v8, v4 ; CHECK-NEXT: vsetvli a2, a2, e32,m4,tu,mu ; CHECK-NEXT: vlsseg2e32.v v4, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v4m4_v8m4 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv8f32(float* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv8f32( %1, %1, float* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2f64(double*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2f64(,, double*, i64, , i64) define @test_vlsseg2_nxv2f64(double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2f64(double* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vsetvli a2, a2, e64,m2,tu,mu ; CHECK-NEXT: vlsseg2e64.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2f64( %1, %1, double* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2f64(double*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2f64(,,, double*, i64, , i64) define @test_vlsseg3_nxv2f64(double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2f64(double* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vsetvli a2, a2, e64,m2,tu,mu ; CHECK-NEXT: vlsseg3e64.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2 ; CHECK-NEXT: ret entry: %0 = tail call 
{,,} @llvm.riscv.vlsseg3.nxv2f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2f64( %1, %1, %1, double* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2f64(double*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv2f64(,,,, double*, i64, , i64) define @test_vlsseg4_nxv2f64(double* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv2f64(double* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2f64: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e64,m2,ta,mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1 ; CHECK-NEXT: vmv2r.v v8, v6 ; CHECK-NEXT: vmv2r.v v10, v6 ; CHECK-NEXT: vmv2r.v v12, v6 ; CHECK-NEXT: vsetvli a2, a2, e64,m2,tu,mu ; CHECK-NEXT: vlsseg4e64.v v6, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f64(double* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2f64( %1, %1, %1, %1, double* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv4f16(half*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv4f16(,, half*, i64, , i64) define @test_vlsseg2_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv4f16( %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv4f16(half*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv4f16(,,, half*, i64, , i64) define @test_vlsseg3_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; 
CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv4f16( %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv4f16(half*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv4f16(,,,, half*, i64, , i64) define @test_vlsseg4_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define @test_vlsseg4_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv4f16( %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv4f16(half*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv4f16(,,,,, half*, i64, , i64) define @test_vlsseg5_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv4f16( %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv4f16(half*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv4f16(,,,,,, half*, i64, , i64) define @test_vlsseg6_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; 
CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv4f16( %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16(half*, i64, i64) declare {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv4f16(,,,,,,, half*, i64, , i64) define @test_vlsseg7_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg7_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 1 ret %1 } define @test_vlsseg7_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg7_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg7e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,} @llvm.riscv.vlsseg7.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,} %0, 0 %2 = tail call {,,,,,,} @llvm.riscv.vlsseg7.mask.nxv4f16( %1, %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,} %2, 1 ret %3 } declare {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16(half*, i64, i64) declare {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv4f16(,,,,,,,, half*, i64, , i64) define @test_vlsseg8_nxv4f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg8_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 1 ret %1 } define @test_vlsseg8_mask_nxv4f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg8_mask_nxv4f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,m1,ta,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vmv1r.v v13, v7 ; CHECK-NEXT: vmv1r.v v14, 
v7 ; CHECK-NEXT: vsetvli a2, a2, e16,m1,tu,mu ; CHECK-NEXT: vlsseg8e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.nxv4f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,,,} %0, 0 %2 = tail call {,,,,,,,} @llvm.riscv.vlsseg8.mask.nxv4f16( %1, %1, %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,,,} %2, 1 ret %3 } declare {,} @llvm.riscv.vlsseg2.nxv2f16(half*, i64, i64) declare {,} @llvm.riscv.vlsseg2.mask.nxv2f16(,, half*, i64, , i64) define @test_vlsseg2_nxv2f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg2_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 1 ret %1 } define @test_vlsseg2_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg2_mask_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg2e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8 ; CHECK-NEXT: ret entry: %0 = tail call {,} @llvm.riscv.vlsseg2.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,} %0, 0 %2 = tail call {,} @llvm.riscv.vlsseg2.mask.nxv2f16( %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,} %2, 1 ret %3 } declare {,,} @llvm.riscv.vlsseg3.nxv2f16(half*, i64, i64) declare {,,} @llvm.riscv.vlsseg3.mask.nxv2f16(,,, half*, i64, , i64) define @test_vlsseg3_nxv2f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg3_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 1 ret %1 } define @test_vlsseg3_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg3_mask_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg3e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9 ; CHECK-NEXT: ret entry: %0 = tail call {,,} @llvm.riscv.vlsseg3.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,} %0, 0 %2 = tail call {,,} @llvm.riscv.vlsseg3.mask.nxv2f16( %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,} %2, 1 ret %3 } declare {,,,} @llvm.riscv.vlsseg4.nxv2f16(half*, i64, i64) declare {,,,} @llvm.riscv.vlsseg4.mask.nxv2f16(,,,, half*, i64, , i64) define @test_vlsseg4_nxv2f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg4_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 1 ret %1 } define 
@test_vlsseg4_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg4_mask_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg4e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10 ; CHECK-NEXT: ret entry: %0 = tail call {,,,} @llvm.riscv.vlsseg4.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,} %0, 0 %2 = tail call {,,,} @llvm.riscv.vlsseg4.mask.nxv2f16( %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,} %2, 1 ret %3 } declare {,,,,} @llvm.riscv.vlsseg5.nxv2f16(half*, i64, i64) declare {,,,,} @llvm.riscv.vlsseg5.mask.nxv2f16(,,,,, half*, i64, , i64) define @test_vlsseg5_nxv2f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg5_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 1 ret %1 } define @test_vlsseg5_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg5_mask_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg5e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,} @llvm.riscv.vlsseg5.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,} %0, 0 %2 = tail call {,,,,} @llvm.riscv.vlsseg5.mask.nxv2f16( %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,} %2, 1 ret %3 } declare {,,,,,} @llvm.riscv.vlsseg6.nxv2f16(half*, i64, i64) declare {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2f16(,,,,,, half*, i64, , i64) define @test_vlsseg6_nxv2f16(half* %base, i64 %offset, i64 %vl) { ; CHECK-LABEL: test_vlsseg6_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 1 ret %1 } define @test_vlsseg6_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, %mask) { ; CHECK-LABEL: test_vlsseg6_mask_nxv2f16: ; CHECK: # %bb.0: # %entry ; CHECK-NEXT: vsetvli a3, a2, e16,mf2,ta,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1 ; CHECK-NEXT: vmv1r.v v8, v7 ; CHECK-NEXT: vmv1r.v v9, v7 ; CHECK-NEXT: vmv1r.v v10, v7 ; CHECK-NEXT: vmv1r.v v11, v7 ; CHECK-NEXT: vmv1r.v v12, v7 ; CHECK-NEXT: vsetvli a2, a2, e16,mf2,tu,mu ; CHECK-NEXT: vlsseg6e16.v v7, (a0), a1, v0.t ; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12 ; CHECK-NEXT: ret entry: %0 = tail call {,,,,,} @llvm.riscv.vlsseg6.nxv2f16(half* %base, i64 %offset, i64 %vl) %1 = extractvalue {,,,,,} %0, 0 %2 = tail call {,,,,,} @llvm.riscv.vlsseg6.mask.nxv2f16( %1, %1, %1, %1, %1, %1, half* %base, i64 %offset, %mask, i64 %vl) %3 = extractvalue {,,,,,} %2, 1 ret %3 } declare 
declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg7.nxv2f16(half*, i64, i64)
declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg7.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, half*, i64, <vscale x 2 x i1>, i64)

define <vscale x 2 x half> @test_vlsseg7_nxv2f16(half* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg7_nxv2f16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e16,mf2,ta,mu
; CHECK-NEXT:    vlsseg7e16.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg7.nxv2f16(half* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} %0, 1
  ret <vscale x 2 x half> %1
}

define <vscale x 2 x half> @test_vlsseg7_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, <vscale x 2 x i1> %mask) {
; CHECK-LABEL: test_vlsseg7_mask_nxv2f16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e16,mf2,ta,mu
; CHECK-NEXT:    vlsseg7e16.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vmv1r.v v10, v7
; CHECK-NEXT:    vmv1r.v v11, v7
; CHECK-NEXT:    vmv1r.v v12, v7
; CHECK-NEXT:    vmv1r.v v13, v7
; CHECK-NEXT:    vsetvli a2, a2, e16,mf2,tu,mu
; CHECK-NEXT:    vlsseg7e16.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg7.nxv2f16(half* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} %0, 0
  %2 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg7.mask.nxv2f16(<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1, half* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} %2, 1
  ret <vscale x 2 x half> %3
}

declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg8.nxv2f16(half*, i64, i64)
declare {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg8.mask.nxv2f16(<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>, half*, i64, <vscale x 2 x i1>, i64)

define <vscale x 2 x half> @test_vlsseg8_nxv2f16(half* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg8_nxv2f16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e16,mf2,ta,mu
; CHECK-NEXT:    vlsseg8e16.v v7, (a0), a1
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg8.nxv2f16(half* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} %0, 1
  ret <vscale x 2 x half> %1
}

define <vscale x 2 x half> @test_vlsseg8_mask_nxv2f16(half* %base, i64 %offset, i64 %vl, <vscale x 2 x i1> %mask) {
; CHECK-LABEL: test_vlsseg8_mask_nxv2f16:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e16,mf2,ta,mu
; CHECK-NEXT:    vlsseg8e16.v v7, (a0), a1
; CHECK-NEXT:    vmv1r.v v8, v7
; CHECK-NEXT:    vmv1r.v v9, v7
; CHECK-NEXT:    vmv1r.v v10, v7
; CHECK-NEXT:    vmv1r.v v11, v7
; CHECK-NEXT:    vmv1r.v v12, v7
; CHECK-NEXT:    vmv1r.v v13, v7
; CHECK-NEXT:    vmv1r.v v14, v7
; CHECK-NEXT:    vsetvli a2, a2, e16,mf2,tu,mu
; CHECK-NEXT:    vlsseg8e16.v v7, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8 killed $v8 killed $v7_v8_v9_v10_v11_v12_v13_v14
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg8.nxv2f16(half* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} %0, 0
  %2 = tail call {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} @llvm.riscv.vlsseg8.mask.nxv2f16(<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1,<vscale x 2 x half> %1, half* %base, i64 %offset, <vscale x 2 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>,<vscale x 2 x half>} %2, 1
  ret <vscale x 2 x half> %3
}

declare {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg2.nxv4f32(float*, i64, i64)
declare {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg2.mask.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>, float*, i64, <vscale x 4 x i1>, i64)

define <vscale x 4 x float> @test_vlsseg2_nxv4f32(float* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg2_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg2e32.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg2.nxv4f32(float* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>} %0, 1
  ret <vscale x 4 x float> %1
}
define <vscale x 4 x float> @test_vlsseg2_mask_nxv4f32(float* %base, i64 %offset, i64 %vl, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: test_vlsseg2_mask_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg2e32.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,tu,mu
; CHECK-NEXT:    vlsseg2e32.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg2.nxv4f32(float* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>} %0, 0
  %2 = tail call {<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg2.mask.nxv4f32(<vscale x 4 x float> %1,<vscale x 4 x float> %1, float* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>} %2, 1
  ret <vscale x 4 x float> %3
}

declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg3.nxv4f32(float*, i64, i64)
declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg3.mask.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, float*, i64, <vscale x 4 x i1>, i64)

define <vscale x 4 x float> @test_vlsseg3_nxv4f32(float* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg3_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg3e32.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg3.nxv4f32(float* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} %0, 1
  ret <vscale x 4 x float> %1
}

define <vscale x 4 x float> @test_vlsseg3_mask_nxv4f32(float* %base, i64 %offset, i64 %vl, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: test_vlsseg3_mask_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg3e32.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vmv2r.v v10, v6
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,tu,mu
; CHECK-NEXT:    vlsseg3e32.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg3.nxv4f32(float* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} %0, 0
  %2 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg3.mask.nxv4f32(<vscale x 4 x float> %1,<vscale x 4 x float> %1,<vscale x 4 x float> %1, float* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} %2, 1
  ret <vscale x 4 x float> %3
}

declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg4.nxv4f32(float*, i64, i64)
declare {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg4.mask.nxv4f32(<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>, float*, i64, <vscale x 4 x i1>, i64)

define <vscale x 4 x float> @test_vlsseg4_nxv4f32(float* %base, i64 %offset, i64 %vl) {
; CHECK-LABEL: test_vlsseg4_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg4e32.v v6, (a0), a1
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg4.nxv4f32(float* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} %0, 1
  ret <vscale x 4 x float> %1
}

define <vscale x 4 x float> @test_vlsseg4_mask_nxv4f32(float* %base, i64 %offset, i64 %vl, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: test_vlsseg4_mask_nxv4f32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a3, a2, e32,m2,ta,mu
; CHECK-NEXT:    vlsseg4e32.v v6, (a0), a1
; CHECK-NEXT:    vmv2r.v v8, v6
; CHECK-NEXT:    vmv2r.v v10, v6
; CHECK-NEXT:    vmv2r.v v12, v6
; CHECK-NEXT:    vsetvli a2, a2, e32,m2,tu,mu
; CHECK-NEXT:    vlsseg4e32.v v6, (a0), a1, v0.t
; CHECK-NEXT:    # kill: def $v8m2 killed $v8m2 killed $v6m2_v8m2_v10m2_v12m2
; CHECK-NEXT:    ret
entry:
  %0 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg4.nxv4f32(float* %base, i64 %offset, i64 %vl)
  %1 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} %0, 0
  %2 = tail call {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} @llvm.riscv.vlsseg4.mask.nxv4f32(<vscale x 4 x float> %1,<vscale x 4 x float> %1,<vscale x 4 x float> %1,<vscale x 4 x float> %1, float* %base, i64 %offset, <vscale x 4 x i1> %mask, i64 %vl)
  %3 = extractvalue {<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>,<vscale x 4 x float>} %2, 1
  ret <vscale x 4 x float> %3
}