; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2 < %s 2>%t | FileCheck %s
; RUN: FileCheck --check-prefix=WARN --allow-empty %s <%t

; If this check fails please read test/CodeGen/AArch64/README for instructions on how to resolve it.
; WARN-NOT: warning

;
; ADDP
;

define <vscale x 16 x i8> @addp_i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: addp_i8:
; CHECK: addp z0.b, p0/m, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.addp.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @addp_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: addp_i16:
; CHECK: addp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.addp.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @addp_i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: addp_i32:
; CHECK: addp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.addp.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @addp_i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: addp_i64:
; CHECK: addp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.addp.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; FADDP
;

define <vscale x 8 x half> @faddp_f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b) {
; CHECK-LABEL: faddp_f16:
; CHECK: faddp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x half> @llvm.aarch64.sve.faddp.nxv8f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %out
}

define <vscale x 4 x float> @faddp_f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b) {
; CHECK-LABEL: faddp_f32:
; CHECK: faddp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x float> @llvm.aarch64.sve.faddp.nxv4f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %out
}

define <vscale x 2 x double> @faddp_f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: faddp_f64:
; CHECK: faddp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x double> @llvm.aarch64.sve.faddp.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %out
}

;
; FMAXP
;

define <vscale x 8 x half> @fmaxp_f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b) {
; CHECK-LABEL: fmaxp_f16:
; CHECK: fmaxp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x half> @llvm.aarch64.sve.fmaxp.nxv8f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %out
}

define <vscale x 4 x float> @fmaxp_f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b) {
; CHECK-LABEL: fmaxp_f32:
; CHECK: fmaxp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x float> @llvm.aarch64.sve.fmaxp.nxv4f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %out
}

define <vscale x 2 x double> @fmaxp_f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fmaxp_f64:
; CHECK: fmaxp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x double> @llvm.aarch64.sve.fmaxp.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %out
}

;
; FMAXNMP
;

define <vscale x 8 x half> @fmaxnmp_f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b) {
; CHECK-LABEL: fmaxnmp_f16:
; CHECK: fmaxnmp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x half> @llvm.aarch64.sve.fmaxnmp.nxv8f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %out
}

define <vscale x 4 x float> @fmaxnmp_f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b) {
; CHECK-LABEL: fmaxnmp_f32:
; CHECK: fmaxnmp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x float> @llvm.aarch64.sve.fmaxnmp.nxv4f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %out
}

define <vscale x 2 x double> @fmaxnmp_f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fmaxnmp_f64:
; CHECK: fmaxnmp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x double> @llvm.aarch64.sve.fmaxnmp.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %out
}

;
; FMINP
;

define <vscale x 8 x half> @fminp_f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b) {
; CHECK-LABEL: fminp_f16:
; CHECK: fminp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x half> @llvm.aarch64.sve.fminp.nxv8f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %out
}

define <vscale x 4 x float> @fminp_f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b) {
; CHECK-LABEL: fminp_f32:
; CHECK: fminp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x float> @llvm.aarch64.sve.fminp.nxv4f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %out
}

define <vscale x 2 x double> @fminp_f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fminp_f64:
; CHECK: fminp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x double> @llvm.aarch64.sve.fminp.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %out
}

;
; FMINNMP
;

define <vscale x 8 x half> @fminnmp_f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b) {
; CHECK-LABEL: fminnmp_f16:
; CHECK: fminnmp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x half> @llvm.aarch64.sve.fminnmp.nxv8f16(<vscale x 8 x i1> %pg, <vscale x 8 x half> %a, <vscale x 8 x half> %b)
  ret <vscale x 8 x half> %out
}

define <vscale x 4 x float> @fminnmp_f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b) {
; CHECK-LABEL: fminnmp_f32:
; CHECK: fminnmp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x float> @llvm.aarch64.sve.fminnmp.nxv4f32(<vscale x 4 x i1> %pg, <vscale x 4 x float> %a, <vscale x 4 x float> %b)
  ret <vscale x 4 x float> %out
}

define <vscale x 2 x double> @fminnmp_f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b) {
; CHECK-LABEL: fminnmp_f64:
; CHECK: fminnmp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x double> @llvm.aarch64.sve.fminnmp.nxv2f64(<vscale x 2 x i1> %pg, <vscale x 2 x double> %a, <vscale x 2 x double> %b)
  ret <vscale x 2 x double> %out
}

;
; SMAXP
;

define <vscale x 16 x i8> @smaxp_i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: smaxp_i8:
; CHECK: smaxp z0.b, p0/m, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.smaxp.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @smaxp_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: smaxp_i16:
; CHECK: smaxp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.smaxp.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @smaxp_i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: smaxp_i32:
; CHECK: smaxp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.smaxp.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @smaxp_i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: smaxp_i64:
; CHECK: smaxp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.smaxp.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; SMINP
;

define <vscale x 16 x i8> @sminp_i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: sminp_i8:
; CHECK: sminp z0.b, p0/m, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.sminp.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @sminp_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sminp_i16:
; CHECK: sminp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.sminp.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @sminp_i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sminp_i32:
; CHECK: sminp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sminp.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sminp_i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sminp_i64:
; CHECK: sminp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sminp.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; UMINP
;

define <vscale x 16 x i8> @uminp_i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: uminp_i8:
; CHECK: uminp z0.b, p0/m, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.uminp.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @uminp_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: uminp_i16:
; CHECK: uminp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.uminp.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @uminp_i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uminp_i32:
; CHECK: uminp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uminp.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uminp_i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uminp_i64:
; CHECK: uminp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uminp.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; UMAXP
;

define <vscale x 16 x i8> @umaxp_i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: umaxp_i8:
; CHECK: umaxp z0.b, p0/m, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.umaxp.nxv16i8(<vscale x 16 x i1> %pg, <vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @umaxp_i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: umaxp_i16:
; CHECK: umaxp z0.h, p0/m, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.umaxp.nxv8i16(<vscale x 8 x i1> %pg, <vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @umaxp_i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: umaxp_i32:
; CHECK: umaxp z0.s, p0/m, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.umaxp.nxv4i32(<vscale x 4 x i1> %pg, <vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @umaxp_i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: umaxp_i64:
; CHECK: umaxp z0.d, p0/m, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.umaxp.nxv2i64(<vscale x 2 x i1> %pg, <vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

declare <vscale x 16 x i8> @llvm.aarch64.sve.addp.nxv16i8(<vscale x 16 x i1>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.addp.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.addp.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.addp.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 8 x half> @llvm.aarch64.sve.faddp.nxv8f16(<vscale x 8 x i1>, <vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.faddp.nxv4f32(<vscale x 4 x i1>, <vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.faddp.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 8 x half> @llvm.aarch64.sve.fmaxp.nxv8f16(<vscale x 8 x i1>, <vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.fmaxp.nxv4f32(<vscale x 4 x i1>, <vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.fmaxp.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 8 x half> @llvm.aarch64.sve.fmaxnmp.nxv8f16(<vscale x 8 x i1>, <vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.fmaxnmp.nxv4f32(<vscale x 4 x i1>, <vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.fmaxnmp.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 8 x half> @llvm.aarch64.sve.fminp.nxv8f16(<vscale x 8 x i1>, <vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.fminp.nxv4f32(<vscale x 4 x i1>, <vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.fminp.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 8 x half> @llvm.aarch64.sve.fminnmp.nxv8f16(<vscale x 8 x i1>, <vscale x 8 x half>, <vscale x 8 x half>)
declare <vscale x 4 x float> @llvm.aarch64.sve.fminnmp.nxv4f32(<vscale x 4 x i1>, <vscale x 4 x float>, <vscale x 4 x float>)
declare <vscale x 2 x double> @llvm.aarch64.sve.fminnmp.nxv2f64(<vscale x 2 x i1>, <vscale x 2 x double>, <vscale x 2 x double>)
declare <vscale x 16 x i8> @llvm.aarch64.sve.smaxp.nxv16i8(<vscale x 16 x i1>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.smaxp.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.smaxp.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.smaxp.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 16 x i8> @llvm.aarch64.sve.sminp.nxv16i8(<vscale x 16 x i1>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.sminp.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sminp.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sminp.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 16 x i8> @llvm.aarch64.sve.umaxp.nxv16i8(<vscale x 16 x i1>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.umaxp.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.umaxp.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.umaxp.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 16 x i8> @llvm.aarch64.sve.uminp.nxv16i8(<vscale x 16 x i1>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.uminp.nxv8i16(<vscale x 8 x i1>, <vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uminp.nxv4i32(<vscale x 4 x i1>, <vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uminp.nxv2i64(<vscale x 2 x i1>, <vscale x 2 x i64>, <vscale x 2 x i64>)