; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve < %s 2>%t | FileCheck %s
; RUN: FileCheck --check-prefix=WARN --allow-empty %s <%t

; If this check fails please read test/CodeGen/AArch64/README for instructions on how to resolve it.
; WARN-NOT: warning

;
; ABS
;

define <vscale x 16 x i8> @abs_i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b) {
; CHECK-LABEL: abs_i8:
; CHECK: abs z0.b, p0/m, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @abs_i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b) {
; CHECK-LABEL: abs_i16:
; CHECK: abs z0.h, p0/m, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @abs_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: abs_i32:
; CHECK: abs z0.s, p0/m, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @abs_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: abs_i64:
; CHECK: abs z0.d, p0/m, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

;
; NEG
;

define <vscale x 16 x i8> @neg_i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b) {
; CHECK-LABEL: neg_i8:
; CHECK: neg z0.b, p0/m, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.neg.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i1> %pg, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @neg_i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b) {
; CHECK-LABEL: neg_i16:
; CHECK: neg z0.h, p0/m, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.neg.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @neg_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: neg_i32:
; CHECK: neg z0.s, p0/m, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.neg.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @neg_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: neg_i64:
; CHECK: neg z0.d, p0/m, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.neg.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

; SDOT

define <vscale x 4 x i32> @sdot_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: sdot_i32:
; CHECK: sdot z0.s, z1.b, z2.b
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sdot_i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: sdot_i64:
; CHECK: sdot z0.d, z1.h, z2.h
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c)
  ret <vscale x 2 x i64> %out
}

; SDOT (Indexed)

define <vscale x 4 x i32> @sdot_lane_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: sdot_lane_i32:
; CHECK: sdot z0.s, z1.b, z2.b[2]
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sdot.lane.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c, i32 2)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sdot_lane_i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: sdot_lane_i64:
; CHECK: sdot z0.d, z1.h, z2.h[1]
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sdot.lane.nxv2i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c, i32 1)
  ret <vscale x 2 x i64> %out
}
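; Note (added for clarity): sdot/udot above are widening dot products; each
; 32-bit (resp. 64-bit) result element accumulates four products of adjacent
; i8 (resp. i16) elements. In the indexed forms, the trailing immediate
; selects one sub-element within each 128-bit segment of the second
; multiplicand, which is then reused for every result element in that segment.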
; SQADD

define <vscale x 16 x i8> @sqadd_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: sqadd_i8:
; CHECK: sqadd z0.b, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.sqadd.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @sqadd_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sqadd_i16:
; CHECK: sqadd z0.h, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.sqadd.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @sqadd_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sqadd_i32:
; CHECK: sqadd z0.s, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sqadd.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sqadd_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sqadd_i64:
; CHECK: sqadd z0.d, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sqadd.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

; SQSUB

define <vscale x 16 x i8> @sqsub_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: sqsub_i8:
; CHECK: sqsub z0.b, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.sqsub.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @sqsub_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sqsub_i16:
; CHECK: sqsub z0.h, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.sqsub.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @sqsub_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sqsub_i32:
; CHECK: sqsub z0.s, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sqsub.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sqsub_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sqsub_i64:
; CHECK: sqsub z0.d, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sqsub.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

; UDOT

define <vscale x 4 x i32> @udot_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: udot_i32:
; CHECK: udot z0.s, z1.b, z2.b
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @udot_i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c) {
; CHECK-LABEL: udot_i64:
; CHECK: udot z0.d, z1.h, z2.h
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64> %a, <vscale x 8 x i16> %b, <vscale x 8 x i16> %c)
  ret <vscale x 2 x i64> %out
}

; UDOT (Indexed)

define <vscale x 4 x i32> @udot_lane_i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c) {
; CHECK-LABEL: udot_lane_i32:
; CHECK: udot z0.s, z1.b, z2.b[2]
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.udot.lane.nxv4i32(<vscale x 4 x i32> %a, <vscale x 16 x i8> %b, <vscale x 16 x i8> %c, i32 2)
  ret <vscale x 4 x i32> %out
}

; UQADD

define <vscale x 16 x i8> @uqadd_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: uqadd_i8:
; CHECK: uqadd z0.b, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.uqadd.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @uqadd_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: uqadd_i16:
; CHECK: uqadd z0.h, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.uqadd.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @uqadd_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uqadd_i32:
; CHECK: uqadd z0.s, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uqadd.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uqadd_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uqadd_i64:
; CHECK: uqadd z0.d, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uqadd.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

; UQSUB

define <vscale x 16 x i8> @uqsub_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
; CHECK-LABEL: uqsub_i8:
; CHECK: uqsub z0.b, z0.b, z1.b
; CHECK-NEXT: ret
  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.uqsub.x.nxv16i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b)
  ret <vscale x 16 x i8> %out
}

define <vscale x 8 x i16> @uqsub_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: uqsub_i16:
; CHECK: uqsub z0.h, z0.h, z1.h
; CHECK-NEXT: ret
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.uqsub.x.nxv8i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @uqsub_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uqsub_i32:
; CHECK: uqsub z0.s, z0.s, z1.s
; CHECK-NEXT: ret
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uqsub.x.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uqsub_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uqsub_i64:
; CHECK: uqsub z0.d, z0.d, z1.d
; CHECK-NEXT: ret
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uqsub.x.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}
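; Note (added for clarity): the tuple tests below form a single wide value
; from consecutive Z registers via the sve.tuple.create* intrinsics; adding
; the tuple to itself is expected to lower to one add per constituent
; register (z0, z1, ...).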
; ADD (tuples)

define <vscale x 4 x i64> @add_i64_tuple2(<vscale x 4 x i64>* %out, <vscale x 2 x i64> %in1, <vscale x 2 x i64> %in2) {
; CHECK-LABEL: add_i64_tuple2
; CHECK: add z0.d, z0.d, z0.d
; CHECK: add z1.d, z1.d, z1.d
  %tuple = tail call <vscale x 4 x i64> @llvm.aarch64.sve.tuple.create2.nxv4i64.nxv2i64(<vscale x 2 x i64> %in1, <vscale x 2 x i64> %in2)
  %res = add <vscale x 4 x i64> %tuple, %tuple
  ret <vscale x 4 x i64> %res
}

define <vscale x 6 x i64> @add_i64_tuple3(<vscale x 6 x i64>* %out, <vscale x 2 x i64> %in1, <vscale x 2 x i64> %in2, <vscale x 2 x i64> %in3) {
; CHECK-LABEL: add_i64_tuple3
; CHECK: add z0.d, z0.d, z0.d
; CHECK: add z1.d, z1.d, z1.d
; CHECK: add z2.d, z2.d, z2.d
  %tuple = tail call <vscale x 6 x i64> @llvm.aarch64.sve.tuple.create3.nxv6i64.nxv2i64(<vscale x 2 x i64> %in1, <vscale x 2 x i64> %in2, <vscale x 2 x i64> %in3)
  %res = add <vscale x 6 x i64> %tuple, %tuple
  ret <vscale x 6 x i64> %res
}

define <vscale x 8 x i64> @add_i64_tuple4(<vscale x 8 x i64>* %out, <vscale x 2 x i64> %in1, <vscale x 2 x i64> %in2, <vscale x 2 x i64> %in3, <vscale x 2 x i64> %in4) {
; CHECK-LABEL: add_i64_tuple4
; CHECK: add z0.d, z0.d, z0.d
; CHECK: add z1.d, z1.d, z1.d
; CHECK: add z2.d, z2.d, z2.d
; CHECK: add z3.d, z3.d, z3.d
  %tuple = tail call <vscale x 8 x i64> @llvm.aarch64.sve.tuple.create4.nxv8i64.nxv2i64(<vscale x 2 x i64> %in1, <vscale x 2 x i64> %in2, <vscale x 2 x i64> %in3, <vscale x 2 x i64> %in4)
  %res = add <vscale x 8 x i64> %tuple, %tuple
  ret <vscale x 8 x i64> %res
}

declare <vscale x 16 x i8> @llvm.aarch64.sve.abs.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i1>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.abs.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.abs.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.abs.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.neg.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i1>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.neg.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.neg.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.neg.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.sdot.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sdot.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.sdot.lane.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>, i32)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sdot.lane.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>, i32)

declare <vscale x 16 x i8> @llvm.aarch64.sve.sqadd.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.sqadd.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sqadd.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sqadd.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.sqsub.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.sqsub.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sqsub.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sqsub.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.udot.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.udot.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>)

declare <vscale x 4 x i32> @llvm.aarch64.sve.udot.lane.nxv4i32(<vscale x 4 x i32>, <vscale x 16 x i8>, <vscale x 16 x i8>, i32)
declare <vscale x 2 x i64> @llvm.aarch64.sve.udot.lane.nxv2i64(<vscale x 2 x i64>, <vscale x 8 x i16>, <vscale x 8 x i16>, i32)

declare <vscale x 16 x i8> @llvm.aarch64.sve.uqadd.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.uqadd.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uqadd.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uqadd.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 16 x i8> @llvm.aarch64.sve.uqsub.x.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.uqsub.x.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uqsub.x.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uqsub.x.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)

declare <vscale x 4 x i64> @llvm.aarch64.sve.tuple.create2.nxv4i64.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 6 x i64> @llvm.aarch64.sve.tuple.create3.nxv6i64.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>)
declare <vscale x 8 x i64> @llvm.aarch64.sve.tuple.create4.nxv8i64.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>)