; RUN: llc -mtriple=aarch64--linux-gnu -mattr=+sve --asm-verbose=false < %s 2>%t | FileCheck %s
; RUN: FileCheck --check-prefix=WARN --allow-empty %s <%t

; If this check fails please read test/CodeGen/AArch64/README for instructions on how to resolve it.
; WARN-NOT: warning

; PRFB <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element, imm = 0, 1, ..., 31
define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32_runtime_offset(<vscale x 4 x i32> %bases, i64 %offset, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #32
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 32, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 -1, i32 1)
  ret void
}

; PRFB <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element, imm = 0, 1, ..., 31
define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64_runtime_offset(<vscale x 2 x i64> %bases, i64 %offset, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #32
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 32, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfb_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 -1, i32 1)
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; PRFH <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element, imm = 0, 2, ..., 62
define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_runtime_offset(<vscale x 4 x i32> %bases, i64 %offset, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #63
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 63, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 -1, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_invalid_immediate_offset_inbound_not_multiple_of_2(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx4vi32_invalid_immediate_offset_inbound_not_multiple_of_2:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #33
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 33, i32 1)
  ret void
}

; PRFH <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element, imm = 0, 2, ..., 62
define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_runtime_offset(<vscale x 2 x i64> %bases, i64 %offset, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #63
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 63, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 -1, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_invalid_immediate_offset_inbound_not_multiple_of_2(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfh_gather_scalar_offset_nx2vi64_invalid_immediate_offset_inbound_not_multiple_of_2:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #33
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 33, i32 1)
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; PRFW <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element, imm = 0, 4, ..., 124
define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_runtime_offset(<vscale x 4 x i32> %bases, i64 %offset, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #125
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 125, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 -1, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_invalid_immediate_offset_inbound_not_multiple_of_4(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx4vi32_invalid_immediate_offset_inbound_not_multiple_of_4:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #33
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 33, i32 1)
  ret void
}

; PRFW <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element, imm = 0, 4, ..., 124
define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_runtime_offset(<vscale x 2 x i64> %bases, i64 %offset, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #125
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 125, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 -1, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_invalid_immediate_offset_inbound_not_multiple_of_4(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfw_gather_scalar_offset_nx2vi64_invalid_immediate_offset_inbound_not_multiple_of_4:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #33
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 33, i32 1)
  ret void
}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

; PRFD <prfop>, <Pg>, [<Zn>.S{, #<imm>}] -> 32-bit element, imm = 0, 8, ..., 248
define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_runtime_offset(<vscale x 4 x i32> %bases, i64 %offset, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #125
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 125, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 -1, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_invalid_immediate_offset_inbound_not_multiple_of_8(<vscale x 4 x i32> %bases, <vscale x 4 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx4vi32_invalid_immediate_offset_inbound_not_multiple_of_8:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #33
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.s, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 33, i32 1)
  ret void
}

; PRFD <prfop>, <Pg>, [<Zn>.D{, #<imm>}] -> 64-bit element, imm = 0, 8, ..., 248
define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_runtime_offset(<vscale x 2 x i64> %bases, i64 %offset, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_runtime_offset:
; CHECK-NEXT:  prfb pldl1strm, p0, [x0, z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_invalid_immediate_offset_upper_bound:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #125
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 125, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_invalid_immediate_offset_lower_bound:
; CHECK-NEXT:  mov x[[N:[0-9]+]], #-1
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 -1, i32 1)
  ret void
}

define void @llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_invalid_immediate_offset_inbound_not_multiple_of_8(<vscale x 2 x i64> %bases, <vscale x 2 x i1> %Pg) nounwind {
; CHECK-LABEL: llvm_aarch64_sve_prfd_gather_scalar_offset_nx2vi64_invalid_immediate_offset_inbound_not_multiple_of_8:
; CHECK-NEXT:  mov w[[N:[0-9]+]], #33
; CHECK-NEXT:  prfb pldl1strm, p0, [x[[N]], z0.d, uxtw]
; CHECK-NEXT:  ret
  call void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 33, i32 1)
  ret void
}

declare void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfb.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfh.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfw.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx4vi32(<vscale x 4 x i1> %Pg, <vscale x 4 x i32> %bases, i64 %offset, i32 %prfop)
declare void @llvm.aarch64.sve.prfd.gather.scalar.offset.nx2vi64(<vscale x 2 x i1> %Pg, <vscale x 2 x i64> %bases, i64 %offset, i32 %prfop)