; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -fast-isel -mtriple=x86_64-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X64
; NOTE: This should use IR equivalent to what is generated by clang/test/CodeGen/tbm-builtins.c
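
; Under fast-isel only the bextri intrinsic is expected to select a TBM
; instruction; the remaining tests check that the equivalent add/sub/and/or/xor
; patterns are emitted as plain arithmetic, as the X64 check lines expect.
; For bextri, the immediate control encodes the bit-field start in bits [7:0]
; and the length in bits [15:8], so 0xF01 extracts 15 bits starting at bit 1.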
define i64 @test__bextri_u64(i64 %a0) {
; X64-LABEL: test__bextri_u64:
; X64: # %bb.0:
; X64-NEXT: bextrq $3841, %rdi, %rax # imm = 0xF01
; X64-NEXT: retq
  %1 = call i64 @llvm.x86.tbm.bextri.u64(i64 %a0, i64 3841)
  ret i64 %1
}
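
; blcfill: x & (x + 1), which clears all trailing set bits.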
define i64 @test__blcfill_u64(i64 %a0) {
; X64-LABEL: test__blcfill_u64:
; X64: # %bb.0:
; X64-NEXT: leaq 1(%rdi), %rax
; X64-NEXT: andq %rdi, %rax
; X64-NEXT: retq
  %1 = add i64 %a0, 1
  %2 = and i64 %a0, %1
  ret i64 %2
}
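
; blci: x | ~(x + 1), which sets every bit except the lowest clear bit.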
define i64 @test__blci_u64(i64 %a0) {
; X64-LABEL: test__blci_u64:
; X64: # %bb.0:
; X64-NEXT: leaq 1(%rdi), %rax
; X64-NEXT: xorq $-1, %rax
; X64-NEXT: orq %rdi, %rax
; X64-NEXT: retq
  %1 = add i64 %a0, 1
  %2 = xor i64 %1, -1
  %3 = or i64 %a0, %2
  ret i64 %3
}
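
; blcic: ~x & (x + 1), which isolates the lowest clear bit.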
define i64 @test__blcic_u64(i64 %a0) {
; X64-LABEL: test__blcic_u64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: movq %rdi, %rcx
; X64-NEXT: xorq $-1, %rcx
; X64-NEXT: addq $1, %rax
; X64-NEXT: andq %rcx, %rax
; X64-NEXT: retq
  %1 = xor i64 %a0, -1
  %2 = add i64 %a0, 1
  %3 = and i64 %1, %2
  ret i64 %3
}
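
; blcmsk: x ^ (x + 1), a mask up to and including the lowest clear bit.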
define i64 @test__blcmsk_u64(i64 %a0) {
; X64-LABEL: test__blcmsk_u64:
; X64: # %bb.0:
; X64-NEXT: leaq 1(%rdi), %rax
; X64-NEXT: xorq %rdi, %rax
; X64-NEXT: retq
  %1 = add i64 %a0, 1
  %2 = xor i64 %a0, %1
  ret i64 %2
}
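
; blcs: x | (x + 1), which sets the lowest clear bit.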
define i64 @test__blcs_u64(i64 %a0) {
; X64-LABEL: test__blcs_u64:
; X64: # %bb.0:
; X64-NEXT: leaq 1(%rdi), %rax
; X64-NEXT: orq %rdi, %rax
; X64-NEXT: retq
  %1 = add i64 %a0, 1
  %2 = or i64 %a0, %1
  ret i64 %2
}
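
; blsfill: x | (x - 1), which fills the trailing zero bits with ones.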
define i64 @test__blsfill_u64(i64 %a0) {
; X64-LABEL: test__blsfill_u64:
; X64: # %bb.0:
; X64-NEXT: leaq -1(%rdi), %rax
; X64-NEXT: orq %rdi, %rax
; X64-NEXT: retq
  %1 = sub i64 %a0, 1
  %2 = or i64 %a0, %1
  ret i64 %2
}
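
; blsic: ~x | (x - 1), which sets every bit except the lowest set bit.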
define i64 @test__blsic_u64(i64 %a0) {
; X64-LABEL: test__blsic_u64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: movq %rdi, %rcx
; X64-NEXT: xorq $-1, %rcx
; X64-NEXT: subq $1, %rax
; X64-NEXT: orq %rcx, %rax
; X64-NEXT: retq
  %1 = xor i64 %a0, -1
  %2 = sub i64 %a0, 1
  %3 = or i64 %1, %2
  ret i64 %3
}
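
; t1mskc: ~x | (x + 1), the inverse of the mask of the trailing one bits.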
define i64 @test__t1mskc_u64(i64 %a0) {
; X64-LABEL: test__t1mskc_u64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: movq %rdi, %rcx
; X64-NEXT: xorq $-1, %rcx
; X64-NEXT: addq $1, %rax
; X64-NEXT: orq %rcx, %rax
; X64-NEXT: retq
  %1 = xor i64 %a0, -1
  %2 = add i64 %a0, 1
  %3 = or i64 %1, %2
  ret i64 %3
}
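
; tzmsk: ~x & (x - 1), a mask of the trailing zero bits.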
define i64 @test__tzmsk_u64(i64 %a0) {
; X64-LABEL: test__tzmsk_u64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: movq %rdi, %rcx
; X64-NEXT: xorq $-1, %rcx
; X64-NEXT: subq $1, %rax
; X64-NEXT: andq %rcx, %rax
; X64-NEXT: retq
  %1 = xor i64 %a0, -1
  %2 = sub i64 %a0, 1
  %3 = and i64 %1, %2
  ret i64 %3
}
declare i64 @llvm.x86.tbm.bextri.u64(i64, i64)