# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-- -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
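# Check that G_BRCOND of a G_TRUNC fed by G_XOR/G_AND/G_SHL/G_ASHR is selected
# to TBZ/TBNZ, folding the bit test through the source instruction where
# possible.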
---
name: condbr_of_not
legalized: true
regBankSelected: true
liveins:
  - { reg: '$x0' }
body: |
  ; CHECK-LABEL: name: condbr_of_not
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: [[COPY:%[0-9]+]]:gpr64sp = COPY $x0
  ; CHECK: [[LDRBBui:%[0-9]+]]:gpr32 = LDRBBui [[COPY]], 0 :: (load 1)
  ; CHECK: TBZW [[LDRBBui]], 0, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
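  ; G_XOR with 1 inverts bit 0 of the value, so instead of TBNZW on the XOR
  ; result we can branch with TBZW on the loaded value directly.
  ;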
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $x0

    %0:gpr(p0) = COPY $x0
    %8:gpr(s8) = G_LOAD %0(p0) :: (load 1)
    %4:gpr(s32) = G_ANYEXT %8(s8)
    %5:gpr(s32) = G_CONSTANT i32 1
    %6:gpr(s32) = G_XOR %4, %5
    %3:gpr(s1) = G_TRUNC %6(s32)
    G_BRCOND %3(s1), %bb.3

  bb.2:
    RET_ReallyLR

  bb.3:
    RET_ReallyLR

...
---
name: condbr_of_not_64
legalized: true
regBankSelected: true
liveins:
  - { reg: '$x0' }
body: |
  ; CHECK-LABEL: name: condbr_of_not_64
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: [[COPY:%[0-9]+]]:gpr64sp = COPY $x0
  ; CHECK: [[LDRBBui:%[0-9]+]]:gpr32 = LDRBBui [[COPY]], 0 :: (load 1)
  ; CHECK: TBZW [[LDRBBui]], 0, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
  ; TB(N)ZX has no encoding if the bit being tested is < 32, so we should get
  ; TBZW here.
  ;
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $x0

    %0:gpr(p0) = COPY $x0
    %8:gpr(s8) = G_LOAD %0(p0) :: (load 1)
    %4:gpr(s64) = G_ANYEXT %8(s8)
    %5:gpr(s64) = G_CONSTANT i64 1
    %6:gpr(s64) = G_XOR %4, %5
    %3:gpr(s1) = G_TRUNC %6(s64)
    G_BRCOND %3(s1), %bb.3

  bb.2:
    RET_ReallyLR

  bb.3:
    RET_ReallyLR

...
---
name: condbr_of_and
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: condbr_of_and
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: %lhs:gpr32 = COPY $w0
  ; CHECK: TBNZW %lhs, 0, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
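  ; ANDing with 1 only affects bit 0, which is the bit being tested, so the
  ; G_AND is folded away and TBNZW tests bit 0 of %lhs directly.
  ;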
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $w0
    %lhs:gpr(s32) = COPY $w0
    %rhs:gpr(s32) = G_CONSTANT i32 1
    %op:gpr(s32) = G_AND %lhs, %rhs
    %trunc:gpr(s1) = G_TRUNC %op(s32)
    G_BRCOND %trunc(s1), %bb.3

  bb.2:
    RET_ReallyLR

  bb.3:
    RET_ReallyLR

...
---
name: condbr_of_and_no_cst
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: condbr_of_and_no_cst
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: %lhs:gpr32 = COPY $w0
  ; CHECK: %rhs:gpr32 = COPY $w1
  ; CHECK: %op:gpr32 = ANDWrr %lhs, %rhs
  ; CHECK: TBNZW %op, 0, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
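  ; The mask is not a constant here, so the G_AND cannot be folded away; we
  ; select an ANDWrr and test bit 0 of its result.
  ;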
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $w0, $w1
    %lhs:gpr(s32) = COPY $w0
    %rhs:gpr(s32) = COPY $w1
    %op:gpr(s32) = G_AND %lhs, %rhs
    %trunc:gpr(s1) = G_TRUNC %op(s32)
    G_BRCOND %trunc(s1), %bb.3

  bb.2:
    RET_ReallyLR

  bb.3:
    RET_ReallyLR

...
---
name: condbr_of_shl
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: condbr_of_shl
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: %lhs:gpr32 = COPY $w0
  ; CHECK: %op:gpr32 = UBFMWri %lhs, 31, 30
  ; CHECK: TBNZW %op, 0, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
  ; We won't ever fold this, because
  ; bit = 0
  ; bit - constant < 0, which isn't valid for tbz/tbnz.
  ;
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $w0
    %lhs:gpr(s32) = COPY $w0
    %rhs:gpr(s32) = G_CONSTANT i32 1
    %op:gpr(s32) = G_SHL %lhs, %rhs
    %trunc:gpr(s1) = G_TRUNC %op(s32)
    G_BRCOND %trunc(s1), %bb.3

  bb.2:
    RET_ReallyLR

  bb.3:
    RET_ReallyLR

...
---
name: condbr_of_ashr
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: condbr_of_ashr
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: %lhs:gpr32 = COPY $w0
  ; CHECK: TBNZW %lhs, 1, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
  ; We can fold ashr, because we can have
  ;
  ; (tbz (ashr x, c), 0) where 0 + c > # bits in x.
  ;
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $w0
    %lhs:gpr(s32) = COPY $w0
    %rhs:gpr(s32) = G_CONSTANT i32 1
    %op:gpr(s32) = G_ASHR %lhs, %rhs
    %trunc:gpr(s1) = G_TRUNC %op(s32)
    G_BRCOND %trunc(s1), %bb.3

  bb.2:
    RET_ReallyLR

  bb.3:
    RET_ReallyLR

...
---
name: tbnzx
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: tbnzx
  ; CHECK: bb.0:
  ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; CHECK: %lhs:gpr64 = COPY $x0
  ; CHECK: TBNZX %lhs, 63, %bb.2
  ; CHECK: bb.1:
  ; CHECK: RET_ReallyLR
  ; CHECK: bb.2:
  ; CHECK: RET_ReallyLR
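  ; The constant shift amount exceeds the register width, so the bit test ends
  ; up on the sign bit: TBNZX %lhs, 63.
  ;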
  bb.1:
    successors: %bb.2, %bb.3
    liveins: $x0
    %lhs:gpr(s64) = COPY $x0
    %rhs:gpr(s64) = G_CONSTANT i64 8589934592
    %op:gpr(s64) = G_ASHR %lhs, %rhs
    %trunc:gpr(s1) = G_TRUNC %op(s64)
    G_BRCOND %trunc(s1), %bb.3
  bb.2:
    RET_ReallyLR
  bb.3:
    RET_ReallyLR
...