llvm-for-llvmta/test/CodeGen/AArch64/GlobalISel/select-ssubo.mir

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -verify-machineinstrs -mtriple aarch64-unknown-unknown -global-isel -run-pass=instruction-select %s -o - | FileCheck %s
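# These tests cover instruction selection for G_SSUBO: the subtraction itself
# should become a flag-setting SUBS, and the s1 overflow result should be
# materialized by CSINCWr $wzr, $wzr, 7 (condition code 7 is "vc", so the
# result is 1 exactly when the V flag, i.e. signed overflow, is set).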
...
---
name: ssubo_s32
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $w0, $w1, $x2
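; The s32 case should select to SUBSWrr (defining $nzcv) plus the CSINCWr that
; materializes the overflow bit; the two UBFMWri instructions below are the
; selected forms of the s1->s8 and s8->s32 G_ZEXTs (bitfield extracts of bit 0
; and of the low byte, respectively).
;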
; CHECK-LABEL: name: ssubo_s32
; CHECK: liveins: $w0, $w1, $x2
; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w0
; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY $w1
; CHECK: [[SUBSWrr:%[0-9]+]]:gpr32 = SUBSWrr [[COPY]], [[COPY1]], implicit-def $nzcv
; CHECK: [[CSINCWr:%[0-9]+]]:gpr32 = CSINCWr $wzr, $wzr, 7, implicit $nzcv
; CHECK: [[UBFMWri:%[0-9]+]]:gpr32 = UBFMWri [[CSINCWr]], 0, 0
; CHECK: [[UBFMWri1:%[0-9]+]]:gpr32 = UBFMWri [[UBFMWri]], 0, 7
; CHECK: $w0 = COPY [[UBFMWri1]]
; CHECK: RET_ReallyLR implicit $w0
%0:gpr(s32) = COPY $w0
%1:gpr(s32) = COPY $w1
%3:gpr(s32), %4:gpr(s1) = G_SSUBO %0, %1
%5:gpr(s8) = G_ZEXT %4(s1)
%6:gpr(s32) = G_ZEXT %5(s8)
$w0 = COPY %6(s32)
RET_ReallyLR implicit $w0
...
---
name: ssubo_s64
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $x0, $x1, $x2
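; Same pattern for s64: SUBSXrr sets $nzcv, while the overflow bit is still
; produced as a 32-bit value by CSINCWr and then zero-extended as above.
;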
; CHECK-LABEL: name: ssubo_s64
; CHECK: liveins: $x0, $x1, $x2
; CHECK: [[COPY:%[0-9]+]]:gpr64 = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:gpr64 = COPY $x1
; CHECK: [[SUBSXrr:%[0-9]+]]:gpr64 = SUBSXrr [[COPY]], [[COPY1]], implicit-def $nzcv
; CHECK: [[CSINCWr:%[0-9]+]]:gpr32 = CSINCWr $wzr, $wzr, 7, implicit $nzcv
; CHECK: [[UBFMWri:%[0-9]+]]:gpr32 = UBFMWri [[CSINCWr]], 0, 0
; CHECK: [[UBFMWri1:%[0-9]+]]:gpr32 = UBFMWri [[UBFMWri]], 0, 7
; CHECK: $w0 = COPY [[UBFMWri1]]
; CHECK: RET_ReallyLR implicit $w0
%0:gpr(s64) = COPY $x0
%1:gpr(s64) = COPY $x1
%3:gpr(s64), %4:gpr(s1) = G_SSUBO %0, %1
%5:gpr(s8) = G_ZEXT %4(s1)
%6:gpr(s32) = G_ZEXT %5(s8)
$w0 = COPY %6(s32)
RET_ReallyLR implicit $w0
...
---
name: ssubo_s32_imm
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $w0, $w1, $x2
; Check that we get SUBSWri when we can fold in a constant.
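; (The G_CONSTANT 16 should fold into the 12-bit immediate form; the trailing 0
; in SUBSWri is the "lsl #0" shift of that immediate.)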
;
; CHECK-LABEL: name: ssubo_s32_imm
; CHECK: liveins: $w0, $w1, $x2
; CHECK: %copy:gpr32sp = COPY $w0
; CHECK: %add:gpr32 = SUBSWri %copy, 16, 0, implicit-def $nzcv
; CHECK: %overflow:gpr32 = CSINCWr $wzr, $wzr, 7, implicit $nzcv
; CHECK: $w0 = COPY %add
; CHECK: RET_ReallyLR implicit $w0
%copy:gpr(s32) = COPY $w0
%constant:gpr(s32) = G_CONSTANT i32 16
%add:gpr(s32), %overflow:gpr(s1) = G_SSUBO %copy, %constant
$w0 = COPY %add(s32)
RET_ReallyLR implicit $w0
...
---
name: ssubo_s32_shifted
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $w0, $w1, $x2
; Check that we get SUBSWrs when we can fold in a shift.
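; (The G_SHL by the constant 16 should fold into the shifted-register form; the
; third SUBSWrs operand, 16, encodes the shift, here lsl #16.)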
;
; CHECK-LABEL: name: ssubo_s32_shifted
; CHECK: liveins: $w0, $w1, $x2
; CHECK: %copy1:gpr32 = COPY $w0
; CHECK: %copy2:gpr32 = COPY $w1
; CHECK: %add:gpr32 = SUBSWrs %copy1, %copy2, 16, implicit-def $nzcv
; CHECK: %overflow:gpr32 = CSINCWr $wzr, $wzr, 7, implicit $nzcv
; CHECK: $w0 = COPY %add
; CHECK: RET_ReallyLR implicit $w0
%copy1:gpr(s32) = COPY $w0
%copy2:gpr(s32) = COPY $w1
%constant:gpr(s32) = G_CONSTANT i32 16
%shift:gpr(s32) = G_SHL %copy2(s32), %constant(s32)
%add:gpr(s32), %overflow:gpr(s1) = G_SSUBO %copy1, %shift
$w0 = COPY %add(s32)
RET_ReallyLR implicit $w0
...
---
name: ssubo_s32_neg_imm
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $w0, $w1, $x2
; Check that we get ADDSWri when we can fold in a negative constant.
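; (Subtracting the constant -16 should be rewritten as adding 16, hence ADDSWri
; rather than SUBSWri.)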
;
; CHECK-LABEL: name: ssubo_s32_neg_imm
; CHECK: liveins: $w0, $w1, $x2
; CHECK: %copy:gpr32sp = COPY $w0
; CHECK: %add:gpr32 = ADDSWri %copy, 16, 0, implicit-def $nzcv
; CHECK: %overflow:gpr32 = CSINCWr $wzr, $wzr, 7, implicit $nzcv
; CHECK: $w0 = COPY %add
; CHECK: RET_ReallyLR implicit $w0
%copy:gpr(s32) = COPY $w0
%constant:gpr(s32) = G_CONSTANT i32 -16
%add:gpr(s32), %overflow:gpr(s1) = G_SSUBO %copy, %constant
$w0 = COPY %add(s32)
RET_ReallyLR implicit $w0
...
---
name: ssubo_arith_extended
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $w0, $x0
; Check that we get SUBSXrx.
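; (The G_ZEXT plus G_SHL by 2 should fold into the extended-register form; the
; SUBSXrx operand 18 encodes the arithmetic extend, here uxtw #2.)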
; CHECK-LABEL: name: ssubo_arith_extended
; CHECK: liveins: $w0, $x0
; CHECK: %reg0:gpr64sp = COPY $x0
; CHECK: %reg1:gpr32 = COPY $w0
; CHECK: %add:gpr64 = SUBSXrx %reg0, %reg1, 18, implicit-def $nzcv
; CHECK: %flags:gpr32 = CSINCWr $wzr, $wzr, 7, implicit $nzcv
; CHECK: $x0 = COPY %add
; CHECK: RET_ReallyLR implicit $x0
%reg0:gpr(s64) = COPY $x0
%reg1:gpr(s32) = COPY $w0
%ext:gpr(s64) = G_ZEXT %reg1(s32)
%cst:gpr(s64) = G_CONSTANT i64 2
%shift:gpr(s64) = G_SHL %ext, %cst(s64)
%add:gpr(s64), %flags:gpr(s1) = G_SSUBO %reg0, %shift
$x0 = COPY %add(s64)
RET_ReallyLR implicit $x0
...