# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
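#
# Verify that G_INTRINSIC_W_SIDE_EFFECTS calls to @llvm.aarch64.stlxr are
# selected to the STLXRB/STLXRH/STLXRW/STLXRX store-release exclusive
# instructions for i8/i16/i32/i64 values respectively.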
--- |
  define i32 @test_store_release_i64(i32 %a, i64* %addr) {
    ret i32 %a
  }

  define i32 @test_store_release_i32(i32 %a, i64* %addr) {
    ret i32 %a
  }

  define void @test_store_release_i8(i32, i8 %val, i8* %addr) { ret void }
  define void @test_store_release_i16(i32, i16 %val, i16* %addr) { ret void }
...
---
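# i64 case: the 64-bit value is stored directly with STLXRX; no conversion
# of the stored value is needed.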
name: test_store_release_i64
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $x1, $x2

    ; CHECK-LABEL: name: test_store_release_i64
    ; CHECK: liveins: $w0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr64 = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %2:gpr32 = STLXRX [[COPY]], [[COPY1]] :: (volatile store 8 into %ir.addr)
    ; CHECK: $w0 = COPY %2
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s64) = COPY $x1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %1(s64), %2(p0) :: (volatile store 8 into %ir.addr)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
---
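# i32 case: the G_ZEXT feeding the intrinsic is folded away, so STLXRW
# stores the original 32-bit register directly.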
name: test_store_release_i32
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i32
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %3:gpr32 = STLXRW [[COPY]], [[COPY1]] :: (volatile store 4 into %ir.addr)
    ; CHECK: $w0 = COPY %3
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s64) = G_ZEXT %1(s32)
    %4:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %3(s64), %2(p0) :: (volatile store 4 into %ir.addr)
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0

...
---
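# i8 case: the (G_ANYEXT + G_AND 255) pattern is matched, and the value is
# narrowed back to a gpr32 via INSERT_SUBREG and a sub_32 COPY before STLXRB.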
name: test_store_release_i8
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i8
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRB [[COPY2]], [[COPY1]] :: (volatile store 1 into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 255
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store 1 into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...
---
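# i16 case: same as i8, but masking with 65535 and selecting STLXRH.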
name: test_store_release_i16
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i16
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRH [[COPY2]], [[COPY1]] :: (volatile store 2 into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 65535
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store 2 into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...