llvm-for-llvmta/test/CodeGen/AArch64/GlobalISel/prelegalizercombiner-xor-of...

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64 -debugify-and-strip-all-safe -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
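#
# Test that the pre-legalizer combiner folds (xor (and x, y), y) into
# (and (not x), y), where (not x) is materialized as (xor x, -1). The fold
# is exercised for scalars and vectors and for commuted G_AND/G_XOR operands,
# and is skipped when the G_AND shares no register with the G_XOR or when the
# G_AND has more than one use.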
...
---
name: fold_scalar
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor (and x, y), y) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_scalar
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %x, %y
    %xor:_(s32) = G_XOR %and, %y
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: fold_vector
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $x1
    ; Vector edition
    ; CHECK-LABEL: name: fold_vector
    ; CHECK: liveins: $x0, $x1
    ; CHECK: %x:_(<2 x s32>) = COPY $x0
    ; CHECK: %y:_(<2 x s32>) = COPY $x1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[C]](s32), [[C]](s32)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s32>) = G_XOR %x, [[BUILD_VECTOR]]
    ; CHECK: %xor:_(<2 x s32>) = G_AND [[XOR]], %y
    ; CHECK: $x0 = COPY %xor(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $x0
    %x:_(<2 x s32>) = COPY $x0
    %y:_(<2 x s32>) = COPY $x1
    %and:_(<2 x s32>) = G_AND %x, %y
    %xor:_(<2 x s32>) = G_XOR %and, %y
    $x0 = COPY %xor(<2 x s32>)
    RET_ReallyLR implicit $x0
...
---
name: fold_commuted_and
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor (and y, x), y) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_commuted_and
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %y, %x
    %xor:_(s32) = G_XOR %and, %y
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: fold_commuted_xor
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor y, (and x, y)) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_commuted_xor
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %x, %y
    %xor:_(s32) = G_XOR %y, %and
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: fold_commuted_xor_and
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor y, (and y, x)) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_commuted_xor_and
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %y, %x
    %xor:_(s32) = G_XOR %y, %and
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: dont_fold_different_regs
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $w2
    ; The G_AND does not share any registers with the G_XOR
    ; CHECK-LABEL: name: dont_fold_different_regs
    ; CHECK: liveins: $w0, $w1, $w2
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: %z:_(s32) = COPY $w2
    ; CHECK: %and:_(s32) = G_AND %x, %z
    ; CHECK: %xor:_(s32) = G_XOR %and, %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %z:_(s32) = COPY $w2
    %and:_(s32) = G_AND %x, %z
    %xor:_(s32) = G_XOR %and, %y
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: dont_fold_more_than_one_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $w2
    ; Don't fold when the G_AND is used outside the G_XOR.
    ;
    ; CHECK-LABEL: name: dont_fold_more_than_one_use
    ; CHECK: liveins: $w0, $w1, $w2
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: %z:_(s32) = COPY $w2
    ; CHECK: %and:_(s32) = G_AND %x, %z
    ; CHECK: %xor:_(s32) = G_XOR %and, %y
    ; CHECK: %add:_(s32) = G_ADD %and, %xor
    ; CHECK: $w0 = COPY %add(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %z:_(s32) = COPY $w2
    %and:_(s32) = G_AND %x, %z
    %xor:_(s32) = G_XOR %and, %y
    %add:_(s32) = G_ADD %and, %xor
    $w0 = COPY %add(s32)
    RET_ReallyLR implicit $w0
...