# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-unknown-unknown -run-pass=instruction-select -global-isel-abort=1 -verify-machineinstrs %s -o - | FileCheck %s
#
# Check folding an AND into a G_BRCOND which has been matched as a TB(N)Z.
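#
# The fold is sound whenever the tested bit is set in the AND mask: in that
# case, bit B of (x & mask) equals bit B of x, so the AND is redundant for
# the purposes of the bit test. As a sketch of the rule checked below:
#   tbnz (and x, 0b1000), 3 --> tbnz x, 3   (bit 3 of the mask is set)
#   tbnz (and x, 0b0111), 3 stays as-is     (bit 3 of the mask is clear)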
...
---
name: fold_and_rhs
alignment: 4
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: fold_and_rhs
  ; CHECK: bb.0:
  ; CHECK:   successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK:   %copy:gpr64all = COPY $x0
  ; CHECK:   [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK:   TBNZW [[COPY1]], 3, %bb.1
  ; CHECK:   B %bb.0
  ; CHECK: bb.1:
  ; CHECK:   RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s64) = COPY $x0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %fold_cst:gpr(s64) = G_CONSTANT i64 8
    ; tbnz (and x, 8), 3 == tbnz x, 3 because bit 3 of (x & 8) is 1 exactly
    ; when bit 3 of x is 1, so the AND can be folded away.
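    ; For example, x = 0b1111 gives (x & 8) = 0b1000, where bit 3 is 1, and
    ; x = 0b0111 gives (x & 8) = 0, where bit 3 is 0; both match bit 3 of x.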
    %fold_me:gpr(s64) = G_AND %copy, %fold_cst
    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp, %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
---
name: fold_and_lhs
alignment: 4
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: fold_and_lhs
  ; CHECK: bb.0:
  ; CHECK:   successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK:   %copy:gpr64all = COPY $x0
  ; CHECK:   [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK:   TBNZW [[COPY1]], 3, %bb.1
  ; CHECK:   B %bb.0
  ; CHECK: bb.1:
  ; CHECK:   RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s64) = COPY $x0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %fold_cst:gpr(s64) = G_CONSTANT i64 8
    ; Same as above, but with the constant on the other side of the G_AND.
    %fold_me:gpr(s64) = G_AND %fold_cst, %copy
    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp, %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
---
name: dont_fold_and
alignment: 4
legalized: true
regBankSelected: true
body: |
  ; CHECK-LABEL: name: dont_fold_and
  ; CHECK: bb.0:
  ; CHECK:   successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK:   %copy:gpr64 = COPY $x0
  ; CHECK:   %fold_me:gpr64sp = ANDXri %copy, 4098
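  ; Note: 4098 is the AArch64 logical-immediate encoding of the 64-bit
  ; value 7 (N=1, immr=0, imms=2).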
  ; CHECK:   [[COPY:%[0-9]+]]:gpr32all = COPY %fold_me.sub_32
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK:   TBNZW [[COPY1]], 3, %bb.1
  ; CHECK:   B %bb.0
  ; CHECK: bb.1:
  ; CHECK:   RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s64) = COPY $x0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    ; tbnz (and x, 7), 3 != tbnz x, 3, because bit 3 of (x & 7) is always
    ; zero, so the AND cannot be folded away and must be selected.
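    ; For example, x = 0b1000 has bit 3 set, but (x & 7) = 0 has bit 3 clear,
    ; so eliding the AND here would change the branch.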
    %fold_cst:gpr(s64) = G_CONSTANT i64 7
    %fold_me:gpr(s64) = G_AND %copy, %fold_cst
    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp, %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...