# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -debugify-and-strip-all-safe -mtriple arm64-apple-ios -O0 -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombinerhelper-only-enable-rule="overlapping_and" -global-isel -verify-machineinstrs %s -o - | FileCheck %s
# REQUIRES: asserts
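# These tests exercise the overlapping_and combine: chained G_ANDs with
# constant masks are collapsed into a single G_AND over the intersection of
# the masks (or into a constant zero when the masks do not overlap).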
---
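# (x & -128) & 255: the constant masks overlap only in bit 7
# (-128 & 255 == 128), so the two G_ANDs fold into a single x & 128.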
name: bitmask_overlap1
body: |
  bb.1:
    ; CHECK-LABEL: name: bitmask_overlap1
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s32) = COPY $w0
    %1:_(s32) = G_CONSTANT i32 -128
    %3:_(s32) = G_CONSTANT i32 255
    %2:_(s32) = G_AND %0, %1
    %4:_(s32) = G_AND %2, %3
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
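# (255 & x) & -128: same overlapping masks with the inner G_AND's operands
# commuted; still folds to a single x & 128.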
name: bitmask_overlap2
body: |
  bb.1:
    ; CHECK-LABEL: name: bitmask_overlap2
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s32) = COPY $w0
    %1:_(s32) = G_CONSTANT i32 255
    %3:_(s32) = G_CONSTANT i32 -128
    %2:_(s32) = G_AND %1, %0
    %4:_(s32) = G_AND %2, %3
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
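# -128 & (255 & x): operands of both G_ANDs commuted; still folds to x & 128.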
name: bitmask_overlap3
body: |
  bb.1:
    ; CHECK-LABEL: name: bitmask_overlap3
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s32) = COPY $w0
    %1:_(s32) = G_CONSTANT i32 255
    %3:_(s32) = G_CONSTANT i32 -128
    %2:_(s32) = G_AND %1, %0
    %4:_(s32) = G_AND %3, %2
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
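# -128 & (x & 255): only the outer G_AND's operands commuted; still folds to
# x & 128.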
name: bitmask_overlap4
body: |
  bb.1:
    ; CHECK-LABEL: name: bitmask_overlap4
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; CHECK: $w0 = COPY [[AND]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s32) = COPY $w0
    %1:_(s32) = G_CONSTANT i32 255
    %3:_(s32) = G_CONSTANT i32 -128
    %2:_(s32) = G_AND %0, %1
    %4:_(s32) = G_AND %3, %2
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
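# (x & 1) & 2: the masks share no bits (1 & 2 == 0), so the result folds to a
# constant zero.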
name: bitmask_no_overlap
body: |
  bb.1:
    ; CHECK-LABEL: name: bitmask_no_overlap
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; CHECK: $w0 = COPY [[C]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s32) = COPY $w0
    %1:_(s32) = G_CONSTANT i32 1
    %3:_(s32) = G_CONSTANT i32 2
    %2:_(s32) = G_AND %0, %1
    %4:_(s32) = G_AND %2, %3
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
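# The inner G_AND (x & 255) has an extra use in the G_STORE, so it is kept,
# but the value returned in $w0 is still rewritten to x & 128.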
name: bitmask_overlap_extrause
body: |
  bb.1:
    ; CHECK-LABEL: name: bitmask_overlap_extrause
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; CHECK: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]]
    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 128
    ; CHECK: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C1]]
    ; CHECK: G_STORE [[AND]](s32), [[COPY1]](p0) :: (store (s32))
    ; CHECK: $w0 = COPY [[AND1]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s32) = COPY $w0
    %1:_(p0) = COPY $x1
    %2:_(s32) = G_CONSTANT i32 255
    %4:_(s32) = G_CONSTANT i32 -128
    %3:_(s32) = G_AND %0, %2
    %5:_(s32) = G_AND %3, %4
    G_STORE %3(s32), %1(p0) :: (store (s32))
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...