# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-apple-ios -run-pass=aarch64-prelegalizer-combiner %s -o - | FileCheck %s
# Check that we canonicalize concat_vectors(build_vector, build_vector)
# into build_vector.
---
# Positive case: G_CONCAT_VECTORS of two G_BUILD_VECTORs of s64 scalars.
# Per the CHECK lines below, the combiner replaces the concat with a single
# <4 x s64> G_BUILD_VECTOR that consumes the four scalar COPYs directly.
name: concat_to_build_vector
tracksRegLiveness: true
body: |
bb.1:
liveins: $x0, $x1, $x2, $x3
; CHECK-LABEL: name: concat_to_build_vector
; CHECK: liveins: $x0, $x1, $x2, $x3
; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x3
; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64)
; CHECK: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x s64>)
%0:_(s64) = COPY $x0
%1:_(s64) = COPY $x1
%2:_(s64) = COPY $x2
%3:_(s64) = COPY $x3
%4:_(<2 x s64>) = G_BUILD_VECTOR %0(s64), %1
%5:_(<2 x s64>) = G_BUILD_VECTOR %2(s64), %3
%6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
RET_ReallyLR implicit %6
...
# Same test as concat_to_build_vector but with pointer types.
---
# Pointer-typed variant of concat_to_build_vector: element type is p0
# instead of s64. The CHECK lines expect the same fold into one
# <4 x p0> G_BUILD_VECTOR, confirming the combine is not scalar-only.
name: concat_to_build_vector_ptr
tracksRegLiveness: true
body: |
bb.1:
liveins: $x0, $x1, $x2, $x3
; CHECK-LABEL: name: concat_to_build_vector_ptr
; CHECK: liveins: $x0, $x1, $x2, $x3
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
; CHECK: [[COPY3:%[0-9]+]]:_(p0) = COPY $x3
; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x p0>) = G_BUILD_VECTOR [[COPY]](p0), [[COPY1]](p0), [[COPY2]](p0), [[COPY3]](p0)
; CHECK: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x p0>)
%0:_(p0) = COPY $x0
%1:_(p0) = COPY $x1
%2:_(p0) = COPY $x2
%3:_(p0) = COPY $x3
%4:_(<2 x p0>) = G_BUILD_VECTOR %0(p0), %1
%5:_(<2 x p0>) = G_BUILD_VECTOR %2(p0), %3
%6:_(<4 x p0>) = G_CONCAT_VECTORS %4(<2 x p0>), %5
RET_ReallyLR implicit %6
...
# Check that we canonicalize concat_vectors(undef, undef) into undef.
---
# Both concat operands are G_IMPLICIT_DEF, so per the CHECK lines the
# whole concat collapses to a single wider <4 x s64> G_IMPLICIT_DEF.
name: concat_to_undef
tracksRegLiveness: true
body: |
bb.1:
; CHECK-LABEL: name: concat_to_undef
; CHECK: [[DEF:%[0-9]+]]:_(<4 x s64>) = G_IMPLICIT_DEF
; CHECK: RET_ReallyLR implicit [[DEF]](<4 x s64>)
%4:_(<2 x s64>) = G_IMPLICIT_DEF
%5:_(<2 x s64>) = G_IMPLICIT_DEF
%6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
RET_ReallyLR implicit %6
...
# Check that when combining concat_vectors(build_vector, undef) into
# build_vector, we correctly break the undef vector into a sequence
# of undef scalar.
---
# Mixed case: one operand is a G_BUILD_VECTOR, the other is undef. The
# CHECK lines expect the undef <2 x s64> operand to be expanded into two
# scalar G_IMPLICIT_DEF uses (both feeding the same [[DEF]] value) so
# the result is still a single flat <4 x s64> G_BUILD_VECTOR.
name: concat_to_build_vector_with_undef
tracksRegLiveness: true
body: |
bb.1:
liveins: $x0, $x1
; CHECK-LABEL: name: concat_to_build_vector_with_undef
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
; CHECK: [[DEF:%[0-9]+]]:_(s64) = G_IMPLICIT_DEF
; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[DEF]](s64), [[DEF]](s64)
; CHECK: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x s64>)
%0:_(s64) = COPY $x0
%1:_(s64) = COPY $x1
%4:_(<2 x s64>) = G_BUILD_VECTOR %0(s64), %1
%5:_(<2 x s64>) = G_IMPLICIT_DEF
%6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
RET_ReallyLR implicit %6
...
# Same as concat_to_build_vector_with_undef but with pointer types.
---
# Pointer-typed variant of concat_to_build_vector_with_undef: the undef
# <2 x p0> operand is broken into scalar p0 G_IMPLICIT_DEF uses, per the
# CHECK lines, yielding one flat <4 x p0> G_BUILD_VECTOR.
name: concat_to_build_vector_with_undef_ptr
tracksRegLiveness: true
body: |
bb.1:
liveins: $x0, $x1
; CHECK-LABEL: name: concat_to_build_vector_with_undef_ptr
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[DEF:%[0-9]+]]:_(p0) = G_IMPLICIT_DEF
; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x p0>) = G_BUILD_VECTOR [[COPY]](p0), [[COPY1]](p0), [[DEF]](p0), [[DEF]](p0)
; CHECK: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x p0>)
%0:_(p0) = COPY $x0
%1:_(p0) = COPY $x1
%4:_(<2 x p0>) = G_BUILD_VECTOR %0(p0), %1
%5:_(<2 x p0>) = G_IMPLICIT_DEF
%6:_(<4 x p0>) = G_CONCAT_VECTORS %4(<2 x p0>), %5
RET_ReallyLR implicit %6
...
# Check that we keep a concat_vectors as soon as one of the operand is
# not undef or build_vector. I.e., we cannot flatten the concat_vectors.
---
# Negative test: the first concat operand is a plain COPY from $q0 —
# neither a G_BUILD_VECTOR nor fully undef — so the combiner must NOT
# flatten. The CHECK lines assert the G_CONCAT_VECTORS survives unchanged.
name: concat_to_build_vector_negative_test
tracksRegLiveness: true
body: |
bb.1:
liveins: $q0
; CHECK-LABEL: name: concat_to_build_vector_negative_test
; CHECK: liveins: $q0
; CHECK: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
; CHECK: [[DEF:%[0-9]+]]:_(<2 x s64>) = G_IMPLICIT_DEF
; CHECK: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s64>) = G_CONCAT_VECTORS [[COPY]](<2 x s64>), [[DEF]](<2 x s64>)
; CHECK: RET_ReallyLR implicit [[CONCAT_VECTORS]](<4 x s64>)
%4:_(<2 x s64>) = COPY $q0
%5:_(<2 x s64>) = G_IMPLICIT_DEF
%6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
RET_ReallyLR implicit %6
...