; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s

declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)
declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
declare i64 @llvm.riscv.vsetvli.opt.i64(i64, i64, i64)
declare i64 @llvm.riscv.vsetvlimax.opt.i64(i64, i64)
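
; The vsetvli/vsetvlimax intrinsics take the SEW encoded as 0/1/2/3 for
; e8/e16/e32/e64 and the LMUL in the vtype encoding (0=m1, 1=m2, 2=m4,
; 6=mf4); the vsetvli arguments in the checks below reflect this mapping.
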
define void @test_vsetvli_e8m1(i64 %avl) nounwind {
; CHECK-LABEL: test_vsetvli_e8m1:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, a0, e8, m1, ta, mu
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 0, i64 0)
ret void
}

define void @test_vsetvli_e16mf4(i64 %avl) nounwind {
; CHECK-LABEL: test_vsetvli_e16mf4:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, a0, e16, mf4, ta, mu
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 1, i64 6)
ret void
}
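
; An AVL of 0 fits the 5-bit immediate, so this selects the vsetivli form.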
define void @test_vsetvli_e32mf8_zero_avl() nounwind {
; CHECK-LABEL: test_vsetvli_e32mf8_zero_avl:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetivli a0, 0, e16, mf4, ta, mu
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvli.i64(i64 0, i64 1, i64 6)
ret void
}
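
; vsetvlimax takes no AVL; VLMAX is requested by selecting vsetvli with x0
; (zero) as the AVL operand.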
define void @test_vsetvlimax_e32m2() nounwind {
; CHECK-LABEL: test_vsetvlimax_e32m2:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, mu
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
ret void
}

define void @test_vsetvlimax_e64m4() nounwind {
; CHECK-LABEL: test_vsetvlimax_e64m4:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, zero, e64, m4, ta, mu
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
ret void
}
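
; Unlike the plain intrinsics above, which keep their vsetvli even when the
; result is discarded, the .opt variants only produce a VL and can be removed
; entirely when the result is unused (see the *_nouse tests).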
define i64 @test_vsetvli_opt_e8m1(i64 %avl) nounwind {
; CHECK-LABEL: test_vsetvli_opt_e8m1:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, a0, e8, m1, ta, mu
; CHECK-NEXT: ret
%vl = call i64 @llvm.riscv.vsetvli.opt.i64(i64 %avl, i64 0, i64 0)
ret i64 %vl
}

; Check that we remove the intrinsic if it's unused.
define void @test_vsetvli_opt_e8m1_nouse(i64 %avl) nounwind {
; CHECK-LABEL: test_vsetvli_opt_e8m1_nouse:
; CHECK: # %bb.0:
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvli.opt.i64(i64 %avl, i64 0, i64 0)
ret void
}

define i64 @test_vsetvli_opt_e16mf4(i64 %avl) nounwind {
; CHECK-LABEL: test_vsetvli_opt_e16mf4:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, a0, e16, mf4, ta, mu
; CHECK-NEXT: ret
%vl = call i64 @llvm.riscv.vsetvli.opt.i64(i64 %avl, i64 1, i64 6)
ret i64 %vl
}

define i64 @test_vsetvli_opt_e32mf8_zero_avl() nounwind {
; CHECK-LABEL: test_vsetvli_opt_e32mf8_zero_avl:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetivli a0, 0, e16, mf4, ta, mu
; CHECK-NEXT: ret
%vl = call i64 @llvm.riscv.vsetvli.opt.i64(i64 0, i64 1, i64 6)
ret i64 %vl
}

define i64 @test_vsetvlimax_opt_e32m2() nounwind {
; CHECK-LABEL: test_vsetvlimax_opt_e32m2:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, mu
; CHECK-NEXT: ret
%vl = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 2, i64 1)
ret i64 %vl
}

define void @test_vsetvlimax_opt_e32m2_nouse() nounwind {
; CHECK-LABEL: test_vsetvlimax_opt_e32m2_nouse:
; CHECK: # %bb.0:
; CHECK-NEXT: ret
call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 2, i64 1)
ret void
}

define i64 @test_vsetvlimax_opt_e64m4() nounwind {
; CHECK-LABEL: test_vsetvlimax_opt_e64m4:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, zero, e64, m4, ta, mu
; CHECK-NEXT: ret
%vl = call i64 @llvm.riscv.vsetvlimax.opt.i64(i64 3, i64 2)
ret i64 %vl
}

declare <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i64(<vscale x 4 x i32>, <vscale x 4 x i32>*, i64)
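; The vle intrinsic takes a passthru operand, the source pointer, and the VL.
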
; Check that we remove the redundant vsetvli when followed by another operation.
define <vscale x 4 x i32> @redundant_vsetvli(i64 %avl, <vscale x 4 x i32>* %ptr) nounwind {
; CHECK-LABEL: redundant_vsetvli:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, a0, e32, m2, ta, mu
; CHECK-NEXT: vle32.v v8, (a1)
; CHECK-NEXT: ret
%vl = call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 2, i64 1)
%x = call <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i64(<vscale x 4 x i32> undef, <vscale x 4 x i32>* %ptr, i64 %vl)
ret <vscale x 4 x i32> %x
}

; Check that we remove the repeated/redundant vsetvli when followed by another
; operation.
; FIXME: We don't catch the second vsetvli because it has a use of its output.
; We could replace it with the output of the first vsetvli.
define <vscale x 4 x i32> @repeated_vsetvli(i64 %avl, <vscale x 4 x i32>* %ptr) nounwind {
; CHECK-LABEL: repeated_vsetvli:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetvli a0, a0, e32, m2, ta, mu
; CHECK-NEXT: vsetvli a0, a0, e32, m2, ta, mu
; CHECK-NEXT: vle32.v v8, (a1)
; CHECK-NEXT: ret
%vl0 = call i64 @llvm.riscv.vsetvli.i64(i64 %avl, i64 2, i64 1)
%vl1 = call i64 @llvm.riscv.vsetvli.i64(i64 %vl0, i64 2, i64 1)
%x = call <vscale x 4 x i32> @llvm.riscv.vle.nxv4i32.i64(<vscale x 4 x i32> undef, <vscale x 4 x i32>* %ptr, i64 %vl1)
ret <vscale x 4 x i32> %x
}