# RUN: llc -mtriple=amdgcn -run-pass si-fix-sgpr-copies %s -o - | FileCheck %s -check-prefixes=GCN
# RUN: llc -mtriple=amdgcn -passes=si-fix-sgpr-copies %s -o - | FileCheck %s -check-prefixes=GCN
---
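# The scalar S_ADD_I32 feeding the PHI should survive the pass regardless of
# the order in which the PHIs are visited.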
name: phi_visit_order
tracksRegLiveness: true
registers:
  - { id: 0, class: sreg_32_xm0 }
  - { id: 1, class: sreg_64 }
  - { id: 2, class: sreg_32_xm0 }
  - { id: 7, class: vgpr_32 }
  - { id: 8, class: sreg_32_xm0 }
  - { id: 9, class: vgpr_32 }
  - { id: 10, class: sreg_64 }
  - { id: 11, class: sreg_32_xm0 }
body: |
  ; GCN-LABEL: name: phi_visit_order
  ; GCN: S_ADD_I32
  bb.0:
    liveins: $vgpr0
    %7 = COPY $vgpr0
    %8 = S_MOV_B32 0

  bb.1:
    %0 = PHI %8, %bb.0, %0, %bb.1, %2, %bb.2
    %9 = V_MOV_B32_e32 9, implicit $exec
    %10 = V_CMP_EQ_U32_e64 %7, %9, implicit $exec
    %1 = SI_IF %10, %bb.2, implicit-def $exec, implicit-def $scc, implicit $exec
    S_BRANCH %bb.1

  bb.2:
    SI_END_CF %1, implicit-def $exec, implicit-def $scc, implicit $exec
    %11 = S_MOV_B32 1
    %2 = S_ADD_I32 %0, %11, implicit-def $scc
    S_BRANCH %bb.1
...
---
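# An illegal VGPR-to-SGPR copy whose only use is the implicit operand on
# S_ENDPGM is turned into an IMPLICIT_DEF, and the implicit use is rewritten
# to the VGPR source.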
# GCN-LABEL: name: dead_illegal_virtreg_copy
# GCN: %0:vgpr_32 = COPY $vgpr0
# GCN: %1:sreg_32_xm0 = IMPLICIT_DEF
# GCN: S_ENDPGM 0, implicit %0
name: dead_illegal_virtreg_copy
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0
    %0:vgpr_32 = COPY $vgpr0
    %1:sreg_32_xm0 = COPY %0
    S_ENDPGM 0, implicit %1
...
---
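# Same as above, but copying from a physical VGPR: the SGPR def becomes an
# IMPLICIT_DEF and the implicit use is rewritten to a new VGPR copy of $vgpr0.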
# GCN-LABEL: name: dead_illegal_physreg_copy
# GCN: %2:vgpr_32 = COPY $vgpr0
# GCN: %1:sreg_32_xm0 = IMPLICIT_DEF
# GCN: S_ENDPGM 0, implicit %2
name: dead_illegal_physreg_copy
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0
    %0:sreg_32_xm0 = COPY $vgpr0
    %1:sreg_32_xm0 = COPY %0
    S_ENDPGM 0, implicit %1
...
# Make sure there's no assert when looking at the implicit use on S_ENDPGM
# GCN-LABEL: name: s_to_v_copy_implicit_use
# GCN: %0:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM undef %1:sreg_64, 0, 0 :: (load (s32), addrspace 4)
# GCN-NEXT: %2:vgpr_32 = COPY %0
# GCN-NEXT: S_ENDPGM 0, implicit %2
---
name: s_to_v_copy_implicit_use
tracksRegLiveness: true
body: |
  bb.0:
    %0:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM undef %2:sreg_64, 0, 0 :: (load (s32), addrspace 4)
    %1:vgpr_32 = COPY %0
    S_ENDPGM 0, implicit %1
...
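# Regression test for looking up a debug location at an end iterator while
# rewriting PHI inputs; the checks pin the rewritten PHIs and the S_MOV_B32 0
# inserted for the %8 PHI.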
# GCN-LABEL: name: find_debug_loc_end_iterator_regression
# GCN: %6:vreg_1 = COPY %4
# GCN: %14:sgpr_32 = S_MOV_B32 0
# GCN: %7:vgpr_32 = PHI %5, %bb.0, %1, %bb.3
# GCN: %8:sreg_32 = PHI %14, %bb.0, %9, %bb.3
# GCN: %11:sreg_64 = PHI %10, %bb.1, %12, %bb.2
# GCN: %13:sreg_64 = COPY %6
---
name: find_debug_loc_end_iterator_regression
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr2, $vgpr3
    %0:vgpr_32 = COPY $vgpr3
    %1:vgpr_32 = COPY $vgpr2
    %2:sreg_64 = V_CMP_EQ_U32_e64 killed %0, 1, implicit $exec
    %3:sreg_64 = S_MOV_B64 -1
    %4:sreg_64 = S_XOR_B64 killed %2, killed %3, implicit-def dead $scc
    %5:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %6:vreg_1 = COPY %4

  bb.1:
    %7:vgpr_32 = PHI %5, %bb.0, %1, %bb.3
    %8:sreg_32 = PHI %5, %bb.0, %9, %bb.3
    %10:sreg_64 = S_MOV_B64 0

  bb.2:
    %11:sreg_64 = PHI %10, %bb.1, %12, %bb.2
    %13:sreg_64 = COPY %6
    %12:sreg_64 = SI_IF_BREAK %13, %11, implicit-def dead $scc
    SI_LOOP %12, %bb.2, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    S_BRANCH %bb.3

  bb.3:
    %9:sreg_32 = S_OR_B32 %8, 1, implicit-def dead $scc
    S_CBRANCH_VCCNZ %bb.1, implicit undef $vcc
    S_BRANCH %bb.4

  bb.4:
    SI_RETURN
...