; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx90a -O0 -global-isel=true %s -o - | FileCheck %s
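; This looks like a reduced regression test: it runs GlobalISel at -O0 on a
; <4 x i8> udiv whose divisor is all zeroes (division by zero is undefined
; behavior in LLVM IR), so the interesting property is presumably that
; legalization and instruction selection complete without crashing; the
; autogenerated CHECK lines only pin down the code currently emitted.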
define void @test(ptr %p) {
; CHECK-LABEL: test:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: v_mov_b32_e32 v2, v1
; CHECK-NEXT: ; kill: def $vgpr0 killed $vgpr0 def $vgpr0_vgpr1 killed $exec
; CHECK-NEXT: v_mov_b32_e32 v1, v2
; CHECK-NEXT: s_mov_b32 s5, 16
; CHECK-NEXT: s_mov_b32 s6, 0
; CHECK-NEXT: v_mov_b32_e32 v2, s6
; CHECK-NEXT: v_cvt_f32_ubyte0_e64 v2, v2
; CHECK-NEXT: v_rcp_iflag_f32_e64 v2, v2
; CHECK-NEXT: s_mov_b32 s4, 0x4f7ffffe
; CHECK-NEXT: v_mov_b32_e32 v3, s4
; CHECK-NEXT: v_mul_f32_e64 v2, v2, v3
; CHECK-NEXT: v_cvt_u32_f32_e64 v2, v2
; CHECK-NEXT: s_mov_b32 s7, 0
; CHECK-NEXT: v_mov_b32_e32 v3, s7
; CHECK-NEXT: v_mul_hi_u32 v3, v2, v3
; CHECK-NEXT: v_add_u32_e64 v2, v2, v3
; CHECK-NEXT: v_mov_b32_e32 v3, s5
; CHECK-NEXT: v_mul_hi_u32 v2, v2, v3
; CHECK-NEXT: s_mov_b32 s7, 2
; CHECK-NEXT: v_mov_b32_e32 v3, s7
; CHECK-NEXT: v_add_u32_e64 v2, v2, v3
; CHECK-NEXT: v_mov_b32_e32 v3, s6
; CHECK-NEXT: v_cvt_f32_ubyte0_e64 v3, v3
; CHECK-NEXT: v_rcp_iflag_f32_e64 v3, v3
; CHECK-NEXT: v_mov_b32_e32 v4, s4
; CHECK-NEXT: v_mul_f32_e64 v3, v3, v4
; CHECK-NEXT: v_cvt_u32_f32_e64 v3, v3
; CHECK-NEXT: s_mov_b32 s7, 0
; CHECK-NEXT: v_mov_b32_e32 v4, s7
; CHECK-NEXT: v_mul_hi_u32 v4, v3, v4
; CHECK-NEXT: v_add_u32_e64 v3, v3, v4
; CHECK-NEXT: v_mov_b32_e32 v4, s5
; CHECK-NEXT: v_mul_hi_u32 v3, v3, v4
; CHECK-NEXT: s_mov_b32 s7, 2
; CHECK-NEXT: v_mov_b32_e32 v4, s7
; CHECK-NEXT: v_add_u32_e64 v6, v3, v4
; CHECK-NEXT: v_mov_b32_e32 v3, s6
; CHECK-NEXT: v_cvt_f32_ubyte0_e64 v3, v3
; CHECK-NEXT: v_rcp_iflag_f32_e64 v3, v3
; CHECK-NEXT: v_mov_b32_e32 v4, s4
; CHECK-NEXT: v_mul_f32_e64 v3, v3, v4
; CHECK-NEXT: v_cvt_u32_f32_e64 v3, v3
; CHECK-NEXT: s_mov_b32 s7, 0
; CHECK-NEXT: v_mov_b32_e32 v4, s7
; CHECK-NEXT: v_mul_hi_u32 v4, v3, v4
; CHECK-NEXT: v_add_u32_e64 v3, v3, v4
; CHECK-NEXT: v_mov_b32_e32 v4, s5
; CHECK-NEXT: v_mul_hi_u32 v3, v3, v4
; CHECK-NEXT: s_mov_b32 s7, 2
; CHECK-NEXT: v_mov_b32_e32 v4, s7
; CHECK-NEXT: v_add_u32_e64 v3, v3, v4
; CHECK-NEXT: v_mov_b32_e32 v4, s6
; CHECK-NEXT: v_cvt_f32_ubyte0_e64 v4, v4
; CHECK-NEXT: v_rcp_iflag_f32_e64 v4, v4
; CHECK-NEXT: v_mov_b32_e32 v5, s4
; CHECK-NEXT: v_mul_f32_e64 v4, v4, v5
; CHECK-NEXT: v_cvt_u32_f32_e64 v4, v4
; CHECK-NEXT: s_mov_b32 s4, 0
; CHECK-NEXT: v_mov_b32_e32 v5, s4
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
; CHECK-NEXT: v_add_u32_e64 v4, v4, v5
; CHECK-NEXT: v_mov_b32_e32 v5, s5
; CHECK-NEXT: v_mul_hi_u32 v4, v4, v5
; CHECK-NEXT: s_mov_b32 s4, 2
; CHECK-NEXT: v_mov_b32_e32 v5, s4
; CHECK-NEXT: v_add_u32_e64 v4, v4, v5
; CHECK-NEXT: s_mov_b32 s4, 0xff
; CHECK-NEXT: v_mov_b32_e32 v5, s4
; CHECK-NEXT: v_mov_b32_e32 v7, s4
; CHECK-NEXT: v_and_b32_e64 v7, v6, v7
; CHECK-NEXT: s_mov_b32 s6, 8
; CHECK-NEXT: v_mov_b32_e32 v6, s6
; CHECK-NEXT: v_lshlrev_b32_e64 v6, v6, v7
; CHECK-NEXT: v_and_or_b32 v2, v2, v5, v6
; CHECK-NEXT: v_mov_b32_e32 v5, s4
; CHECK-NEXT: v_and_b32_e64 v5, v3, v5
; CHECK-NEXT: v_mov_b32_e32 v3, s5
; CHECK-NEXT: v_lshlrev_b32_e64 v3, v3, v5
; CHECK-NEXT: v_mov_b32_e32 v5, s4
; CHECK-NEXT: v_and_b32_e64 v5, v4, v5
; CHECK-NEXT: s_mov_b32 s4, 24
; CHECK-NEXT: v_mov_b32_e32 v4, s4
; CHECK-NEXT: v_lshlrev_b32_e64 v4, v4, v5
; CHECK-NEXT: v_or3_b32 v2, v2, v3, v4
; CHECK-NEXT: flat_store_dword v[0:1], v2
; CHECK-NEXT: s_waitcnt vmcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %B = udiv <4 x i8> splat (i8 16), zeroinitializer
  store <4 x i8> %B, ptr %p, align 4
  ret void
}