; File: llvm/test/CodeGen/X86/shuffle-half.ll
; Provenance: Debian package llvm-toolchain-19, version 1:19.1.7-3.
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc < %s -mtriple=x86_64-linux-gnu -mattr=avx512vl | FileCheck %s

; Masked load of <32 x half> with a poison mask and an all-zero passthrough.
; With only -mattr=avx512vl (presumably no 16-bit-element masked-load support
; without AVX512BW/FP16 -- TODO confirm against X86 masked-load legalization),
; the load is scalarized into 32 guarded single-element loads. Because the
; mask is poison, every guard below degenerates to xor/testb on a zero
; register; the per-element broadcast + blend + reinsert structure is what
; the assertions pin down.
define <32 x half> @dump_vec() {
; CHECK-LABEL: dump_vec:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpxor %xmm0, %xmm0, %xmm0
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_2
; CHECK-NEXT:  # %bb.1: # %cond.load
; CHECK-NEXT:    vpinsrw $0, (%rax), %xmm0, %xmm0
; CHECK-NEXT:    vmovd {{.*#+}} xmm1 = [65535,0,0,0]
; CHECK-NEXT:    vpand %ymm0, %ymm1, %ymm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
; CHECK-NEXT:  .LBB0_2: # %else
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_4
; CHECK-NEXT:  # %bb.3: # %cond.load1
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_4: # %else2
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_6
; CHECK-NEXT:  # %bb.5: # %cond.load4
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1],xmm1[2],xmm0[3,4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_6: # %else5
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_8
; CHECK-NEXT:  # %bb.7: # %cond.load7
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_8: # %else8
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_10
; CHECK-NEXT:  # %bb.9: # %cond.load10
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3],xmm1[4],xmm0[5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_10: # %else11
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_12
; CHECK-NEXT:  # %bb.11: # %cond.load13
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_12: # %else14
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_14
; CHECK-NEXT:  # %bb.13: # %cond.load16
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5],xmm1[6],xmm0[7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_14: # %else17
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_16
; CHECK-NEXT:  # %bb.15: # %cond.load19
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,6],xmm1[7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_16: # %else20
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_18
; CHECK-NEXT:  # %bb.17: # %cond.load22
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1,2,3,4,5,6,7],ymm1[8],ymm0[9,10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_18: # %else23
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_20
; CHECK-NEXT:  # %bb.19: # %cond.load25
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3,4,5,6,7,8],ymm1[9],ymm0[10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_20: # %else26
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_22
; CHECK-NEXT:  # %bb.21: # %cond.load28
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4,5,6,7,8,9],ymm1[10],ymm0[11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_22: # %else29
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_24
; CHECK-NEXT:  # %bb.23: # %cond.load31
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6,7,8,9,10],ymm1[11],ymm0[12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_24: # %else32
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_26
; CHECK-NEXT:  # %bb.25: # %cond.load34
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6,7,8,9,10,11],ymm1[12],ymm0[13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_26: # %else35
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_28
; CHECK-NEXT:  # %bb.27: # %cond.load37
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7,8,9,10,11,12],ymm1[13],ymm0[14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_28: # %else38
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_30
; CHECK-NEXT:  # %bb.29: # %cond.load40
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7,8,9,10,11,12,13],ymm1[14],ymm0[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_30: # %else41
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_32
; CHECK-NEXT:  # %bb.31: # %cond.load43
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5,6],ymm1[7],ymm0[8,9,10,11,12,13,14],ymm1[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:  .LBB0_32: # %else44
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_34
; CHECK-NEXT:  # %bb.33: # %cond.load46
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1,2,3,4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_34: # %else47
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_36
; CHECK-NEXT:  # %bb.35: # %cond.load49
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3,4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_36: # %else50
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_38
; CHECK-NEXT:  # %bb.37: # %cond.load52
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2],xmm2[3,4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_38: # %else53
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_40
; CHECK-NEXT:  # %bb.39: # %cond.load55
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3],xmm2[4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_40: # %else56
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_42
; CHECK-NEXT:  # %bb.41: # %cond.load58
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3],xmm1[4],xmm2[5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_42: # %else59
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_44
; CHECK-NEXT:  # %bb.43: # %cond.load61
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5],xmm2[6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_44: # %else62
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_46
; CHECK-NEXT:  # %bb.45: # %cond.load64
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5],xmm1[6],xmm2[7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_46: # %else65
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_48
; CHECK-NEXT:  # %bb.47: # %cond.load67
; CHECK-NEXT:    vpbroadcastw (%rax), %xmm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5,6],xmm1[7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_48: # %else68
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_50
; CHECK-NEXT:  # %bb.49: # %cond.load70
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1,2,3,4,5,6,7],ymm1[8],ymm2[9,10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_50: # %else71
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_52
; CHECK-NEXT:  # %bb.51: # %cond.load73
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4,5,6,7,8],ymm1[9],ymm2[10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_52: # %else74
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_54
; CHECK-NEXT:  # %bb.53: # %cond.load76
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4,5,6,7,8,9],ymm1[10],ymm2[11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_54: # %else77
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_56
; CHECK-NEXT:  # %bb.55: # %cond.load79
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6,7,8,9,10],ymm1[11],ymm2[12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_56: # %else80
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_58
; CHECK-NEXT:  # %bb.57: # %cond.load82
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4],ymm2[5,6,7,8,9,10,11],ymm1[12],ymm2[13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_58: # %else83
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_60
; CHECK-NEXT:  # %bb.59: # %cond.load85
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7,8,9,10,11,12],ymm1[13],ymm2[14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_60: # %else86
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_62
; CHECK-NEXT:  # %bb.61: # %cond.load88
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6],ymm2[7,8,9,10,11,12,13],ymm1[14],ymm2[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_62: # %else89
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jne .LBB0_64
; CHECK-NEXT:  # %bb.63: # %cond.load91
; CHECK-NEXT:    vpbroadcastw (%rax), %ymm1
; CHECK-NEXT:    vextractf64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7],ymm2[8,9,10,11,12,13,14],ymm1[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB0_64: # %else92
; CHECK-NEXT:    retq
  ; 32 x f16 masked load: pointer and mask are poison, alignment 2, and the
  ; passthrough vector is all zeroes (the zeroes are what the vpxor above
  ; materializes).
  %1 = call <32 x half> @llvm.masked.load.v32f16.p0(ptr poison, i32 2, <32 x i1> poison, <32 x half> <half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0, half 0.0>)
  ret <32 x half> %1
}

; Same <32 x half> masked load as above, but with a real pointer and a real
; <32 x i1> mask argument. The mask is materialized via vpsllw/vpmovmskb into
; %eax and each of the 32 lanes is guarded by a test of the corresponding
; mask bit (testb for bits 0-6, sign tests for bits 7/15, testl with explicit
; immediates above that). The passthrough is a splat of 2.0, visible in the
; vpbroadcast constants on the no-load paths.
define <32 x half> @build_vec(ptr %p, <32 x i1> %mask) {
; CHECK-LABEL: build_vec:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpsllw $7, %ymm0, %ymm0
; CHECK-NEXT:    vpmovmskb %ymm0, %eax
; CHECK-NEXT:    testb $1, %al
; CHECK-NEXT:    je .LBB1_1
; CHECK-NEXT:  # %bb.2: # %cond.load
; CHECK-NEXT:    vpinsrw $0, (%rdi), %xmm0, %xmm0
; CHECK-NEXT:    vpbroadcastd {{.*#+}} zmm1 = [2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0]
; CHECK-NEXT:    vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm0, %zmm1, %zmm0
; CHECK-NEXT:    testb $2, %al
; CHECK-NEXT:    jne .LBB1_4
; CHECK-NEXT:    jmp .LBB1_5
; CHECK-NEXT:  .LBB1_1:
; CHECK-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0,2.0E+0]
; CHECK-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
; CHECK-NEXT:    testb $2, %al
; CHECK-NEXT:    je .LBB1_5
; CHECK-NEXT:  .LBB1_4: # %cond.load1
; CHECK-NEXT:    vpbroadcastw 2(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:  .LBB1_5: # %else2
; CHECK-NEXT:    testb $4, %al
; CHECK-NEXT:    jne .LBB1_6
; CHECK-NEXT:  # %bb.7: # %else5
; CHECK-NEXT:    testb $8, %al
; CHECK-NEXT:    jne .LBB1_8
; CHECK-NEXT:  .LBB1_9: # %else8
; CHECK-NEXT:    testb $16, %al
; CHECK-NEXT:    jne .LBB1_10
; CHECK-NEXT:  .LBB1_11: # %else11
; CHECK-NEXT:    testb $32, %al
; CHECK-NEXT:    jne .LBB1_12
; CHECK-NEXT:  .LBB1_13: # %else14
; CHECK-NEXT:    testb $64, %al
; CHECK-NEXT:    jne .LBB1_14
; CHECK-NEXT:  .LBB1_15: # %else17
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    js .LBB1_16
; CHECK-NEXT:  .LBB1_17: # %else20
; CHECK-NEXT:    testl $256, %eax # imm = 0x100
; CHECK-NEXT:    jne .LBB1_18
; CHECK-NEXT:  .LBB1_19: # %else23
; CHECK-NEXT:    testl $512, %eax # imm = 0x200
; CHECK-NEXT:    jne .LBB1_20
; CHECK-NEXT:  .LBB1_21: # %else26
; CHECK-NEXT:    testl $1024, %eax # imm = 0x400
; CHECK-NEXT:    jne .LBB1_22
; CHECK-NEXT:  .LBB1_23: # %else29
; CHECK-NEXT:    testl $2048, %eax # imm = 0x800
; CHECK-NEXT:    jne .LBB1_24
; CHECK-NEXT:  .LBB1_25: # %else32
; CHECK-NEXT:    testl $4096, %eax # imm = 0x1000
; CHECK-NEXT:    jne .LBB1_26
; CHECK-NEXT:  .LBB1_27: # %else35
; CHECK-NEXT:    testl $8192, %eax # imm = 0x2000
; CHECK-NEXT:    jne .LBB1_28
; CHECK-NEXT:  .LBB1_29: # %else38
; CHECK-NEXT:    testl $16384, %eax # imm = 0x4000
; CHECK-NEXT:    jne .LBB1_30
; CHECK-NEXT:  .LBB1_31: # %else41
; CHECK-NEXT:    testw %ax, %ax
; CHECK-NEXT:    js .LBB1_32
; CHECK-NEXT:  .LBB1_33: # %else44
; CHECK-NEXT:    testl $65536, %eax # imm = 0x10000
; CHECK-NEXT:    jne .LBB1_34
; CHECK-NEXT:  .LBB1_35: # %else47
; CHECK-NEXT:    testl $131072, %eax # imm = 0x20000
; CHECK-NEXT:    jne .LBB1_36
; CHECK-NEXT:  .LBB1_37: # %else50
; CHECK-NEXT:    testl $262144, %eax # imm = 0x40000
; CHECK-NEXT:    jne .LBB1_38
; CHECK-NEXT:  .LBB1_39: # %else53
; CHECK-NEXT:    testl $524288, %eax # imm = 0x80000
; CHECK-NEXT:    jne .LBB1_40
; CHECK-NEXT:  .LBB1_41: # %else56
; CHECK-NEXT:    testl $1048576, %eax # imm = 0x100000
; CHECK-NEXT:    jne .LBB1_42
; CHECK-NEXT:  .LBB1_43: # %else59
; CHECK-NEXT:    testl $2097152, %eax # imm = 0x200000
; CHECK-NEXT:    jne .LBB1_44
; CHECK-NEXT:  .LBB1_45: # %else62
; CHECK-NEXT:    testl $4194304, %eax # imm = 0x400000
; CHECK-NEXT:    jne .LBB1_46
; CHECK-NEXT:  .LBB1_47: # %else65
; CHECK-NEXT:    testl $8388608, %eax # imm = 0x800000
; CHECK-NEXT:    jne .LBB1_48
; CHECK-NEXT:  .LBB1_49: # %else68
; CHECK-NEXT:    testl $16777216, %eax # imm = 0x1000000
; CHECK-NEXT:    jne .LBB1_50
; CHECK-NEXT:  .LBB1_51: # %else71
; CHECK-NEXT:    testl $33554432, %eax # imm = 0x2000000
; CHECK-NEXT:    jne .LBB1_52
; CHECK-NEXT:  .LBB1_53: # %else74
; CHECK-NEXT:    testl $67108864, %eax # imm = 0x4000000
; CHECK-NEXT:    jne .LBB1_54
; CHECK-NEXT:  .LBB1_55: # %else77
; CHECK-NEXT:    testl $134217728, %eax # imm = 0x8000000
; CHECK-NEXT:    jne .LBB1_56
; CHECK-NEXT:  .LBB1_57: # %else80
; CHECK-NEXT:    testl $268435456, %eax # imm = 0x10000000
; CHECK-NEXT:    jne .LBB1_58
; CHECK-NEXT:  .LBB1_59: # %else83
; CHECK-NEXT:    testl $536870912, %eax # imm = 0x20000000
; CHECK-NEXT:    jne .LBB1_60
; CHECK-NEXT:  .LBB1_61: # %else86
; CHECK-NEXT:    testl $1073741824, %eax # imm = 0x40000000
; CHECK-NEXT:    jne .LBB1_62
; CHECK-NEXT:  .LBB1_63: # %else89
; CHECK-NEXT:    testl $-2147483648, %eax # imm = 0x80000000
; CHECK-NEXT:    jne .LBB1_64
; CHECK-NEXT:  .LBB1_65: # %else92
; CHECK-NEXT:    retq
; CHECK-NEXT:  .LBB1_6: # %cond.load4
; CHECK-NEXT:    vpbroadcastw 4(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1],xmm1[2],xmm0[3,4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    testb $8, %al
; CHECK-NEXT:    je .LBB1_9
; CHECK-NEXT:  .LBB1_8: # %cond.load7
; CHECK-NEXT:    vpbroadcastw 6(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    testb $16, %al
; CHECK-NEXT:    je .LBB1_11
; CHECK-NEXT:  .LBB1_10: # %cond.load10
; CHECK-NEXT:    vpbroadcastw 8(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3],xmm1[4],xmm0[5,6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    testb $32, %al
; CHECK-NEXT:    je .LBB1_13
; CHECK-NEXT:  .LBB1_12: # %cond.load13
; CHECK-NEXT:    vpbroadcastw 10(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    testb $64, %al
; CHECK-NEXT:    je .LBB1_15
; CHECK-NEXT:  .LBB1_14: # %cond.load16
; CHECK-NEXT:    vpbroadcastw 12(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5],xmm1[6],xmm0[7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    testb %al, %al
; CHECK-NEXT:    jns .LBB1_17
; CHECK-NEXT:  .LBB1_16: # %cond.load19
; CHECK-NEXT:    vpbroadcastw 14(%rdi), %xmm1
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,6],xmm1[7]
; CHECK-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $256, %eax # imm = 0x100
; CHECK-NEXT:    je .LBB1_19
; CHECK-NEXT:  .LBB1_18: # %cond.load22
; CHECK-NEXT:    vpbroadcastw 16(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1,2,3,4,5,6,7],ymm1[8],ymm0[9,10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $512, %eax # imm = 0x200
; CHECK-NEXT:    je .LBB1_21
; CHECK-NEXT:  .LBB1_20: # %cond.load25
; CHECK-NEXT:    vpbroadcastw 18(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3,4,5,6,7,8],ymm1[9],ymm0[10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $1024, %eax # imm = 0x400
; CHECK-NEXT:    je .LBB1_23
; CHECK-NEXT:  .LBB1_22: # %cond.load28
; CHECK-NEXT:    vpbroadcastw 20(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1],ymm1[2],ymm0[3,4,5,6,7,8,9],ymm1[10],ymm0[11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $2048, %eax # imm = 0x800
; CHECK-NEXT:    je .LBB1_25
; CHECK-NEXT:  .LBB1_24: # %cond.load31
; CHECK-NEXT:    vpbroadcastw 22(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6,7,8,9,10],ymm1[11],ymm0[12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $4096, %eax # imm = 0x1000
; CHECK-NEXT:    je .LBB1_27
; CHECK-NEXT:  .LBB1_26: # %cond.load34
; CHECK-NEXT:    vpbroadcastw 24(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6,7,8,9,10,11],ymm1[12],ymm0[13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $8192, %eax # imm = 0x2000
; CHECK-NEXT:    je .LBB1_29
; CHECK-NEXT:  .LBB1_28: # %cond.load37
; CHECK-NEXT:    vpbroadcastw 26(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7,8,9,10,11,12],ymm1[13],ymm0[14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $16384, %eax # imm = 0x4000
; CHECK-NEXT:    je .LBB1_31
; CHECK-NEXT:  .LBB1_30: # %cond.load40
; CHECK-NEXT:    vpbroadcastw 28(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7,8,9,10,11,12,13],ymm1[14],ymm0[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testw %ax, %ax
; CHECK-NEXT:    jns .LBB1_33
; CHECK-NEXT:  .LBB1_32: # %cond.load43
; CHECK-NEXT:    vpbroadcastw 30(%rdi), %ymm1
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5,6],ymm1[7],ymm0[8,9,10,11,12,13,14],ymm1[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
; CHECK-NEXT:    testl $65536, %eax # imm = 0x10000
; CHECK-NEXT:    je .LBB1_35
; CHECK-NEXT:  .LBB1_34: # %cond.load46
; CHECK-NEXT:    vpbroadcastw 32(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1,2,3,4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $131072, %eax # imm = 0x20000
; CHECK-NEXT:    je .LBB1_37
; CHECK-NEXT:  .LBB1_36: # %cond.load49
; CHECK-NEXT:    vpbroadcastw 34(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3,4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $262144, %eax # imm = 0x40000
; CHECK-NEXT:    je .LBB1_39
; CHECK-NEXT:  .LBB1_38: # %cond.load52
; CHECK-NEXT:    vpbroadcastw 36(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2],xmm2[3,4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $524288, %eax # imm = 0x80000
; CHECK-NEXT:    je .LBB1_41
; CHECK-NEXT:  .LBB1_40: # %cond.load55
; CHECK-NEXT:    vpbroadcastw 38(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3],xmm2[4,5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $1048576, %eax # imm = 0x100000
; CHECK-NEXT:    je .LBB1_43
; CHECK-NEXT:  .LBB1_42: # %cond.load58
; CHECK-NEXT:    vpbroadcastw 40(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3],xmm1[4],xmm2[5,6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $2097152, %eax # imm = 0x200000
; CHECK-NEXT:    je .LBB1_45
; CHECK-NEXT:  .LBB1_44: # %cond.load61
; CHECK-NEXT:    vpbroadcastw 42(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5],xmm2[6,7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $4194304, %eax # imm = 0x400000
; CHECK-NEXT:    je .LBB1_47
; CHECK-NEXT:  .LBB1_46: # %cond.load64
; CHECK-NEXT:    vpbroadcastw 44(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5],xmm1[6],xmm2[7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $8388608, %eax # imm = 0x800000
; CHECK-NEXT:    je .LBB1_49
; CHECK-NEXT:  .LBB1_48: # %cond.load67
; CHECK-NEXT:    vpbroadcastw 46(%rdi), %xmm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5,6],xmm1[7]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $16777216, %eax # imm = 0x1000000
; CHECK-NEXT:    je .LBB1_51
; CHECK-NEXT:  .LBB1_50: # %cond.load70
; CHECK-NEXT:    vpbroadcastw 48(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1,2,3,4,5,6,7],ymm1[8],ymm2[9,10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $33554432, %eax # imm = 0x2000000
; CHECK-NEXT:    je .LBB1_53
; CHECK-NEXT:  .LBB1_52: # %cond.load73
; CHECK-NEXT:    vpbroadcastw 50(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4,5,6,7,8],ymm1[9],ymm2[10,11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $67108864, %eax # imm = 0x4000000
; CHECK-NEXT:    je .LBB1_55
; CHECK-NEXT:  .LBB1_54: # %cond.load76
; CHECK-NEXT:    vpbroadcastw 52(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4,5,6,7,8,9],ymm1[10],ymm2[11,12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $134217728, %eax # imm = 0x8000000
; CHECK-NEXT:    je .LBB1_57
; CHECK-NEXT:  .LBB1_56: # %cond.load79
; CHECK-NEXT:    vpbroadcastw 54(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6,7,8,9,10],ymm1[11],ymm2[12,13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $268435456, %eax # imm = 0x10000000
; CHECK-NEXT:    je .LBB1_59
; CHECK-NEXT:  .LBB1_58: # %cond.load82
; CHECK-NEXT:    vpbroadcastw 56(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4],ymm2[5,6,7,8,9,10,11],ymm1[12],ymm2[13,14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $536870912, %eax # imm = 0x20000000
; CHECK-NEXT:    je .LBB1_61
; CHECK-NEXT:  .LBB1_60: # %cond.load85
; CHECK-NEXT:    vpbroadcastw 58(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7,8,9,10,11,12],ymm1[13],ymm2[14,15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $1073741824, %eax # imm = 0x40000000
; CHECK-NEXT:    je .LBB1_63
; CHECK-NEXT:  .LBB1_62: # %cond.load88
; CHECK-NEXT:    vpbroadcastw 60(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6],ymm2[7,8,9,10,11,12,13],ymm1[14],ymm2[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    testl $-2147483648, %eax # imm = 0x80000000
; CHECK-NEXT:    je .LBB1_65
; CHECK-NEXT:  .LBB1_64: # %cond.load91
; CHECK-NEXT:    vpbroadcastw 62(%rdi), %ymm1
; CHECK-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; CHECK-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7],ymm2[8,9,10,11,12,13,14],ymm1[15]
; CHECK-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
; CHECK-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; CHECK-NEXT:    retq
  ; 32 x f16 masked load from %p with mask %mask, alignment 2, and a
  ; passthrough splat of 2.0 for the unselected lanes.
  %1 = call <32 x half> @llvm.masked.load.v32f16.p0(ptr %p, i32 2, <32 x i1 > %mask, <32 x half> <half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0, half 2.0>)
  ret <32 x half> %1
}

declare <32 x half> @llvm.masked.load.v32f16.p0(ptr, i32, <32 x i1>, <32 x half>)