File: avx512vbmi2_vl.s

# Check 32-bit AVX512{VBMI2,VL} instructions
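#
# Editorial note: the memory-operand forms below exercise the EVEX
# compressed-displacement (disp8*N) encoding.  Displacements such as 126,
# 128, 2032, 4064, 508 and 1016 are exact multiples of the relevant
# element or vector size, so the assembler should be able to emit them as
# a single scaled disp8 byte (lines tagged "Disp8").  {%k7}/{z} select
# merging vs. zeroing masking, and {1toN} requests an embedded broadcast
# of a single dword/qword memory element.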

	.allow_index_reg
	.text
_start:
	vpcompressb	%xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{VBMI2,VL}
	vpcompressb	%xmm6, 126(%edx){%k7}	 # AVX512{VBMI2,VL} Disp8
	vpcompressb	%ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{VBMI2,VL}
	vpcompressb	%ymm6, 126(%edx){%k7}	 # AVX512{VBMI2,VL} Disp8
	vpcompressb	%xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpcompressb	%xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpcompressb	%ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpcompressb	%ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}

	vpcompressw	%xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{VBMI2,VL}
	vpcompressw	%xmm6, 128(%edx){%k7}	 # AVX512{VBMI2,VL} Disp8
	vpcompressw	%ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{VBMI2,VL}
	vpcompressw	%ymm6, 128(%edx){%k7}	 # AVX512{VBMI2,VL} Disp8
	vpcompressw	%xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpcompressw	%xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpcompressw	%ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpcompressw	%ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}

	vpexpandb	(%ecx), %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandb	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandb	126(%edx), %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpexpandb	(%ecx), %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandb	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandb	126(%edx), %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpexpandb	%xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandb	%xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandb	%ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandb	%ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}

	vpexpandw	(%ecx), %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandw	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	128(%edx), %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	(%ecx), %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandw	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	128(%edx), %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	%xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	%xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandw	%ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	%ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}

	vpshldvw	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvw	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvw	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvw	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvw	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvw	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvw	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvw	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshldvd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshldvq	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvq	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvq	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvq	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvq	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvq	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldvq	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshrdvw	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvw	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvw	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvw	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvw	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvw	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvw	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvw	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshrdvd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshrdvq	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvq	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvq	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvq	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvq	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvq	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvq	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

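# Immediate-count forms: vpshld{w,d,q} and vpshrd{w,d,q} take the shift
# count as an 8-bit immediate rather than from a third vector operand.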
	vpshldw	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldw	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldw	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldw	$123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldw	$0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldw	$0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldw	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldw	$123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshldd	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldd	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldd	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldd	$123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$123, 508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldd	$0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldd	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldd	$123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$123, 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshldq	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldq	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldq	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldq	$123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldq	$0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldq	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshldq	$123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshrdw	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdw	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdw	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdw	$123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdw	$0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdw	$0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdw	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdw	$123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshrdd	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdd	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdd	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdd	$123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$123, 508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdd	$0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdd	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdd	$123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$123, 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	vpshrdq	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdq	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdq	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdq	$123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdq	$0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdq	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL}
	vpshrdq	$123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{VBMI2,VL} Disp8

	.intel_syntax noprefix
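# The same instructions again, this time in Intel syntax (destination
# operand first; masking and broadcast written as {k7}/{z} and {1toN}).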
	vpcompressb	XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6	 # AVX512{VBMI2,VL}
	vpcompressb	XMMWORD PTR [edx+126]{k7}, xmm6	 # AVX512{VBMI2,VL} Disp8
	vpcompressb	YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6	 # AVX512{VBMI2,VL}
	vpcompressb	YMMWORD PTR [edx+126]{k7}, ymm6	 # AVX512{VBMI2,VL} Disp8
	vpcompressb	xmm6{k7}, xmm5	 # AVX512{VBMI2,VL}
	vpcompressb	xmm6{k7}{z}, xmm5	 # AVX512{VBMI2,VL}
	vpcompressb	ymm6{k7}, ymm5	 # AVX512{VBMI2,VL}
	vpcompressb	ymm6{k7}{z}, ymm5	 # AVX512{VBMI2,VL}

	vpcompressw	XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6	 # AVX512{VBMI2,VL}
	vpcompressw	XMMWORD PTR [edx+128]{k7}, xmm6	 # AVX512{VBMI2,VL} Disp8
	vpcompressw	YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6	 # AVX512{VBMI2,VL}
	vpcompressw	YMMWORD PTR [edx+128]{k7}, ymm6	 # AVX512{VBMI2,VL} Disp8
	vpcompressw	xmm6{k7}, xmm5	 # AVX512{VBMI2,VL}
	vpcompressw	xmm6{k7}{z}, xmm5	 # AVX512{VBMI2,VL}
	vpcompressw	ymm6{k7}, ymm5	 # AVX512{VBMI2,VL}
	vpcompressw	ymm6{k7}{z}, ymm5	 # AVX512{VBMI2,VL}

	vpexpandb	xmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{VBMI2,VL}
	vpexpandb	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpexpandb	xmm6{k7}, XMMWORD PTR [edx+126]	 # AVX512{VBMI2,VL} Disp8
	vpexpandb	ymm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512{VBMI2,VL}
	vpexpandb	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpexpandb	ymm6{k7}, YMMWORD PTR [edx+126]	 # AVX512{VBMI2,VL} Disp8
	vpexpandb	xmm6{k7}, xmm5	 # AVX512{VBMI2,VL}
	vpexpandb	xmm6{k7}{z}, xmm5	 # AVX512{VBMI2,VL}
	vpexpandb	ymm6{k7}, ymm5	 # AVX512{VBMI2,VL}
	vpexpandb	ymm6{k7}{z}, ymm5	 # AVX512{VBMI2,VL}

	vpexpandw	xmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{VBMI2,VL}
	vpexpandw	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpexpandw	xmm6{k7}, XMMWORD PTR [edx+128]	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	ymm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512{VBMI2,VL}
	vpexpandw	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpexpandw	ymm6{k7}, YMMWORD PTR [edx+128]	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	xmm6{k7}, xmm5	 # AVX512{VBMI2,VL}
	vpexpandw	xmm6{k7}{z}, xmm5	 # AVX512{VBMI2,VL}
	vpexpandw	ymm6{k7}, ymm5	 # AVX512{VBMI2,VL}
	vpexpandw	ymm6{k7}{z}, ymm5	 # AVX512{VBMI2,VL}

	vpshldvw	xmm6{k7}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshldvw	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshldvw	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshldvw	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshldvw	ymm6{k7}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshldvw	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshldvw	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshldvw	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{VBMI2,VL} Disp8

	vpshldvd	xmm6{k7}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshldvd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshldvd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshldvd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	ymm6{k7}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshldvd	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshldvd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshldvd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{VBMI2,VL} Disp8

	vpshldvq	xmm6{k7}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshldvq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshldvq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshldvq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	ymm6{k7}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshldvq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshldvq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshldvq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{VBMI2,VL} Disp8

	vpshrdvw	xmm6{k7}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvw	ymm6{k7}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{VBMI2,VL} Disp8

	vpshrdvd	xmm6{k7}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	ymm6{k7}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{VBMI2,VL} Disp8

	vpshrdvq	xmm6{k7}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	ymm6{k7}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{VBMI2,VL} Disp8

	vpshldw	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshldw	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldw	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshldw	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{VBMI2,VL} Disp8

	vpshldd	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshldd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshldd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{VBMI2,VL} Disp8

	vpshldq	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshldq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshldq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8

	vpshrdw	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshrdw	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdw	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshrdw	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshrdd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshrdd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{VBMI2,VL} Disp8

	vpshrdq	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshrdq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{VBMI2,VL}
	vpshrdq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8