/*========================== begin_copyright_notice ============================
Copyright (C) 2020-2021 Intel Corporation
SPDX-License-Identifier: MIT
============================= end_copyright_notice ===========================*/
#ifndef IGCBIF_INTRINSICS_LSC_CL
#define IGCBIF_INTRINSICS_LSC_CL
#ifdef cl_intel_pvc_lsc_validation
////////////////////////////////////////////////////////////////////////
// LSC (Load/Store Cache) intrinsics
//
// This set of intrinsics maps accesses to DG2/PVC+ LSC messages.
// The general scheme is that we use the SPIR-V data type to infer
// the LSC type and vector combination. The following table represents
// the mapping for load and store messages.
//
// +-------------+----------+---------------------------------------
// | OpenCL Type | LSC Type | Notes
// +-------------+----------+---------------------------------------
// | uchar+ | D8U32 | 32b in the register file; 8b in memory
// | ushort+ | D16U32 | 32b in the register file; 16b in memory
// | uint | D32 V1 |
// | uint2 | D32 V2 |
// | uint3 | D32 V3 |
// | uint4 | D32 V4 |
// | uint8+ | D32 V8 | May split into several submessages, unless half SIMD (B0+)
// | ulong | D64 V1 |
// | ulong2 | D64 V2 |
// | ulong3+ | D64 V3 | Will split into several submessages, unless half SIMD (B0+)
// | ulong4+ | D64 V4 | Will split into several submessages
// | ulong8++ | D64 V8 | Will split into several submessages
// +-------------+----------+---------------------------------------
// + no native D8 and D16 load/store support exists at the moment;
// also, upper bytes of DW may be garbage depending on platform (later platform fixes)
// ++ can split into several messages
//
// *** Cache Controls ***
// Cache controls are present in most messages. Some have additional
// constraints not listed here, which may vary per platform and stepping.
//
// *** Uniform Immediate Offset ***
// An additional immediate offset (in elements) parameter is also provided.
// Semantically this is no different than adding to the base pointer,
// but hardware may be able to fuse the add into the message.
// As the name implies, the argument must be uniform and immediate
// (not variable).
//
// *** Other Data Types ***
// Only unsigned types are needed for most load and store operations.
// Use OpenCL reinterpretation functions to convert to signed and float
// types (e.g. as_float4(...)).
//
// Atomics favor unsigned for untyped integer operations such as bitwise ops,
// (integer) add and so forth. Again, use as_int(..) to convert.
// For explicit operations such as signed min/max and floating-point add/sub
// (among others) signed and floating point types are used and no conversion
// is necessary.
//
// *** Additional Restrictions ***
// Hardware documentation contains additional constraints; platforms
// and stepping may contain additional restrictions not enforced here.
// In such cases, the results of the operation are undefined.
///////////////////////////////////////////////////////////////////////
// LSC Cache options
// Those values are in API intrinsics and need to be explicit.
// Those values should match exactly to ones used in IGC
// (given in igc_regkeys_enums_defs.h).
///////////////////////////////////////////////////////////////////////
//
// Load message caching control
enum LSC_LDCC {
LSC_LDCC_DEFAULT = 0, // Use the platform's default load caching policy
LSC_LDCC_L1UC_L3UC = 1, // Override to L1 uncached and L3 uncached
LSC_LDCC_L1UC_L3C = 2, // Override to L1 uncached and L3 cached
LSC_LDCC_L1C_L3UC = 3, // Override to L1 cached and L3 uncached
LSC_LDCC_L1C_L3C = 4, // Override to L1 cached and L3 cached
LSC_LDCC_L1S_L3UC = 5, // Override to L1 streaming load and L3 uncached
LSC_LDCC_L1S_L3C = 6, // Override to L1 streaming load and L3 cached
LSC_LDCC_L1IAR_L3C = 7, // Override to L1 invalidate-after-read, and L3 cached
};
// Store message caching control (also used for atomics)
enum LSC_STCC {
LSC_STCC_DEFAULT = 0, // Use the platform's default store caching policy
LSC_STCC_L1UC_L3UC = 1, // Override to L1 uncached and L3 uncached
LSC_STCC_L1UC_L3WB = 2, // Override to L1 uncached and L3 written back
LSC_STCC_L1WT_L3UC = 3, // Override to L1 written through and L3 uncached
LSC_STCC_L1WT_L3WB = 4, // Override to L1 written through and L3 written back
LSC_STCC_L1S_L3UC = 5, // Override to L1 streaming and L3 uncached
LSC_STCC_L1S_L3WB = 6, // Override to L1 streaming and L3 written back
LSC_STCC_L1WB_L3WB = 7, // Override to L1 written back and L3 written back
};
///////////////////////////////////////////////////////////////////////
// LSC Loads
///////////////////////////////////////////////////////////////////////
// global address space gathering load
uint __builtin_IB_lsc_load_global_uchar_to_uint (const __global uchar *base, int immElemOff, enum LSC_LDCC cacheOpt); //D8U32
uint __builtin_IB_lsc_load_global_ushort_to_uint(const __global ushort *base, int immElemOff, enum LSC_LDCC cacheOpt); //D16U32
uint __builtin_IB_lsc_load_global_uint (const __global uint *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V1
uint2 __builtin_IB_lsc_load_global_uint2 (const __global uint2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V2
uint3 __builtin_IB_lsc_load_global_uint3 (const __global uint3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V3
uint4 __builtin_IB_lsc_load_global_uint4 (const __global uint4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V4
uint8 __builtin_IB_lsc_load_global_uint8 (const __global uint8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V8
ulong __builtin_IB_lsc_load_global_ulong (const __global ulong *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V1
ulong2 __builtin_IB_lsc_load_global_ulong2(const __global ulong2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V2
ulong3 __builtin_IB_lsc_load_global_ulong3(const __global ulong3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V3
ulong4 __builtin_IB_lsc_load_global_ulong4(const __global ulong4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V4
ulong8 __builtin_IB_lsc_load_global_ulong8(const __global ulong8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V8
uint __builtin_IB_lsc_load_cmask_global_uint (const __global uint *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V1
uint __builtin_IB_lsc_load_cmask_global_uint2 (const __global uint2 *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V2
uint __builtin_IB_lsc_load_cmask_global_uint3 (const __global uint3 *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V3
uint __builtin_IB_lsc_load_cmask_global_uint4 (const __global uint4 *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V4
uint __builtin_IB_lsc_load_cmask_global_ulong (const __global ulong *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V1
uint __builtin_IB_lsc_load_cmask_global_ulong2(const __global ulong2 *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V2
uint __builtin_IB_lsc_load_cmask_global_ulong3(const __global ulong3 *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V3
uint __builtin_IB_lsc_load_cmask_global_ulong4(const __global ulong4 *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V4
// global block load (one-dimensional)
uint __builtin_IB_lsc_load_block_global_uchar_to_uint (const __global uchar *base, int immElemOff, enum LSC_LDCC cacheOpt); //D8U32
uint __builtin_IB_lsc_load_block_global_ushort_to_uint(const __global ushort *base, int immElemOff, enum LSC_LDCC cacheOpt); //D16U32
uint __builtin_IB_lsc_load_block_global_uint (const __global uint *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V1
uint2 __builtin_IB_lsc_load_block_global_uint2 (const __global uint2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V2
uint3 __builtin_IB_lsc_load_block_global_uint3 (const __global uint3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V3
uint4 __builtin_IB_lsc_load_block_global_uint4 (const __global uint4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V4
uint8 __builtin_IB_lsc_load_block_global_uint8 (const __global uint8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V8
ulong __builtin_IB_lsc_load_block_global_ulong (const __global ulong *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V1
ulong2 __builtin_IB_lsc_load_block_global_ulong2(const __global ulong2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V2
ulong3 __builtin_IB_lsc_load_block_global_ulong3(const __global ulong3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V3
ulong4 __builtin_IB_lsc_load_block_global_ulong4(const __global ulong4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V4
ulong8 __builtin_IB_lsc_load_block_global_ulong8(const __global ulong8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V8
// local address space
uint __builtin_IB_lsc_load_local_uchar_to_uint( const __local uchar *base, int immElemOff); //D8U32
uint __builtin_IB_lsc_load_local_ushort_to_uint(const __local ushort *base, int immElemOff); //D16U32
uint __builtin_IB_lsc_load_local_uint (const __local uint *base, int immElemOff); //D32V1
uint2 __builtin_IB_lsc_load_local_uint2 (const __local uint2 *base, int immElemOff); //D32V2
uint3 __builtin_IB_lsc_load_local_uint3 (const __local uint3 *base, int immElemOff); //D32V3
uint4 __builtin_IB_lsc_load_local_uint4 (const __local uint4 *base, int immElemOff); //D32V4
uint8 __builtin_IB_lsc_load_local_uint8 (const __local uint8 *base, int immElemOff); //D32V8
ulong __builtin_IB_lsc_load_local_ulong (const __local ulong *base, int immElemOff); //D64V1
ulong2 __builtin_IB_lsc_load_local_ulong2(const __local ulong2 *base, int immElemOff); //D64V2
ulong3 __builtin_IB_lsc_load_local_ulong3(const __local ulong3 *base, int immElemOff); //D64V3
ulong4 __builtin_IB_lsc_load_local_ulong4(const __local ulong4 *base, int immElemOff); //D64V4
ulong8 __builtin_IB_lsc_load_local_ulong8(const __local ulong8 *base, int immElemOff); //D64V8
uint __builtin_IB_lsc_load_cmask_global_uint (const __global uint *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V1
uint __builtin_IB_lsc_load_cmask_global_uint2 (const __global uint2 *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V2
uint __builtin_IB_lsc_load_cmask_global_uint3 (const __global uint3 *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V3
uint __builtin_IB_lsc_load_cmask_global_uint4 (const __global uint4 *base, int elemOff, enum LSC_LDCC cacheOpt); //D32V4
uint __builtin_IB_lsc_load_cmask_global_ulong (const __global ulong *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V1
uint __builtin_IB_lsc_load_cmask_global_ulong2(const __global ulong2 *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V2
uint __builtin_IB_lsc_load_cmask_global_ulong3(const __global ulong3 *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V3
uint __builtin_IB_lsc_load_cmask_global_ulong4(const __global ulong4 *base, int elemOff, enum LSC_LDCC cacheOpt); //D64V4
///////////////////////////////////////////////////////////////////////
// LSC Stores
///////////////////////////////////////////////////////////////////////
// global address space scattering store
void __builtin_IB_lsc_store_global_uchar_from_uint (__global uchar *base, int immElemOff, uint val, enum LSC_STCC cacheOpt); //D8U32
void __builtin_IB_lsc_store_global_ushort_from_uint(__global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt); //D16U32
void __builtin_IB_lsc_store_global_uint (__global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt); //D32V1
void __builtin_IB_lsc_store_global_uint2 (__global uint2 *base, int immElemOff, uint2 val, enum LSC_STCC cacheOpt); //D32V2
void __builtin_IB_lsc_store_global_uint3 (__global uint3 *base, int immElemOff, uint3 val, enum LSC_STCC cacheOpt); //D32V3
void __builtin_IB_lsc_store_global_uint4 (__global uint4 *base, int immElemOff, uint4 val, enum LSC_STCC cacheOpt); //D32V4
void __builtin_IB_lsc_store_global_uint8 (__global uint8 *base, int immElemOff, uint8 val, enum LSC_STCC cacheOpt); //D32V8
void __builtin_IB_lsc_store_global_ulong (__global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt); //D64V1
void __builtin_IB_lsc_store_global_ulong2(__global ulong2 *base, int immElemOff, ulong2 val, enum LSC_STCC cacheOpt); //D64V2
void __builtin_IB_lsc_store_global_ulong3(__global ulong3 *base, int immElemOff, ulong3 val, enum LSC_STCC cacheOpt); //D64V3
void __builtin_IB_lsc_store_global_ulong4(__global ulong4 *base, int immElemOff, ulong4 val, enum LSC_STCC cacheOpt); //D64V4
void __builtin_IB_lsc_store_global_ulong8(__global ulong8 *base, int immElemOff, ulong8 val, enum LSC_STCC cacheOpt); //D64V8
void __builtin_IB_lsc_store_cmask_global_uint (__global uint *base, int elemOff, uint val, enum LSC_STCC cacheOpt); //D32V1
void __builtin_IB_lsc_store_cmask_global_uint2 (__global uint2 *base, int elemOff, uint2 val, enum LSC_STCC cacheOpt); //D32V2
void __builtin_IB_lsc_store_cmask_global_uint3 (__global uint3 *base, int elemOff, uint3 val, enum LSC_STCC cacheOpt); //D32V3
void __builtin_IB_lsc_store_cmask_global_uint4 (__global uint4 *base, int elemOff, uint4 val, enum LSC_STCC cacheOpt); //D32V4
void __builtin_IB_lsc_store_cmask_global_ulong (__global ulong *base, int elemOff, ulong val, enum LSC_STCC cacheOpt); //D64V1
void __builtin_IB_lsc_store_cmask_global_ulong2(__global ulong2 *base, int elemOff, ulong2 val, enum LSC_STCC cacheOpt); //D64V2
void __builtin_IB_lsc_store_cmask_global_ulong3(__global ulong3 *base, int elemOff, ulong3 val, enum LSC_STCC cacheOpt); //D64V3
void __builtin_IB_lsc_store_cmask_global_ulong4(__global ulong4 *base, int elemOff, ulong4 val, enum LSC_STCC cacheOpt); //D64V4
// global block store (one-dimensional)
void __builtin_IB_lsc_store_block_global_uchar_from_uint (__global uchar *base, int immElemOff, uint val, enum LSC_STCC cacheOpt); //D8U32
void __builtin_IB_lsc_store_block_global_ushort_from_uint(__global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt); //D16U32
void __builtin_IB_lsc_store_block_global_uint (__global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt); //D32V1
void __builtin_IB_lsc_store_block_global_uint2 (__global uint2 *base, int immElemOff, uint2 val, enum LSC_STCC cacheOpt); //D32V2
void __builtin_IB_lsc_store_block_global_uint3 (__global uint3 *base, int immElemOff, uint3 val, enum LSC_STCC cacheOpt); //D32V3
void __builtin_IB_lsc_store_block_global_uint4 (__global uint4 *base, int immElemOff, uint4 val, enum LSC_STCC cacheOpt); //D32V4
void __builtin_IB_lsc_store_block_global_uint8 (__global uint8 *base, int immElemOff, uint8 val, enum LSC_STCC cacheOpt); //D32V8
void __builtin_IB_lsc_store_block_global_ulong (__global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt); //D64V1
void __builtin_IB_lsc_store_block_global_ulong2(__global ulong2 *base, int immElemOff, ulong2 val, enum LSC_STCC cacheOpt); //D64V2
void __builtin_IB_lsc_store_block_global_ulong3(__global ulong3 *base, int immElemOff, ulong3 val, enum LSC_STCC cacheOpt); //D64V3
void __builtin_IB_lsc_store_block_global_ulong4(__global ulong4 *base, int immElemOff, ulong4 val, enum LSC_STCC cacheOpt); //D64V4
void __builtin_IB_lsc_store_block_global_ulong8(__global ulong8 *base, int immElemOff, ulong8 val, enum LSC_STCC cacheOpt); //D64V8
// local address space
void __builtin_IB_lsc_store_local_uchar_from_uint (__local uchar *base, int immElemOff, uint val); //D8U32
void __builtin_IB_lsc_store_local_ushort_from_uint(__local ushort *base, int immElemOff, uint val); //D16U32
void __builtin_IB_lsc_store_local_uint (__local uint *base, int immElemOff, uint val); //D32V1
void __builtin_IB_lsc_store_local_uint2 (__local uint2 *base, int immElemOff, uint2 val); //D32V2
void __builtin_IB_lsc_store_local_uint3 (__local uint3 *base, int immElemOff, uint3 val); //D32V3
void __builtin_IB_lsc_store_local_uint4 (__local uint4 *base, int immElemOff, uint4 val); //D32V4
void __builtin_IB_lsc_store_local_uint8 (__local uint8 *base, int immElemOff, uint8 val); //D32V8
void __builtin_IB_lsc_store_local_ulong (__local ulong *base, int immElemOff, ulong val); //D64V1
void __builtin_IB_lsc_store_local_ulong2(__local ulong2 *base, int immElemOff, ulong2 val); //D64V2
void __builtin_IB_lsc_store_local_ulong3(__local ulong3 *base, int immElemOff, ulong3 val); //D64V3
void __builtin_IB_lsc_store_local_ulong4(__local ulong4 *base, int immElemOff, ulong4 val); //D64V4
void __builtin_IB_lsc_store_local_ulong8(__local ulong8 *base, int immElemOff, ulong8 val); //D64V8
// Local (SLM) address space stores with channel mask (cmask).
// NOTE(review): unlike every other local (SLM) load/store/atomic in this
// file, these take an 'enum LSC_STCC cacheOpt' parameter -- confirm whether
// the cache-control argument is intentional for SLM or a copy-paste from
// the global cmask stores.
void __builtin_IB_lsc_store_cmask_local_uint (__local uint *base, int elemOff, uint val, enum LSC_STCC cacheOpt); //D32V1
void __builtin_IB_lsc_store_cmask_local_uint2 (__local uint2 *base, int elemOff, uint2 val, enum LSC_STCC cacheOpt); //D32V2
void __builtin_IB_lsc_store_cmask_local_uint3 (__local uint3 *base, int elemOff, uint3 val, enum LSC_STCC cacheOpt); //D32V3
void __builtin_IB_lsc_store_cmask_local_uint4 (__local uint4 *base, int elemOff, uint4 val, enum LSC_STCC cacheOpt); //D32V4
void __builtin_IB_lsc_store_cmask_local_ulong (__local ulong *base, int elemOff, ulong val, enum LSC_STCC cacheOpt); //D64V1
void __builtin_IB_lsc_store_cmask_local_ulong2(__local ulong2 *base, int elemOff, ulong2 val, enum LSC_STCC cacheOpt); //D64V2
void __builtin_IB_lsc_store_cmask_local_ulong3(__local ulong3 *base, int elemOff, ulong3 val, enum LSC_STCC cacheOpt); //D64V3
void __builtin_IB_lsc_store_cmask_local_ulong4(__local ulong4 *base, int elemOff, ulong4 val, enum LSC_STCC cacheOpt); //D64V4
///////////////////////////////////////////////////////////////////////
// prefetching
///////////////////////////////////////////////////////////////////////
//
// LSC Pre-Fetch Load functions with CacheControls
// global address space
void __builtin_IB_lsc_prefetch_global_uchar (const __global uchar *base, int immElemOff, enum LSC_LDCC cacheOpt); //D8U32
void __builtin_IB_lsc_prefetch_global_ushort(const __global ushort *base, int immElemOff, enum LSC_LDCC cacheOpt); //D16U32
void __builtin_IB_lsc_prefetch_global_uint (const __global uint *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V1
void __builtin_IB_lsc_prefetch_global_uint2 (const __global uint2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V2
void __builtin_IB_lsc_prefetch_global_uint3 (const __global uint3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V3
void __builtin_IB_lsc_prefetch_global_uint4 (const __global uint4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V4
void __builtin_IB_lsc_prefetch_global_uint8 (const __global uint8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V8
void __builtin_IB_lsc_prefetch_global_ulong (const __global ulong *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V1
void __builtin_IB_lsc_prefetch_global_ulong2(const __global ulong2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V2
void __builtin_IB_lsc_prefetch_global_ulong3(const __global ulong3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V3
void __builtin_IB_lsc_prefetch_global_ulong4(const __global ulong4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V4
void __builtin_IB_lsc_prefetch_global_ulong8(const __global ulong8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V8
// Global address space prefetch, returning a boolean status
// value per subgroup item.
// True is returned if base+immElemOff was in-bounds of the TRTT.
bool __builtin_IB_lsc_load_status_global_uchar (const __global uchar *base, int immElemOff, enum LSC_LDCC cacheOpt); //D8U32
bool __builtin_IB_lsc_load_status_global_ushort(const __global ushort *base, int immElemOff, enum LSC_LDCC cacheOpt); //D16U32
bool __builtin_IB_lsc_load_status_global_uint (const __global uint *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V1
bool __builtin_IB_lsc_load_status_global_uint2 (const __global uint2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V2
bool __builtin_IB_lsc_load_status_global_uint3 (const __global uint3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V3
bool __builtin_IB_lsc_load_status_global_uint4 (const __global uint4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V4
bool __builtin_IB_lsc_load_status_global_uint8 (const __global uint8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D32V8
bool __builtin_IB_lsc_load_status_global_ulong (const __global ulong *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V1
bool __builtin_IB_lsc_load_status_global_ulong2(const __global ulong2 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V2
bool __builtin_IB_lsc_load_status_global_ulong3(const __global ulong3 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V3
bool __builtin_IB_lsc_load_status_global_ulong4(const __global ulong4 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V4
bool __builtin_IB_lsc_load_status_global_ulong8(const __global ulong8 *base, int immElemOff, enum LSC_LDCC cacheOpt); //D64V8
///////////////////////////////////////////////////////////////////////
// LSC Fence support
///////////////////////////////////////////////////////////////////////
// FS - Fence Scope
enum LSC_FS {
// NOTE(review): exact visibility guarantees of each scope are platform-
// specific -- see the hardware documentation before relying on them.
LSC_FS_THREAD_GROUP, // fence visible within the thread group
LSC_FS_LOCAL, // local scope
LSC_FS_TILE, // tile scope
LSC_FS_GPU, // scope is the whole GPU
LSC_FS_GPUs, // scope spans all GPUs in the system
LSC_FS_SYSTEM_RELEASE, // system scope, release semantics
LSC_FS_SYSTEM_ACQUIRE // system scope, acquire semantics
};
// FT - Fence Type
enum LSC_FT {
// NOTE(review): the precise cache action of each flush type is platform-
// specific -- see the hardware documentation for exact semantics.
LSC_FT_DEFAULT, // default flush behavior
LSC_FT_EVICT, // evict lines
LSC_FT_INVALIDATE, // invalidate lines
LSC_FT_DISCARD, // discard lines
LSC_FT_CLEAN, // clean (write back) lines
LSC_FT_L3 // applies to L3 -- TODO confirm exact semantics
};
// Fence for untyped global memory accesses (UGM port).
void __builtin_IB_lsc_fence_global_untyped(enum LSC_FS scope, enum LSC_FT flushType); // Mem Port - UGM
// Fence for untyped global memory accesses across tiles (UGML port).
void __builtin_IB_lsc_fence_global_untyped_cross_tile(enum LSC_FS scope, enum LSC_FT flushType); // Mem Port - UGML
// Fence for typed global memory accesses (TGM port).
void __builtin_IB_lsc_fence_global_typed(enum LSC_FS scope, enum LSC_FT flushType); // Mem Port - TGM
// Fence for shared local memory. '(void)' makes this a proper prototype;
// the original '()' declared a C unspecified-parameter list.
void __builtin_IB_lsc_fence_local(void); // Mem Port - SLM
///////////////////////////////////////////////////////////////////////
// LSC atomics
///////////////////////////////////////////////////////////////////////
//////////////////////////////
// floating point
//////////////////////////////
// FP32 global
float __builtin_IB_lsc_atomic_add_global_float(volatile __global float *base, int immElemOff, float val, enum LSC_STCC cacheOpt);
float __builtin_IB_lsc_atomic_sub_global_float(volatile __global float *base, int immElemOff, float val, enum LSC_STCC cacheOpt);
float __builtin_IB_lsc_atomic_min_global_float(volatile __global float *base, int immElemOff, float val, enum LSC_STCC cacheOpt);
float __builtin_IB_lsc_atomic_max_global_float(volatile __global float *base, int immElemOff, float val, enum LSC_STCC cacheOpt);
float __builtin_IB_lsc_atomic_cmpxchg_global_float(volatile __global float *base, int immElemOff, float cmp, float val, enum LSC_STCC cacheOpt);
// FP32 local
float __builtin_IB_lsc_atomic_min_local_float(volatile __local float *base, int immElemOff, float val);
float __builtin_IB_lsc_atomic_max_local_float(volatile __local float *base, int immElemOff, float val);
float __builtin_IB_lsc_atomic_cmpxchg_local_float(volatile __local float *base, int immElemOff, float cmp, float val);
// FP64 add,sub atomic support
double __builtin_IB_lsc_atomic_add_global_double(volatile __global double *base, int immElemOff, double val, enum LSC_STCC cacheOpt);
double __builtin_IB_lsc_atomic_sub_global_double(volatile __global double *base, int immElemOff, double val, enum LSC_STCC cacheOpt);
//////////////////////////////
// integer
//////////////////////////////
// LSC I16 atomics global
uint __builtin_IB_lsc_atomic_inc_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_dec_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_add_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_sub_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
int __builtin_IB_lsc_atomic_min_global_short_from_int (volatile __global short *base, int immElemOff, int val, enum LSC_STCC cacheOpt);
int __builtin_IB_lsc_atomic_max_global_short_from_int (volatile __global short *base, int immElemOff, int val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_min_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_max_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_and_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_or_global_ushort_from_uint (volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_xor_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_load_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_store_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_cmpxchg_global_ushort_from_uint(volatile __global ushort *base, int immElemOff, uint cmp, uint val, enum LSC_STCC cacheOpt);
// LSC I16 atomics local
uint __builtin_IB_lsc_atomic_inc_local_ushort_from_uint(volatile __local ushort *base, int immElemOff);
uint __builtin_IB_lsc_atomic_dec_local_ushort_from_uint(volatile __local ushort *base, int immElemOff);
uint __builtin_IB_lsc_atomic_add_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_sub_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
int __builtin_IB_lsc_atomic_min_local_short_from_int (volatile __local short *base, int immElemOff, int val);
int __builtin_IB_lsc_atomic_max_local_short_from_int (volatile __local short *base, int immElemOff, int val);
uint __builtin_IB_lsc_atomic_min_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_max_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_and_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_or_local_ushort_from_uint (volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_xor_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_load_local_ushort_from_uint(volatile __local ushort *base, int immElemOff);
uint __builtin_IB_lsc_atomic_store_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_cmpxchg_local_ushort_from_uint(volatile __local ushort *base, int immElemOff, uint cmp, uint val);
// LSC I32 atomics global
uint __builtin_IB_lsc_atomic_inc_global_uint(volatile __global uint *base, int immElemOff, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_dec_global_uint(volatile __global uint *base, int immElemOff, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_add_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_sub_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
int __builtin_IB_lsc_atomic_min_global_int (volatile __global int *base, int immElemOff, int val, enum LSC_STCC cacheOpt);
int __builtin_IB_lsc_atomic_max_global_int (volatile __global int *base, int immElemOff, int val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_min_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_max_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_and_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_or_global_uint (volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_xor_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_load_global_uint(volatile __global uint *base, int immElemOff, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_store_global_uint(volatile __global uint *base, int immElemOff, uint val, enum LSC_STCC cacheOpt);
uint __builtin_IB_lsc_atomic_cmpxchg_global_uint(volatile __global uint *base, int immElemOff, uint cmp, uint val, enum LSC_STCC cacheOpt);
// LSC I32 atomics local
uint __builtin_IB_lsc_atomic_inc_local_uint(volatile __local uint *base, int immElemOff);
uint __builtin_IB_lsc_atomic_dec_local_uint(volatile __local uint *base, int immElemOff);
uint __builtin_IB_lsc_atomic_add_local_uint(volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_sub_local_uint(volatile __local uint *base, int immElemOff, uint val);
int __builtin_IB_lsc_atomic_min_local_int (volatile __local int *base, int immElemOff, int val);
int __builtin_IB_lsc_atomic_max_local_int (volatile __local int *base, int immElemOff, int val);
uint __builtin_IB_lsc_atomic_min_local_uint(volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_max_local_uint(volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_and_local_uint(volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_or_local_uint (volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_xor_local_uint(volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_load_local_uint(volatile __local uint *base, int immElemOff);
uint __builtin_IB_lsc_atomic_store_local_uint(volatile __local uint *base, int immElemOff, uint val);
uint __builtin_IB_lsc_atomic_cmpxchg_local_uint(volatile __local uint *base, int immElemOff, uint cmp, uint val);
// LSC I64 atomics global
ulong __builtin_IB_lsc_atomic_inc_global_ulong(volatile __global ulong *base, int immElemOff, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_dec_global_ulong(volatile __global ulong *base, int immElemOff, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_add_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_sub_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
long __builtin_IB_lsc_atomic_min_global_long (volatile __global long *base, int immElemOff, long val, enum LSC_STCC cacheOpt);
long __builtin_IB_lsc_atomic_max_global_long (volatile __global long *base, int immElemOff, long val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_min_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_max_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_and_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_or_global_ulong (volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_xor_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_load_global_ulong(volatile __global ulong *base, int immElemOff, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_store_global_ulong(volatile __global ulong *base, int immElemOff, ulong val, enum LSC_STCC cacheOpt);
ulong __builtin_IB_lsc_atomic_cmpxchg_global_ulong(volatile __global ulong *base, int immElemOff, ulong cmp, ulong val, enum LSC_STCC cacheOpt);
// LSC I64 atomics local
ulong __builtin_IB_lsc_atomic_cmpxchg_local_ulong(volatile __local ulong *base, int immElemOff, ulong cmp, ulong val);
#endif // cl_intel_pvc_lsc_validation
#endif // IGCBIF_INTRINSICS_LSC_CL