/* SPDX-License-Identifier: BSD-3-Clause
* Copyright(c) 2023 Microsoft Corporation
*/
#ifndef RTE_STDATOMIC_H
#define RTE_STDATOMIC_H

#include <assert.h>

#ifdef RTE_ENABLE_STDATOMIC
#ifndef _MSC_VER
#ifdef __STDC_NO_ATOMICS__
#error enable_stdatomic=true but atomics not supported by toolchain
#endif
#endif

#include <stdatomic.h>

/* RTE_ATOMIC(type) is provided for use as a type specifier,
 * allowing declaration of an object with an rte atomic type.
 */
#define RTE_ATOMIC(type) _Atomic(type)
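
/* Usage sketch (the variable name and element type are hypothetical,
 * not part of this header):
 *
 *   static RTE_ATOMIC(uint32_t) pkt_count;
 */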
/* __rte_atomic is provided as a type qualifier, allowing an
 * existing type-name to be qualified as rte atomic.
 */
#define __rte_atomic _Atomic
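
/* Usage sketch (names are hypothetical): the qualifier form can be
 * applied where a type specifier cannot, e.g. to an existing type-name:
 *
 *   uint64_t __rte_atomic seqno;
 */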
/* The memory order is an enumerated type in C11. */
typedef memory_order rte_memory_order;
#define rte_memory_order_relaxed memory_order_relaxed
#ifdef __ATOMIC_RELAXED
static_assert(rte_memory_order_relaxed == __ATOMIC_RELAXED,
	"rte_memory_order_relaxed == __ATOMIC_RELAXED");
#endif

#define rte_memory_order_consume memory_order_consume
#ifdef __ATOMIC_CONSUME
static_assert(rte_memory_order_consume == __ATOMIC_CONSUME,
	"rte_memory_order_consume == __ATOMIC_CONSUME");
#endif

#define rte_memory_order_acquire memory_order_acquire
#ifdef __ATOMIC_ACQUIRE
static_assert(rte_memory_order_acquire == __ATOMIC_ACQUIRE,
	"rte_memory_order_acquire == __ATOMIC_ACQUIRE");
#endif

#define rte_memory_order_release memory_order_release
#ifdef __ATOMIC_RELEASE
static_assert(rte_memory_order_release == __ATOMIC_RELEASE,
	"rte_memory_order_release == __ATOMIC_RELEASE");
#endif

#define rte_memory_order_acq_rel memory_order_acq_rel
#ifdef __ATOMIC_ACQ_REL
static_assert(rte_memory_order_acq_rel == __ATOMIC_ACQ_REL,
	"rte_memory_order_acq_rel == __ATOMIC_ACQ_REL");
#endif

#define rte_memory_order_seq_cst memory_order_seq_cst
#ifdef __ATOMIC_SEQ_CST
static_assert(rte_memory_order_seq_cst == __ATOMIC_SEQ_CST,
	"rte_memory_order_seq_cst == __ATOMIC_SEQ_CST");
#endif

#define rte_atomic_load_explicit(ptr, memorder) \
	atomic_load_explicit(ptr, memorder)

#define rte_atomic_store_explicit(ptr, val, memorder) \
	atomic_store_explicit(ptr, val, memorder)

#define rte_atomic_exchange_explicit(ptr, val, memorder) \
	atomic_exchange_explicit(ptr, val, memorder)
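
/* Usage sketch for the load/store wrappers (pkt_count is a hypothetical
 * RTE_ATOMIC(uint32_t) object):
 *
 *   uint32_t snap = rte_atomic_load_explicit(&pkt_count,
 *       rte_memory_order_acquire);
 *   rte_atomic_store_explicit(&pkt_count, 0, rte_memory_order_release);
 */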

#define rte_atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder)

#define rte_atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder)
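
/* Usage sketch of a typical compare-exchange retry loop (v is a
 * hypothetical RTE_ATOMIC(uint32_t) object). On failure, *expected
 * (here old) is reloaded with the current value, so each retry uses
 * fresh data; the weak form may additionally fail spuriously:
 *
 *   uint32_t old = rte_atomic_load_explicit(&v, rte_memory_order_relaxed);
 *   do {
 *   } while (!rte_atomic_compare_exchange_weak_explicit(&v, &old, old + 1,
 *       rte_memory_order_acq_rel, rte_memory_order_relaxed));
 */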

#define rte_atomic_fetch_add_explicit(ptr, val, memorder) \
	atomic_fetch_add_explicit(ptr, val, memorder)

#define rte_atomic_fetch_sub_explicit(ptr, val, memorder) \
	atomic_fetch_sub_explicit(ptr, val, memorder)

#define rte_atomic_fetch_and_explicit(ptr, val, memorder) \
	atomic_fetch_and_explicit(ptr, val, memorder)

#define rte_atomic_fetch_xor_explicit(ptr, val, memorder) \
	atomic_fetch_xor_explicit(ptr, val, memorder)

#define rte_atomic_fetch_or_explicit(ptr, val, memorder) \
	atomic_fetch_or_explicit(ptr, val, memorder)

#define rte_atomic_fetch_nand_explicit(ptr, val, memorder) \
	atomic_fetch_nand_explicit(ptr, val, memorder)
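
/* Usage sketch of reference counting with the fetch wrappers (refcnt and
 * obj_free() are hypothetical). fetch_sub returns the value held before
 * the subtraction, so a return of 1 means the last reference was dropped:
 *
 *   rte_atomic_fetch_add_explicit(&refcnt, 1, rte_memory_order_relaxed);
 *   ...
 *   if (rte_atomic_fetch_sub_explicit(&refcnt, 1,
 *           rte_memory_order_acq_rel) == 1)
 *       obj_free();
 */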

#define rte_atomic_flag_test_and_set_explicit(ptr, memorder) \
	atomic_flag_test_and_set_explicit(ptr, memorder)

#define rte_atomic_flag_clear_explicit(ptr, memorder) \
	atomic_flag_clear_explicit(ptr, memorder)
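
/* Usage sketch of a minimal test-and-set lock (lock is a hypothetical
 * flag object; under stdatomic it would be an atomic_flag). The loop
 * spins while test_and_set reports the flag was already set:
 *
 *   while (rte_atomic_flag_test_and_set_explicit(&lock,
 *           rte_memory_order_acquire))
 *       ;
 *   ... critical section ...
 *   rte_atomic_flag_clear_explicit(&lock, rte_memory_order_release);
 */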

/* We provide an internal macro here to allow conditional expansion
 * in the body of the per-arch rte_atomic_thread_fence inline functions.
 */
#define __rte_atomic_thread_fence(memorder) \
	atomic_thread_fence(memorder)
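
/* Sketch of how a per-arch header might wrap this macro (illustrative
 * only, not the actual per-arch implementation):
 *
 *   static inline void
 *   rte_atomic_thread_fence(rte_memory_order memorder)
 *   {
 *       __rte_atomic_thread_fence(memorder);
 *   }
 */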
#else /* !RTE_ENABLE_STDATOMIC */
#define RTE_ATOMIC(type) type
#define __rte_atomic
/* The memory order is an integer type in GCC built-ins,
* not an enumerated type like in C11.
*/
typedef int rte_memory_order;
#define rte_memory_order_relaxed __ATOMIC_RELAXED
#define rte_memory_order_consume __ATOMIC_CONSUME
#define rte_memory_order_acquire __ATOMIC_ACQUIRE
#define rte_memory_order_release __ATOMIC_RELEASE
#define rte_memory_order_acq_rel __ATOMIC_ACQ_REL
#define rte_memory_order_seq_cst __ATOMIC_SEQ_CST

#define rte_atomic_load_explicit(ptr, memorder) \
	__atomic_load_n(ptr, memorder)

#define rte_atomic_store_explicit(ptr, val, memorder) \
	__atomic_store_n(ptr, val, memorder)

#define rte_atomic_exchange_explicit(ptr, val, memorder) \
	__atomic_exchange_n(ptr, val, memorder)

#define rte_atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	__atomic_compare_exchange_n(ptr, expected, desired, 0, \
		succ_memorder, fail_memorder)

#define rte_atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	__atomic_compare_exchange_n(ptr, expected, desired, 1, \
		succ_memorder, fail_memorder)
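
/* In __atomic_compare_exchange_n the fourth argument selects a strong (0)
 * or weak (1) compare-exchange; the weak form may fail spuriously but can
 * be cheaper inside retry loops.
 */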

#define rte_atomic_fetch_add_explicit(ptr, val, memorder) \
	__atomic_fetch_add(ptr, val, memorder)

#define rte_atomic_fetch_sub_explicit(ptr, val, memorder) \
	__atomic_fetch_sub(ptr, val, memorder)

#define rte_atomic_fetch_and_explicit(ptr, val, memorder) \
	__atomic_fetch_and(ptr, val, memorder)

#define rte_atomic_fetch_xor_explicit(ptr, val, memorder) \
	__atomic_fetch_xor(ptr, val, memorder)

#define rte_atomic_fetch_or_explicit(ptr, val, memorder) \
	__atomic_fetch_or(ptr, val, memorder)

#define rte_atomic_fetch_nand_explicit(ptr, val, memorder) \
	__atomic_fetch_nand(ptr, val, memorder)

#define rte_atomic_flag_test_and_set_explicit(ptr, memorder) \
	__atomic_test_and_set(ptr, memorder)

#define rte_atomic_flag_clear_explicit(ptr, memorder) \
	__atomic_clear(ptr, memorder)

/* We provide an internal macro here to allow conditional expansion
 * in the body of the per-arch rte_atomic_thread_fence inline functions.
 */
#define __rte_atomic_thread_fence(memorder) \
	__atomic_thread_fence(memorder)
#endif /* RTE_ENABLE_STDATOMIC */

#endif /* RTE_STDATOMIC_H */