/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_ATOMIC_H
#define __ASM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/cmpxchg.h>
#include <asm/lse.h>
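
/*
 * The operations below are generated from macros so that the 32-bit and
 * 64-bit variants stay in lockstep. __lse_ll_sc_body() (from <asm/lse.h>)
 * selects at runtime between the ARMv8.1 LSE atomic instructions and the
 * classic LL/SC (load-exclusive/store-exclusive) implementation, depending
 * on whether the CPU supports LSE atomics.
 */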
#define ATOMIC_OP(op)						\
static __always_inline void arch_##op(int i, atomic_t *v)	\
{								\
	__lse_ll_sc_body(op, i, v);				\
}
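
/*
 * For example, ATOMIC_OP(atomic_add) expands to:
 *
 *	static __always_inline void arch_atomic_add(int i, atomic_t *v)
 *	{
 *		__lse_ll_sc_body(atomic_add, i, v);
 *	}
 */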
ATOMIC_OP(atomic_andnot)
ATOMIC_OP(atomic_or)
ATOMIC_OP(atomic_xor)
ATOMIC_OP(atomic_add)
ATOMIC_OP(atomic_and)
ATOMIC_OP(atomic_sub)

#undef ATOMIC_OP

#define ATOMIC_FETCH_OP(name, op)				\
static __always_inline int arch_##op##name(int i, atomic_t *v)	\
{								\
	return __lse_ll_sc_body(op##name, i, v);		\
}

#define ATOMIC_FETCH_OPS(op)					\
	ATOMIC_FETCH_OP(_relaxed, op)				\
	ATOMIC_FETCH_OP(_acquire, op)				\
	ATOMIC_FETCH_OP(_release, op)				\
	ATOMIC_FETCH_OP(        , op)
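
/*
 * Every value-returning operation comes in four memory-ordering flavours:
 * _relaxed (no ordering), _acquire, _release, and a fully-ordered variant
 * with no suffix. ATOMIC_FETCH_OPS(atomic_fetch_add), for instance, generates
 * arch_atomic_fetch_add_relaxed(), arch_atomic_fetch_add_acquire(),
 * arch_atomic_fetch_add_release() and arch_atomic_fetch_add().
 */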
ATOMIC_FETCH_OPS(atomic_fetch_andnot)
ATOMIC_FETCH_OPS(atomic_fetch_or)
ATOMIC_FETCH_OPS(atomic_fetch_xor)
ATOMIC_FETCH_OPS(atomic_fetch_add)
ATOMIC_FETCH_OPS(atomic_fetch_and)
ATOMIC_FETCH_OPS(atomic_fetch_sub)
ATOMIC_FETCH_OPS(atomic_add_return)
ATOMIC_FETCH_OPS(atomic_sub_return)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_FETCH_OPS
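
/*
 * The 64-bit generators below mirror the 32-bit ones above, operating on
 * atomic64_t and a long operand instead of atomic_t and int.
 */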
#define ATOMIC64_OP(op)						\
static __always_inline void arch_##op(long i, atomic64_t *v)	\
{								\
	__lse_ll_sc_body(op, i, v);				\
}
ATOMIC64_OP(atomic64_andnot)
ATOMIC64_OP(atomic64_or)
ATOMIC64_OP(atomic64_xor)
ATOMIC64_OP(atomic64_add)
ATOMIC64_OP(atomic64_and)
ATOMIC64_OP(atomic64_sub)

#undef ATOMIC64_OP

#define ATOMIC64_FETCH_OP(name, op)				\
static __always_inline long arch_##op##name(long i, atomic64_t *v) \
{								\
	return __lse_ll_sc_body(op##name, i, v);		\
}

#define ATOMIC64_FETCH_OPS(op)					\
	ATOMIC64_FETCH_OP(_relaxed, op)				\
	ATOMIC64_FETCH_OP(_acquire, op)				\
	ATOMIC64_FETCH_OP(_release, op)				\
	ATOMIC64_FETCH_OP(        , op)
ATOMIC64_FETCH_OPS(atomic64_fetch_andnot)
ATOMIC64_FETCH_OPS(atomic64_fetch_or)
ATOMIC64_FETCH_OPS(atomic64_fetch_xor)
ATOMIC64_FETCH_OPS(atomic64_fetch_add)
ATOMIC64_FETCH_OPS(atomic64_fetch_and)
ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
ATOMIC64_FETCH_OPS(atomic64_add_return)
ATOMIC64_FETCH_OPS(atomic64_sub_return)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_FETCH_OPS
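
/*
 * atomic64_dec_if_positive() has no ordering variants, so it is provided
 * directly rather than via the generator macros above. It decrements @v
 * unless the result would be negative, and returns the decremented value
 * either way; a negative return value therefore means @v was left unchanged.
 */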
static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
{
	return __lse_ll_sc_body(atomic64_dec_if_positive, v);
}
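
/*
 * arch_atomic_read() and arch_atomic_set() need no atomic instructions:
 * __READ_ONCE() and __WRITE_ONCE() already give single-copy-atomic,
 * tear-free accesses on arm64, and impose no memory ordering of their own.
 */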
#define arch_atomic_read(v) __READ_ONCE((v)->counter)
#define arch_atomic_set(v, i) __WRITE_ONCE(((v)->counter), (i))
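
/*
 * Each "#define arch_foo arch_foo" below marks the operation as implemented
 * by the architecture, so the generic fallback machinery in the common
 * atomic headers does not generate a fallback version of it.
 */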
#define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#define arch_atomic_add_return_release arch_atomic_add_return_release
#define arch_atomic_add_return arch_atomic_add_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#define arch_atomic_sub_return arch_atomic_sub_return
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#define arch_atomic_fetch_add arch_atomic_fetch_add
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#define arch_atomic_fetch_and arch_atomic_fetch_and
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#define arch_atomic_fetch_or arch_atomic_fetch_or
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#define arch_atomic_andnot arch_atomic_andnot

/*
 * 64-bit arch_atomic operations. The 64-bit accessors reuse the 32-bit
 * read/set macros: __READ_ONCE() and __WRITE_ONCE() are type-generic, so
 * they work equally well on the 64-bit counter of an atomic64_t.
 */
#define ATOMIC64_INIT ATOMIC_INIT
#define arch_atomic64_read arch_atomic_read
#define arch_atomic64_set arch_atomic_set
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#define arch_atomic64_add_return arch_atomic64_add_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#define arch_atomic64_sub_return arch_atomic64_sub_return
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#define arch_atomic64_andnot arch_atomic64_andnot
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
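
/*
 * Note: kernel code does not call the arch_*() operations directly; it uses
 * the atomic_*()/atomic64_*() wrappers, which layer instrumentation on top.
 * A hypothetical caller would look like:
 *
 *	static atomic64_t nr_events = ATOMIC64_INIT(0);
 *
 *	void record_event(void)
 *	{
 *		atomic64_inc(&nr_events);
 *	}
 */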
#endif /* __ASM_ATOMIC_H */