/* SPDX-License-Identifier: GPL-2.0+ */
#ifndef _ASM_GENERIC_ATOMIC_H
#define _ASM_GENERIC_ATOMIC_H
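
/*
 * Generic atomic_t/atomic64_t helpers.
 *
 * Atomicity is obtained by masking interrupts around a plain C
 * read-modify-write with local_irq_save()/local_irq_restore(), which
 * the architecture is expected to provide.  This protects against
 * interrupts on the local CPU only; there is no SMP ordering.
 */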
typedef struct { volatile int counter; } atomic_t;

#if BITS_PER_LONG == 32
typedef struct { volatile long long counter; } atomic64_t;
#else /* BITS_PER_LONG != 32 */
typedef struct { volatile long counter; } atomic64_t;
#endif

#define ATOMIC_INIT(i) { (i) }
#define atomic_read(v) ((v)->counter)
#define atomic_set(v, i) ((v)->counter = (i))
#define atomic64_read(v) atomic_read(v)
#define atomic64_set(v, i) atomic_set(v, i)
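
/*
 * 32-bit helpers: each disables interrupts, updates v->counter in
 * plain C and restores the saved interrupt state.
 */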
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static inline void atomic_inc(atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	++v->counter;
	local_irq_restore(flags);
}

static inline void atomic_dec(atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	--v->counter;
	local_irq_restore(flags);
}
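
/* Decrement @v and return true if the new value is zero. */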
static inline int atomic_dec_and_test(volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter - 1;
	v->counter = val;
	local_irq_restore(flags);

	return val == 0;
}
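
/* Add @i to @v and return true if the result is negative. */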
static inline int atomic_add_negative(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter + i;
	v->counter = val;
	local_irq_restore(flags);

	return val < 0;
}
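
/* Clear the bits set in @mask from the word at @addr. */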
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	*addr &= ~mask;
	local_irq_restore(flags);
}
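
/*
 * 64-bit variants: with BITS_PER_LONG == 32 the counter is a long long,
 * otherwise a plain long is already 64 bits wide.
 */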
#if BITS_PER_LONG == 32
static inline void atomic64_add(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}
#else /* BITS_PER_LONG != 32 */
static inline void atomic64_add(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}
#endif
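
/* 64-bit increment/decrement, implemented like the 32-bit versions. */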
static inline void atomic64_inc(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic64_dec(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}
#endif /* _ASM_GENERIC_ATOMIC_H */