diff options
-rw-r--r-- | arch/x86/machine/atomic.h | 30 |
1 file changed, 30 insertions, 0 deletions
diff --git a/arch/x86/machine/atomic.h b/arch/x86/machine/atomic.h index fb8428d4..3fe3ade9 100644 --- a/arch/x86/machine/atomic.h +++ b/arch/x86/machine/atomic.h @@ -26,6 +26,12 @@ : "+m" (*(ptr)) \ : "r" (delta)) +#define ATOMIC_FETCHADD(ptr, oldval, delta) \ + asm volatile("lock xadd %1, %0" \ + : "+m" (*(ptr)), "=r" (oldval) \ + : "1" (delta) \ + : "memory") + #define ATOMIC_AND(ptr, bits) \ asm volatile("lock and %1, %0" \ : "+m" (*(ptr)) \ @@ -55,6 +61,18 @@ atomic_add_uint(volatile unsigned int *ptr, int delta) ATOMIC_ADD(ptr, delta); } +/* + * Implies a full memory barrier. + */ +static inline unsigned int +atomic_fetchadd_uint(volatile unsigned int *ptr, int delta) +{ + unsigned int oldval; + + ATOMIC_FETCHADD(ptr, oldval, delta); + return oldval; +} + static inline void atomic_and_uint(volatile unsigned int *ptr, unsigned int bits) { @@ -98,6 +116,18 @@ atomic_add_ulong(volatile unsigned long *ptr, long delta) ATOMIC_ADD(ptr, delta); } +/* + * Implies a full memory barrier. + */ +static inline unsigned long +atomic_fetchadd_ulong(volatile unsigned long *ptr, long delta) +{ + unsigned long oldval; + + ATOMIC_FETCHADD(ptr, oldval, delta); + return oldval; +} + static inline void atomic_and_ulong(volatile unsigned long *ptr, unsigned long bits) { |