author     Richard Braun <rbraun@sceen.net>   2014-04-27 16:44:09 +0200
committer  Richard Braun <rbraun@sceen.net>   2014-04-27 16:44:24 +0200
commit     7c1352c7fd91bf71604d88753e624cc0996a3ccc
tree       acc5cb219b7308e3c2d6d644c48c8f1489b31aeb
parent     6fdc04fb087b2ec156cc0ea4d024f555a3cf0de4
x86/atomic: add fetchadd operations
-rw-r--r--   arch/x86/machine/atomic.h   30
1 file changed, 30 insertions(+), 0 deletions(-)
diff --git a/arch/x86/machine/atomic.h b/arch/x86/machine/atomic.h
index fb8428d4..3fe3ade9 100644
--- a/arch/x86/machine/atomic.h
+++ b/arch/x86/machine/atomic.h
@@ -26,6 +26,12 @@
                  : "+m" (*(ptr))            \
                  : "r" (delta))
 
+#define ATOMIC_FETCHADD(ptr, oldval, delta)         \
+    asm volatile("lock xadd %1, %0"                 \
+                 : "+m" (*(ptr)), "=r" (oldval)     \
+                 : "1" (delta)                      \
+                 : "memory")
+
 #define ATOMIC_AND(ptr, bits)               \
     asm volatile("lock and %1, %0"          \
                  : "+m" (*(ptr))            \
@@ -55,6 +61,18 @@ atomic_add_uint(volatile unsigned int *ptr, int delta)
     ATOMIC_ADD(ptr, delta);
 }
 
+/*
+ * Implies a full memory barrier.
+ */
+static inline unsigned int
+atomic_fetchadd_uint(volatile unsigned int *ptr, int delta)
+{
+    unsigned int oldval;
+
+    ATOMIC_FETCHADD(ptr, oldval, delta);
+    return oldval;
+}
+
 static inline void
 atomic_and_uint(volatile unsigned int *ptr, unsigned int bits)
 {
@@ -98,6 +116,18 @@ atomic_add_ulong(volatile unsigned long *ptr, long delta)
     ATOMIC_ADD(ptr, delta);
 }
 
+/*
+ * Implies a full memory barrier.
+ */
+static inline unsigned long
+atomic_fetchadd_ulong(volatile unsigned long *ptr, long delta)
+{
+    unsigned long oldval;
+
+    ATOMIC_FETCHADD(ptr, oldval, delta);
+    return oldval;
+}
+
 static inline void
 atomic_and_ulong(volatile unsigned long *ptr, unsigned long bits)
 {
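For context, "lock xadd" atomically exchanges the register operand with the memory operand and stores their sum back to memory, so the caller receives the value the location held before the addition; on x86 the lock prefix also gives the operation full-barrier semantics, which is what the "Implies a full memory barrier" comment refers to. The following is a hypothetical, stand-alone user-space sketch (not part of this commit) of how such a fetchadd primitive might be used, for example to hand out strictly increasing ticket numbers; it assumes GCC or Clang inline asm on x86/x86_64, and the function name fetchadd_uint is a local stand-in for the kernel's atomic_fetchadd_uint():

/*
 * Hypothetical illustration only (not from this commit): a stand-alone
 * re-implementation of the fetchadd primitive, used to hand out
 * strictly increasing ticket numbers.  Requires GCC/Clang on x86/x86_64.
 */
#include <stdio.h>

static inline unsigned int
fetchadd_uint(volatile unsigned int *ptr, int delta)
{
    /* With xadd, the source register receives the old memory value. */
    unsigned int oldval = delta;

    __asm__ __volatile__("lock xadd %1, %0"
                         : "+m" (*ptr), "+r" (oldval)
                         :
                         : "memory");
    return oldval;
}

static volatile unsigned int next_ticket;

int
main(void)
{
    unsigned int t1 = fetchadd_uint(&next_ticket, 1);  /* returns 0 */
    unsigned int t2 = fetchadd_uint(&next_ticket, 1);  /* returns 1 */

    printf("tickets: %u %u, counter now %u\n", t1, t2, next_ticket);
    return 0;
}

The "+r" constraint on oldval (initialized to delta) is equivalent to the patch's "=r"/"1" matching-constraint pair: either way the register carries delta into the instruction and the old memory value out of it.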