author    Richard Braun <rbraun@sceen.net>  2014-05-05 21:48:22 +0200
committer Richard Braun <rbraun@sceen.net>  2014-05-05 21:48:22 +0200
commit    a67386802e26cd9ea613bf5bdf385f5b3d3645ea (patch)
tree      bfbee7a27d518c454fa4c026d34b2491afef5bec
parent    3caa7fe14357894370c8d3b92cc2822b34f2c6a1 (diff)
x86/atomic: add xor operations
-rw-r--r--  arch/x86/machine/atomic.h  17
1 file changed, 17 insertions, 0 deletions
diff --git a/arch/x86/machine/atomic.h b/arch/x86/machine/atomic.h
index 3fe3ade9..3b2ede51 100644
--- a/arch/x86/machine/atomic.h
+++ b/arch/x86/machine/atomic.h
@@ -42,6 +42,11 @@
: "+m" (*(ptr)) \
: "r" (bits))
+#define ATOMIC_XOR(ptr, bits) \
+ asm volatile("lock xor %1, %0" \
+ : "+m" (*(ptr)) \
+ : "r" (bits))
+
/* The xchg instruction doesn't need a lock prefix */
#define ATOMIC_SWAP(ptr, oldval, newval) \
asm volatile("xchg %1, %0" \
@@ -85,6 +90,12 @@ atomic_or_uint(volatile unsigned int *ptr, unsigned int bits)
ATOMIC_OR(ptr, bits);
}
+static inline void
+atomic_xor_uint(volatile unsigned int *ptr, unsigned int bits)
+{
+ ATOMIC_XOR(ptr, bits);
+}
+
/*
* Implies a full memory barrier.
*/
@@ -140,6 +151,12 @@ atomic_or_ulong(volatile unsigned long *ptr, unsigned long bits)
ATOMIC_OR(ptr, bits);
}
+static inline void
+atomic_xor_ulong(volatile unsigned long *ptr, unsigned long bits)
+{
+ ATOMIC_XOR(ptr, bits);
+}
+
/*
* Implies a full memory barrier.
*/
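
For context, a minimal usage sketch of the new helper: toggling flag bits shared
between concurrent contexts. The flag names, the variable, and the include path
are illustrative assumptions, not part of this patch:

#include <machine/atomic.h>   /* assumed include path for this header */

/* Hypothetical flag bits flipped by concurrent code paths. */
#define EXAMPLE_FLAG_A 0x1U
#define EXAMPLE_FLAG_B 0x2U

static volatile unsigned int example_flags;

static void
example_toggle_flags(void)
{
    /*
     * The lock xor flips the selected bits in a single atomic
     * read-modify-write, so concurrent togglers cannot lose
     * each other's updates.
     */
    atomic_xor_uint(&example_flags, EXAMPLE_FLAG_A | EXAMPLE_FLAG_B);
}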