-rw-r--r--   arch/x86/Makefrag.am       |  1 +
-rw-r--r--   arch/x86/machine/atomic.h  | 77 ++++++++++++++++++++
 2 files changed, 78 insertions(+), 0 deletions(-)
diff --git a/arch/x86/Makefrag.am b/arch/x86/Makefrag.am
index d3e4aa51..6e61160c 100644
--- a/arch/x86/Makefrag.am
+++ b/arch/x86/Makefrag.am
@@ -2,6 +2,7 @@ x86_FILES = \
 	arch/x86/machine/acpimp.c \
 	arch/x86/machine/acpimp.h \
 	arch/x86/machine/asm.h \
+	arch/x86/machine/atomic.h \
 	arch/x86/machine/biosmem.c \
 	arch/x86/machine/biosmem.h \
 	arch/x86/machine/boot_asm.S \
diff --git a/arch/x86/machine/atomic.h b/arch/x86/machine/atomic.h
new file mode 100644
index 00000000..3386a530
--- /dev/null
+++ b/arch/x86/machine/atomic.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2012 Richard Braun.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ *
+ * Atomic operations.
+ *
+ * Note that these operations cannot be assumed to imply a compiler
+ * barrier or a memory barrier.
+ */
+
+#ifndef _X86_ATOMIC_H
+#define _X86_ATOMIC_H
+
+static inline void
+atomic_add(volatile unsigned long *ptr, long delta)
+{
+    asm volatile("lock add %1, %0"
+                 : "+m" (*ptr)
+                 : "r" (delta));
+}
+
+static inline void
+atomic_and(volatile unsigned long *ptr, unsigned long bits)
+{
+    asm volatile("lock and %1, %0"
+                 : "+m" (*ptr)
+                 : "r" (bits));
+}
+
+static inline void
+atomic_or(volatile unsigned long *ptr, unsigned long bits)
+{
+    asm volatile("lock or %1, %0"
+                 : "+m" (*ptr)
+                 : "r" (bits));
+}
+
+static inline unsigned long
+atomic_swap(volatile unsigned long *ptr, unsigned long newval)
+{
+    unsigned long prev;
+
+    /* The xchg instruction doesn't need a lock prefix */
+    asm volatile("xchg %1, %0"
+                 : "+m" (*ptr), "=r" (prev)
+                 : "1" (newval));
+
+    return prev;
+}
+
+static inline unsigned long
+atomic_cas(volatile unsigned long *ptr, unsigned long oldval,
+           unsigned long newval)
+{
+    unsigned long prev;
+
+    asm volatile("lock cmpxchg %3, %0"
+                 : "+m" (*ptr), "=a" (prev)
+                 : "1" (oldval), "r" (newval));
+
+    return prev;
+}
+
+#endif /* _X86_ATOMIC_H */
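
Illustration only, not part of this commit: a minimal sketch of how callers might use the new primitives. Since the header notes that these operations imply neither a compiler nor a memory barrier, the caller supplies its own ordering. The barrier() macro, the example_lock type, the example_* functions and the include path below are hypothetical names introduced for this sketch; on x86 the locked instructions and xchg are already fully ordered in hardware, so a compiler barrier is enough here.

#include <machine/atomic.h>     /* hypothetical include path for the new header */

/* Hypothetical pure compiler barrier (standard GCC extended asm idiom). */
#define barrier() asm volatile("" : : : "memory")

struct example_lock {
    volatile unsigned long locked;      /* 0: free, 1: held */
};

static inline void
example_lock_acquire(struct example_lock *lock)
{
    /* Spin until the previous value was 0, i.e. this CPU took the lock. */
    while (atomic_swap(&lock->locked, 1) != 0)
        continue;

    /*
     * atomic_swap() implies no barrier; prevent the compiler from moving
     * critical section accesses above the acquisition.
     */
    barrier();
}

static inline void
example_lock_release(struct example_lock *lock)
{
    /* Make sure critical section stores are emitted before the release. */
    barrier();
    lock->locked = 0;
}

/*
 * Increment *ptr unless it is zero; return 0 if it was zero, 1 otherwise.
 * Shows the atomic_cas() convention of returning the previous value.
 */
static inline int
example_inc_not_zero(volatile unsigned long *ptr)
{
    unsigned long val, prev;

    do {
        val = *ptr;

        if (val == 0)
            return 0;

        prev = atomic_cas(ptr, val, val + 1);
    } while (prev != val);

    return 1;
}

The release path relies on x86's strong store ordering; a port to a weakly ordered architecture would need a real memory barrier rather than just a compiler barrier.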