[tip:x86/spinlocks] x86: Add xadd helper macro

From: tip-bot for Jeremy Fitzhardinge
Date: Fri Jul 22 2011 - 15:58:22 EST


Commit-ID: 847c73e8042e565a2cc4934c84103ab82e0eac42
Gitweb: http://git.kernel.org/tip/847c73e8042e565a2cc4934c84103ab82e0eac42
Author: Jeremy Fitzhardinge <jeremy.fitzhardinge@xxxxxxxxxx>
AuthorDate: Thu, 23 Jun 2011 18:19:19 -0700
Committer: H. Peter Anvin <hpa@xxxxxxxxxxxxxxx>
CommitDate: Fri, 22 Jul 2011 11:18:58 -0700

x86: Add xadd helper macro

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@xxxxxxxxxx>
Link: http://lkml.kernel.org/r/ce03e48f4b70a2a31accf32c8b41b781674e57c3.1308878118.git.jeremy.fitzhardinge@xxxxxxxxxx
Signed-off-by: H. Peter Anvin <hpa@xxxxxxxxxxxxxxx>
---
arch/x86/include/asm/cmpxchg_32.h | 21 +++++++++++++++++++++
arch/x86/include/asm/cmpxchg_64.h | 26 ++++++++++++++++++++++++++
2 files changed, 47 insertions(+), 0 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 284a6e8..30f0318 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -280,4 +280,35 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 
 #endif
 
+/*
+ * xadd() adds "inc" to "*ptr", atomically returning the previous value
+ * of "*ptr" in "inc".  LOCK_PREFIX makes the read-modify-write atomic
+ * on SMP.  Unsupported operand sizes fail at link time via
+ * __xadd_wrong_size() instead of silently compiling to nothing.
+ */
+extern void __xadd_wrong_size(void);
+
+#define xadd(ptr, inc)							\
+	do {								\
+		switch (sizeof(*(ptr))) {				\
+		case 1:							\
+			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 2:							\
+			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 4:							\
+			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		default:						\
+			/* e.g. 8-byte operand on 32-bit: link error */	\
+			__xadd_wrong_size();				\
+		}							\
+	} while (0)
+
 #endif /* _ASM_X86_CMPXCHG_32_H */
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 423ae58..62da1ff 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -151,4 +151,40 @@ extern void __cmpxchg_wrong_size(void);
 	cmpxchg_local((ptr), (o), (n)); \
 })
 
+/*
+ * xadd() adds "inc" to "*ptr", atomically returning the previous value
+ * of "*ptr" in "inc".  LOCK_PREFIX makes the read-modify-write atomic
+ * on SMP.  Unsupported operand sizes fail at link time via
+ * __xadd_wrong_size() instead of silently compiling to nothing.
+ */
+extern void __xadd_wrong_size(void);
+
+#define xadd(ptr, inc)							\
+	do {								\
+		switch (sizeof(*(ptr))) {				\
+		case 1:							\
+			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 2:							\
+			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 4:							\
+			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case 8:							\
+			asm volatile (LOCK_PREFIX "xaddq %q0, %1\n"	\
+				      : "+r" (inc), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		default:						\
+			/* e.g. struct or 16-byte operand: link error */\
+			__xadd_wrong_size();				\
+		}							\
+	} while (0)
+
 #endif /* _ASM_X86_CMPXCHG_64_H */
--
To unsubscribe from this list: send the line "unsubscribe linux-kernel" in
the body of a message to majordomo@xxxxxxxxxxxxxxx
More majordomo info at http://vger.kernel.org/majordomo-info.html
Please read the FAQ at http://www.tux.org/lkml/