From: Alexander Sverdlin <alexander.sverd...@nokia.com>

Introducing xchg_relaxed() and wiring it up as atomic_xchg_relaxed() has the
effect of removing one redundant SYNCW from queued_spin_lock_slowpath() on
Octeon.

Signed-off-by: Alexander Sverdlin <alexander.sverd...@nokia.com>
---
 arch/mips/include/asm/atomic.h  | 2 ++
 arch/mips/include/asm/cmpxchg.h | 4 ++++
 2 files changed, 6 insertions(+)

diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index a4e5116..3b0f54b 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -266,5 +266,7 @@ ATOMIC_SIP_OP(atomic64, s64, dsubu, lld, scd)
 
 #define atomic_cmpxchg_relaxed(v, o, n) \
        (cmpxchg_relaxed(&((v)->counter), (o), (n)))
+#define atomic_xchg_relaxed(v, new) \
+       (xchg_relaxed(&((v)->counter), (new)))
 
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 620f01a..7830d81 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -110,6 +110,10 @@ unsigned long __xchg(volatile void *ptr, unsigned long x, 
int size)
        __res;                                                          \
 })
 
+#define xchg_relaxed(ptr, x)                                           \
+       ((__typeof__(*(ptr)))                                           \
+               __xchg((ptr), (unsigned long)(x), sizeof(*(ptr))))
+
 #define __cmpxchg_asm(ld, st, m, old, new)                             \
 ({                                                                     \
        __typeof(*(m)) __ret;                                           \
-- 
2.10.2

Reply via email to