Rename __xchg to __arch_xchg: the __xchg name will be used for a non-atomic xchg macro.
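
For context, the non-atomic helper expected to take over the __xchg name
could look roughly like this (a minimal sketch, assuming GNU C's
__auto_type statement-expression style; the actual helper is introduced
elsewhere in this series and may differ):

	/* Sketch only: plain load/store exchange, no barriers, no LL/SC. */
	#define __xchg(ptr, val) ({			\
		__auto_type __ptr = (ptr);		\
		__auto_type __old = *__ptr;		\
		*__ptr = (val);				\
		__old;					\
	})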

Signed-off-by: Andrzej Hajda <andrzej.ha...@intel.com>
---
 arch/mips/include/asm/cmpxchg.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 7ec9493b28614f..feed343ad483a9 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -68,7 +68,7 @@ extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
                                  unsigned int size);
 
 static __always_inline
-unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
+unsigned long __arch_xchg(volatile void *ptr, unsigned long x, int size)
 {
        switch (size) {
        case 1:
@@ -102,7 +102,7 @@ unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
                smp_mb__before_llsc();                                  \
                                                                        \
        __res = (__typeof__(*(ptr)))                                    \
-               __xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));      \
+               __arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
                                                                        \
        smp_llsc_mb();                                                  \
                                                                        \
-- 
2.34.1
