Use the new cmpxchg_emu_u8() and cmpxchg_emu_u16() to emulate one-byte
and two-byte cmpxchg() on ARC.

Signed-off-by: Paul E. McKenney <paul...@kernel.org>
Cc: Vineet Gupta <vgu...@kernel.org>
Cc: Andi Shyti <andi.sh...@linux.intel.com>
Cc: Andrzej Hajda <andrzej.ha...@intel.com>
Cc: Arnd Bergmann <a...@arndb.de>
Cc: Palmer Dabbelt <pal...@rivosinc.com>
Cc: <linux-snps-arc@lists.infradead.org>
---
 arch/arc/Kconfig               |  1 +
 arch/arc/include/asm/cmpxchg.h | 38 ++++++++++++++++++++++++++--------
 2 files changed, 30 insertions(+), 9 deletions(-)

diff --git a/arch/arc/Kconfig b/arch/arc/Kconfig
index 99d2845f3feb9..0b40039f38eb2 100644
--- a/arch/arc/Kconfig
+++ b/arch/arc/Kconfig
@@ -14,6 +14,7 @@ config ARC
        select ARCH_HAS_SETUP_DMA_OPS
        select ARCH_HAS_SYNC_DMA_FOR_CPU
        select ARCH_HAS_SYNC_DMA_FOR_DEVICE
+       select ARCH_NEED_CMPXCHG_1_2_EMU
        select ARCH_SUPPORTS_ATOMIC_RMW if ARC_HAS_LLSC
        select ARCH_32BIT_OFF_T
        select BUILDTIME_TABLE_SORT
diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h
index e138fde067dea..1e3e23adaca13 100644
--- a/arch/arc/include/asm/cmpxchg.h
+++ b/arch/arc/include/asm/cmpxchg.h
@@ -46,6 +46,12 @@
        __typeof__(*(ptr)) _prev_;                                      \
                                                                        \
        switch(sizeof((_p_))) {                                         \
+       case 1:                                                         \
+               _prev_ = cmpxchg_emu_u8((volatile u8 *)_p_, _o_, _n_);  \
+               break;                                                  \
+       case 2:                                                         \
+               _prev_ = cmpxchg_emu_u16((volatile u16 *)_p_, _o_, _n_); \
+               break;                                                  \
        case 4:                                                         \
                _prev_ = __cmpxchg(_p_, _o_, _n_);                      \
                break;                                                  \
@@ -65,16 +71,30 @@
        __typeof__(*(ptr)) _prev_;                                      \
        unsigned long __flags;                                          \
                                                                        \
-       BUILD_BUG_ON(sizeof(_p_) != 4);                                 \
+       switch(sizeof((_p_))) {                                         \
+       case 1:                                                         \
+               __flags = cmpxchg_emu_u8((volatile u8 *)_p_, _o_, _n_); \
+               _prev_ = (__typeof__(*(ptr)))__flags;                   \
+               break;                                                  \
+       case 2:                                                         \
+               __flags = cmpxchg_emu_u16((volatile u16 *)_p_, _o_, _n_); \
+               _prev_ = (__typeof__(*(ptr)))__flags;                   \
+               break;                                                  \
+       case 4:                                                         \
+               /*                                                      \
+                * spin lock/unlock provide the needed smp_mb()         \
+                * before/after                                         \
+                */                                                     \
+               atomic_ops_lock(__flags);                               \
+               _prev_ = *_p_;                                          \
+               if (_prev_ == _o_)                                      \
+                       *_p_ = _n_;                                     \
+               atomic_ops_unlock(__flags);                             \
+               break;                                                  \
+       default:                                                        \
+               BUILD_BUG();                                            \
+       }                                                               \
                                                                        \
-       /*                                                              \
-        * spin lock/unlock provide the needed smp_mb() before/after    \
-        */                                                             \
-       atomic_ops_lock(__flags);                                       \
-       _prev_ = *_p_;                                                  \
-       if (_prev_ == _o_)                                              \
-               *_p_ = _n_;                                             \
-       atomic_ops_unlock(__flags);                                     \
        _prev_;                                                         \
 })
 
-- 
2.40.1


_______________________________________________
linux-snps-arc mailing list
linux-snps-arc@lists.infradead.org
http://lists.infradead.org/mailman/listinfo/linux-snps-arc

Reply via email to