On Sat,  4 Nov 2023 17:16:15 +0800
"wuqiang.matt" <wuqiang.m...@bytedance.com> wrote:

> For architectures that support native cmpxchg, we'd like to
> implement arch_cmpxchg[64]_local with the native variants for the
> supported data sizes. Otherwise, the generic_cmpxchg[64]_local
> fallback will be used.
> 
> Reported-by: kernel test robot <l...@intel.com>
> Closes: https://lore.kernel.org/oe-kbuild-all/202310272207.tlpflya4-...@intel.com/
> 

Looks good to me.

Reviewed-by: Masami Hiramatsu (Google) <mhira...@kernel.org>

But I need a comment from the hexagon maintainer too.

Thank you,

> Signed-off-by: wuqiang.matt <wuqiang.m...@bytedance.com>
> ---
>  arch/hexagon/include/asm/cmpxchg.h | 51 +++++++++++++++++++++++++++++-
>  1 file changed, 50 insertions(+), 1 deletion(-)
> 
> diff --git a/arch/hexagon/include/asm/cmpxchg.h b/arch/hexagon/include/asm/cmpxchg.h
> index bf6cf5579cf4..2b5e5bbaf807 100644
> --- a/arch/hexagon/include/asm/cmpxchg.h
> +++ b/arch/hexagon/include/asm/cmpxchg.h
> @@ -8,6 +8,8 @@
>  #ifndef _ASM_CMPXCHG_H
>  #define _ASM_CMPXCHG_H
>  
> +#include <linux/build_bug.h>
> +
>  /*
>   * __arch_xchg - atomically exchange a register and a memory location
>   * @x: value to swap
> @@ -51,13 +53,15 @@ __arch_xchg(unsigned long x, volatile void *ptr, int size)
>   *  variable casting.
>   */
>  
> -#define arch_cmpxchg(ptr, old, new)                          \
> +#define __cmpxchg_32(ptr, old, new)                          \
>  ({                                                           \
>       __typeof__(ptr) __ptr = (ptr);                          \
>       __typeof__(*(ptr)) __old = (old);                       \
>       __typeof__(*(ptr)) __new = (new);                       \
>       __typeof__(*(ptr)) __oldval = 0;                        \
>                                                               \
> +     BUILD_BUG_ON(sizeof(*(ptr)) != 4);                      \
> +                                                             \
>       asm volatile(                                           \
>               "1:     %0 = memw_locked(%1);\n"                \
>               "       { P0 = cmp.eq(%0,%2);\n"                \
> @@ -72,4 +76,49 @@ __arch_xchg(unsigned long x, volatile void *ptr, int size)
>       __oldval;                                               \
>  })
>  
> +#define __cmpxchg(ptr, old, val, size)                               \
> +({                                                           \
> +     __typeof__(*(ptr)) oldval;                              \
> +                                                             \
> +     switch (size) {                                         \
> +     case 4:                                                 \
> +             oldval = __cmpxchg_32(ptr, old, val);           \
> +             break;                                          \
> +     default:                                                \
> +             BUILD_BUG();                                    \
> +             oldval = val;                                   \
> +             break;                                          \
> +     }                                                       \
> +                                                             \
> +     oldval;                                                 \
> +})
> +
> +#define arch_cmpxchg(ptr, o, n)      __cmpxchg((ptr), (o), (n), sizeof(*(ptr)))
> +
> +/*
> + * always make arch_cmpxchg[64]_local available, native cmpxchg
> + * will be used if available, then generic_cmpxchg[64]_local
> + */
> +#include <asm-generic/cmpxchg-local.h>
> +
> +#define arch_cmpxchg_local(ptr, old, val)                    \
> +({                                                           \
> +     __typeof__(*(ptr)) retval;                              \
> +     int size = sizeof(*(ptr));                              \
> +                                                             \
> +     switch (size) {                                         \
> +     case 4:                                                 \
> +             retval = __cmpxchg_32(ptr, old, val);           \
> +             break;                                          \
> +     default:                                                \
> +             retval = __generic_cmpxchg_local(ptr, old,      \
> +                                              val, size);    \
> +             break;                                          \
> +     }                                                       \
> +                                                             \
> +     retval;                                                 \
> +})
> +
> +#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
> +
>  #endif /* _ASM_CMPXCHG_H */
> -- 
> 2.40.1
> 

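For readers skimming the patch, here is a minimal usage sketch of the
resulting dispatch (not part of the patch; the function and variable
names below are made up for illustration). With this change, a 4-byte
cmpxchg_local() on hexagon resolves to the native __cmpxchg_32()
memw_locked sequence, while other sizes fall back to
__generic_cmpxchg_local() from asm-generic/cmpxchg-local.h:

#include <linux/atomic.h>
#include <linux/compiler.h>

static int counter;              /* 4 bytes: native __cmpxchg_32() path  */
static unsigned short small_val; /* 2 bytes: generic local fallback path */

/* Hypothetical caller, for illustration only. */
static void cmpxchg_local_example(void)
{
        int old = READ_ONCE(counter);

        /*
         * Size 4: arch_cmpxchg_local() selects __cmpxchg_32(), i.e. the
         * native memw_locked loop shown in the patch. The store happens
         * only if counter still holds 'old'.
         */
        if (cmpxchg_local(&counter, old, old + 1) == old) {
                /* counter was updated */
        }

        /*
         * Size 2: arch_cmpxchg_local() falls through to
         * __generic_cmpxchg_local(), which masks interrupts around a
         * plain load/compare/store.
         */
        cmpxchg_local(&small_val, 0, 1);
}

Since cmpxchg_local() only has to be atomic with respect to the local
CPU, the interrupt-masking generic fallback is sufficient for the sizes
that have no native sequence, while arch_cmpxchg() proper keeps
rejecting unsupported sizes at build time via BUILD_BUG().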

-- 
Masami Hiramatsu (Google) <mhira...@kernel.org>
