This patch adds user_atomic_cmpxchg_inatomic() to use CMPXCHG
instruction against a user space address.

This generalizes the already existing futex_atomic_cmpxchg_inatomic()
so it can be used in other contexts.  This will be used in the
upcoming support for Intel MPX (Memory Protection Extensions).

Signed-off-by: Qiaowei Ren <[email protected]>
---
 arch/x86/include/asm/uaccess.h |   91 ++++++++++++++++++++++++++++++++++++++++
 1 files changed, 91 insertions(+), 0 deletions(-)

diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 5838fa9..894d8bf 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -525,6 +525,97 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
+/*
+ * Compile-time trap: any call that the optimizer cannot prove dead
+ * (i.e. an unsupported operand size reached the switch below) turns
+ * into a build error via __compiletime_error.
+ *
+ * NOTE(review): an identical declaration appears to exist in
+ * <asm/cmpxchg.h> — confirm whether this re-declaration is needed or
+ * whether that header is already included here.
+ */
+extern void __cmpxchg_wrong_size(void)
+       __compiletime_error("Bad argument size for cmpxchg");
+
+/*
+ * __user_atomic_cmpxchg_inatomic() - atomic compare-and-exchange on a
+ * user-space address, WITHOUT an access_ok() check (callers must have
+ * validated @ptr, e.g. via user_atomic_cmpxchg_inatomic()).
+ *
+ * @uval: kernel pointer that receives the value observed at @ptr
+ * @ptr:  user-space address to operate on
+ * @old:  expected current value
+ * @new:  value to store if *@ptr == @old
+ * @size: operand width in bytes (1, 2, 4; 8 only on CONFIG_X86_64)
+ *
+ * Returns 0 on success, -EFAULT if the access faulted.  On return
+ * *@uval holds the value seen at @ptr (== @old iff the swap occurred).
+ * STAC/CLAC bracket the access for SMAP; the .fixup/_ASM_EXTABLE pair
+ * turns a fault at label 1 into "-EFAULT in __ret" at label 3.
+ */
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)      \
+({                                                                     \
+       int __ret = 0;                                                  \
+       __typeof__(ptr) __uval = (uval);                                \
+       __typeof__(*(ptr)) __old = (old);                               \
+       __typeof__(*(ptr)) __new = (new);                               \
+       switch (size) {                                                 \
+       case 1:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "q" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 2:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 4:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 8:                                                         \
+       {                                                               \
+               /*                                                      \
+                * Preprocessor directives are not allowed inside a     \
+                * macro replacement list, so "#ifdef CONFIG_X86_64"    \
+                * cannot be used here; IS_ENABLED() gives the same     \
+                * compile-time rejection of 8-byte operands on 32-bit  \
+                * kernels (CMPXCHGQ does not exist there).             \
+                */                                                     \
+               if (!IS_ENABLED(CONFIG_X86_64))                         \
+                       __cmpxchg_wrong_size();                         \
+                                                                       \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       default:                                                        \
+               __cmpxchg_wrong_size();                                 \
+       }                                                               \
+       *__uval = __old;                                                \
+       __ret;                                                          \
+})
+
+/*
+ * user_atomic_cmpxchg_inatomic() - CMPXCHG against a user-space
+ * address, with access_ok() validation.
+ *
+ * Returns 0 on success, -EFAULT if @ptr is not a writable user range
+ * or the access faults.  On success *@uval holds the value observed
+ * at @ptr.  Beware: @ptr (and sizeof(*(ptr))) is evaluated more than
+ * once, so arguments must be side-effect free.
+ */
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)              \
+({                                                                     \
+       access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ?                \
+               __user_atomic_cmpxchg_inatomic((uval), (ptr),           \
+                               (old), (new), sizeof(*(ptr))) :         \
+               -EFAULT;                                                \
+})
+
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */
-- 
1.7.1

--
To unsubscribe from this list: send the line "unsubscribe linux-kernel" in
the body of a message to [email protected]
More majordomo info at  http://vger.kernel.org/majordomo-info.html
Please read the FAQ at  http://www.tux.org/lkml/

Reply via email to