Introduce size-selection macros for the local_t operations: X86_32 uses
long-sized (l-suffixed) instructions and X86_64 uses quad-sized
(q-suffixed) ones.  The macros expand to string literals that are
concatenated onto the asm templates.  Also a few small comment fixes so
the two files match.

Signed-off-by: Harvey Harrison <[EMAIL PROTECTED]>
---
 include/asm-x86/local.h    |   17 +++++++++++++++++
 include/asm-x86/local_32.h |   28 ++++++++++++++--------------
 include/asm-x86/local_64.h |   18 +++++++++---------
 3 files changed, 40 insertions(+), 23 deletions(-)
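
(Not part of the patch: a minimal standalone userspace sketch of the
technique, for anyone who wants to see the expansion outside the kernel.
The mnemonic macros are plain string literals, so the preprocessor can
substitute them and the compiler glues them onto the asm template by
string concatenation; the demo harness below, including local_inc_demo,
local_add_demo and the __i386__/__x86_64__ tests, is illustrative only
and is not how the kernel headers are laid out.)

/* Standalone x86 sketch of the size-selection macro technique. */
#include <stdio.h>

#if defined(__i386__)
# define ASM_INC "incl "
# define ASM_ADD "addl "
#elif defined(__x86_64__)
# define ASM_INC "incq "
# define ASM_ADD "addq "
#else
# error "x86-only sketch"
#endif

static inline void local_inc_demo(long *v)
{
	/* Expands to "incl %0" on 32-bit, "incq %0" on 64-bit. */
	__asm__ __volatile__(ASM_INC "%0" : "+m" (*v));
}

static inline void local_add_demo(long i, long *v)
{
	/* Expands to "addl %1,%0" / "addq %1,%0". */
	__asm__ __volatile__(ASM_ADD "%1,%0" : "+m" (*v) : "ir" (i));
}

int main(void)
{
	long counter = 0;

	local_inc_demo(&counter);
	local_add_demo(41, &counter);
	printf("counter = %ld\n", counter);	/* should print 42 */
	return 0;
}
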

diff --git a/include/asm-x86/local.h b/include/asm-x86/local.h
index 1e6b5af..8839c36 100644
--- a/include/asm-x86/local.h
+++ b/include/asm-x86/local.h
@@ -14,6 +14,23 @@ typedef struct
 
 #define local_read(l)  atomic_long_read(&(l)->a)
 #define local_set(l,i) atomic_long_set(&(l)->a, (i))
+/*
+ * X86_32 uses long (l suffix) instructions, X86_64 uses quad (q suffix).
+ * Defined as strings so they can be pasted onto the asm templates by
+ * string concatenation (macros are not expanded inside a string literal).
+ */
+#ifdef CONFIG_X86_32
+# define ASM_INC "incl "
+# define ASM_DEC "decl "
+# define ASM_ADD "addl "
+# define ASM_SUB "subl "
+# define ASM_XADD "xaddl "
+#else
+# define ASM_INC "incq "
+# define ASM_DEC "decq "
+# define ASM_ADD "addq "
+# define ASM_SUB "subq "
+# define ASM_XADD "xaddq "
+#endif
 
 #ifdef CONFIG_X86_32
 # include "local_32.h"
diff --git a/include/asm-x86/local_32.h b/include/asm-x86/local_32.h
index f3bc4d9..ff6d1d2 100644
--- a/include/asm-x86/local_32.h
+++ b/include/asm-x86/local_32.h
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
        __asm__ __volatile__(
-               "incl %0"
+               ASM_INC "%0"
                :"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
        __asm__ __volatile__(
-               "decl %0"
+               ASM_DEC "%0"
                :"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
        __asm__ __volatile__(
-               "addl %1,%0"
+               ASM_ADD "%1,%0"
                :"+m" (l->a.counter)
                :"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
        __asm__ __volatile__(
-               "subl %1,%0"
+               ASM_SUB "%1,%0"
                :"+m" (l->a.counter)
                :"ir" (i));
 }
@@ -34,7 +34,7 @@ static inline void local_sub(long i, local_t *l)
 /**
  * local_sub_and_test - subtract value from variable and test result
  * @i: integer value to subtract
- * @l: pointer of type local_t
+ * @l: pointer to type local_t
  *
  * Atomically subtracts @i from @l and returns
  * true if the result is zero, or false for all
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "subl %2,%0; sete %1"
+               ASM_SUB "%2,%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                :"ir" (i) : "memory");
        return c;
@@ -53,7 +53,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 
 /**
  * local_dec_and_test - decrement and test
- * @l: pointer of type local_t
+ * @l: pointer to type local_t
  *
  * Atomically decrements @l by 1 and
  * returns true if the result is 0, or false for all other
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "decl %0; sete %1"
+               ASM_DEC "%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                : : "memory");
        return c != 0;
@@ -72,7 +72,7 @@ static inline int local_dec_and_test(local_t *l)
 
 /**
  * local_inc_and_test - increment and test
- * @l: pointer of type local_t
+ * @l: pointer to type local_t
  *
  * Atomically increments @l by 1
  * and returns true if the result is zero, or false for all
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "incl %0; sete %1"
+               ASM_INC "%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                : : "memory");
        return c != 0;
@@ -91,8 +91,8 @@ static inline int local_inc_and_test(local_t *l)
 
 /**
  * local_add_negative - add and test if negative
- * @l: pointer of type local_t
  * @i: integer value to add
+ * @l: pointer to type local_t
  *
  * Atomically adds @i to @l and returns true
  * if the result is negative, or false when
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "addl %2,%0; sets %1"
+               ASM_ADD "%2,%0; sets %1"
                :"+m" (l->a.counter), "=qm" (c)
                :"ir" (i) : "memory");
        return c;
@@ -111,8 +111,8 @@ static inline int local_add_negative(long i, local_t *l)
 
 /**
  * local_add_return - add and return
- * @l: pointer of type local_t
  * @i: integer value to add
+ * @l: pointer to type local_t
  *
  * Atomically adds @i to @l and returns @i + @l
  */
@@ -127,7 +127,7 @@ static inline long local_add_return(long i, local_t *l)
        /* Modern 486+ processor */
        __i = i;
        __asm__ __volatile__(
-               "xaddl %0, %1;"
+               ASM_XADD "%0, %1;"
                :"+r" (i), "+m" (l->a.counter)
                : : "memory");
        return i + __i;
diff --git a/include/asm-x86/local_64.h b/include/asm-x86/local_64.h
index 7808f53..0e92bc6 100644
--- a/include/asm-x86/local_64.h
+++ b/include/asm-x86/local_64.h
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
        __asm__ __volatile__(
-               "incq %0"
+               ASM_INC "%0"
                :"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
        __asm__ __volatile__(
-               "decq %0"
+               ASM_DEC "%0"
                :"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
        __asm__ __volatile__(
-               "addq %1,%0"
+               ASM_ADD "%1,%0"
                :"+m" (l->a.counter)
                :"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
        __asm__ __volatile__(
-               "subq %1,%0"
+               ASM_SUB "%1,%0"
                :"+m" (l->a.counter)
                :"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "subq %2,%0; sete %1"
+               ASM_SUB "%2,%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                :"ir" (i) : "memory");
        return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "decq %0; sete %1"
+               ASM_DEC "%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                : : "memory");
        return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "incq %0; sete %1"
+               ASM_INC "%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                : : "memory");
        return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
        unsigned char c;
 
        __asm__ __volatile__(
-               "addq %2,%0; sets %1"
+               ASM_ADD "%2,%0; sets %1"
                :"+m" (l->a.counter), "=qm" (c)
                :"ir" (i) : "memory");
        return c;
@@ -120,7 +120,7 @@ static inline long local_add_return(long i, local_t *l)
 {
        long __i = i;
        __asm__ __volatile__(
-               "xaddq %0, %1;"
+               ASM_XADD "%0, %1;"
                :"+r" (i), "+m" (l->a.counter)
                : : "memory");
        return i + __i;
-- 
1.5.4.rc0.1083.gf568

