Commit-ID:  ef31563e950c60bb41b97c2b61c32de874f3c949
Gitweb:     http://git.kernel.org/tip/ef31563e950c60bb41b97c2b61c32de874f3c949
Author:     Peter Zijlstra <pet...@infradead.org>
AuthorDate: Wed, 26 Mar 2014 17:56:43 +0100
Committer:  Ingo Molnar <mi...@kernel.org>
CommitDate: Thu, 14 Aug 2014 12:48:09 +0200

locking,arch,mips: Fold atomic_ops

Many of the atomic op implementations are the same except for one
instruction; fold the lot into a few CPP macros and reduce LoC.

This also prepares for easy addition of new ops.
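
For illustration only (not part of the patch): a minimal userspace sketch of
the folding pattern, with invented names (MY_ATOMIC_OP, my_atomic_t) and the
plain-C fallback path standing in for the LL/SC assembly the real macros emit:

  /* Illustrative sketch only; every name here is made up for the example. */
  #include <stdio.h>

  typedef struct { int counter; } my_atomic_t;

  /* One template generates one void op per (name, C operator) pair. */
  #define MY_ATOMIC_OP(op, c_op)                                  \
  static inline void my_atomic_##op(int i, my_atomic_t *v)        \
  {                                                               \
          v->counter c_op i;   /* stands in for the ll/sc loop */ \
  }

  MY_ATOMIC_OP(add, +=)         /* generates my_atomic_add() */
  MY_ATOMIC_OP(sub, -=)         /* generates my_atomic_sub() */

  #undef MY_ATOMIC_OP

  int main(void)
  {
          my_atomic_t v = { .counter = 5 };

          my_atomic_add(3, &v);
          my_atomic_sub(1, &v);
          printf("%d\n", v.counter);    /* prints 7 */
          return 0;
  }

Adding a new op under this scheme is then a single extra instantiation line.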

Signed-off-by: Peter Zijlstra <pet...@infradead.org>
Cc: Linus Torvalds <torva...@linux-foundation.org>
Cc: Maciej W. Rozycki <ma...@codesourcery.com>
Cc: Paul E. McKenney <paul...@linux.vnet.ibm.com>
Cc: Ralf Baechle <r...@linux-mips.org>
Cc: linux-m...@linux-mips.org
Link: http://lkml.kernel.org/r/20140508135852.521548...@infradead.org
Signed-off-by: Ingo Molnar <mi...@kernel.org>
---
 arch/mips/include/asm/atomic.h | 557 ++++++++++++++---------------------------
 1 file changed, 187 insertions(+), 370 deletions(-)

diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 37b2bef..476fe3b 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -40,195 +40,103 @@
  */
 #define atomic_set(v, i)               ((v)->counter = (i))
 
-/*
- * atomic_add - add integer to atomic variable
- * @i: integer value to add
- * @v: pointer of type atomic_t
- *
- * Atomically adds @i to @v.
- */
-static __inline__ void atomic_add(int i, atomic_t * v)
-{
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               int temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     ll      %0, %1          # atomic_add            \n"
-               "       addu    %0, %2                                  \n"
-               "       sc      %0, %1                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i));
-       } else if (kernel_uses_llsc) {
-               int temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       ll      %0, %1          # atomic_add    \n"
-                       "       addu    %0, %2                          \n"
-                       "       sc      %0, %1                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (v->counter)
-                       : "Ir" (i));
-               } while (unlikely(!temp));
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               v->counter += i;
-               raw_local_irq_restore(flags);
-       }
-}
-
-/*
- * atomic_sub - subtract the atomic variable
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically subtracts @i from @v.
- */
-static __inline__ void atomic_sub(int i, atomic_t * v)
-{
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               int temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     ll      %0, %1          # atomic_sub            \n"
-               "       subu    %0, %2                                  \n"
-               "       sc      %0, %1                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i));
-       } else if (kernel_uses_llsc) {
-               int temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       ll      %0, %1          # atomic_sub    \n"
-                       "       subu    %0, %2                          \n"
-                       "       sc      %0, %1                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (v->counter)
-                       : "Ir" (i));
-               } while (unlikely(!temp));
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               v->counter -= i;
-               raw_local_irq_restore(flags);
-       }
-}
-
-/*
- * Same as above, but return the result value
- */
-static __inline__ int atomic_add_return(int i, atomic_t * v)
-{
-       int result;
-
-       smp_mb__before_llsc();
-
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               int temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     ll      %1, %2          # atomic_add_return     \n"
-               "       addu    %0, %1, %3                              \n"
-               "       sc      %0, %2                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       addu    %0, %1, %3                              \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i));
-       } else if (kernel_uses_llsc) {
-               int temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       ll      %1, %2  # atomic_add_return     \n"
-                       "       addu    %0, %1, %3                      \n"
-                       "       sc      %0, %2                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (result), "=&r" (temp), "+m" (v->counter)
-                       : "Ir" (i));
-               } while (unlikely(!result));
-
-               result = temp + i;
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               result = v->counter;
-               result += i;
-               v->counter = result;
-               raw_local_irq_restore(flags);
-       }
-
-       smp_llsc_mb();
-
-       return result;
+#define ATOMIC_OP(op, c_op, asm_op)                                          \
+static __inline__ void atomic_##op(int i, atomic_t * v)                      \
+{                                                                            \
+       if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
+               int temp;                                                     \
+                                                                             \
+               __asm__ __volatile__(                                         \
+               "       .set    arch=r4000                              \n"   \
+               "1:     ll      %0, %1          # atomic_" #op "        \n"   \
+               "       " #asm_op " %0, %2                              \n"   \
+               "       sc      %0, %1                                  \n"   \
+               "       beqzl   %0, 1b                                  \n"   \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (temp), "+m" (v->counter)                             \
+               : "Ir" (i));                                                  \
+       } else if (kernel_uses_llsc) {                                        \
+               int temp;                                                     \
+                                                                             \
+               do {                                                          \
+                       __asm__ __volatile__(                                 \
+                       "       .set    arch=r4000                      \n"   \
+                       "       ll      %0, %1          # atomic_" #op "\n"   \
+                       "       " #asm_op " %0, %2                      \n"   \
+                       "       sc      %0, %1                          \n"   \
+                       "       .set    mips0                           \n"   \
+                       : "=&r" (temp), "+m" (v->counter)                     \
+                       : "Ir" (i));                                          \
+               } while (unlikely(!temp));                                    \
+       } else {                                                              \
+               unsigned long flags;                                          \
+                                                                             \
+               raw_local_irq_save(flags);                                    \
+               v->counter c_op i;                                            \
+               raw_local_irq_restore(flags);                                 \
+       }                                                                     \
+}                                                                            \
+
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)                                   \
+static __inline__ int atomic_##op##_return(int i, atomic_t * v)              \
+{                                                                            \
+       int result;                                                           \
+                                                                             \
+       smp_mb__before_llsc();                                                \
+                                                                             \
+       if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
+               int temp;                                                     \
+                                                                             \
+               __asm__ __volatile__(                                         \
+               "       .set    arch=r4000                              \n"   \
+               "1:     ll      %1, %2          # atomic_" #op "_return \n"   \
+               "       " #asm_op " %0, %1, %3                          \n"   \
+               "       sc      %0, %2                                  \n"   \
+               "       beqzl   %0, 1b                                  \n"   \
+               "       addu    %0, %1, %3                              \n"   \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (result), "=&r" (temp), "+m" (v->counter)             \
+               : "Ir" (i));                                                  \
+       } else if (kernel_uses_llsc) {                                        \
+               int temp;                                                     \
+                                                                             \
+               do {                                                          \
+                       __asm__ __volatile__(                                 \
+                       "       .set    arch=r4000                      \n"   \
+                       "       ll      %1, %2  # atomic_" #op "_return \n"   \
+                       "       " #asm_op " %0, %1, %3                  \n"   \
+                       "       sc      %0, %2                          \n"   \
+                       "       .set    mips0                           \n"   \
+                       : "=&r" (result), "=&r" (temp), "+m" (v->counter)     \
+                       : "Ir" (i));                                          \
+               } while (unlikely(!result));                                  \
+                                                                             \
+               result = temp + i;                                            \
+       } else {                                                              \
+               unsigned long flags;                                          \
+                                                                             \
+               raw_local_irq_save(flags);                                    \
+               result = v->counter;                                          \
+               result c_op i;                                                \
+               v->counter = result;                                          \
+               raw_local_irq_restore(flags);                                 \
+       }                                                                     \
+                                                                             \
+       smp_llsc_mb();                                                        \
+                                                                             \
+       return result;                                                        \
 }
 
-static __inline__ int atomic_sub_return(int i, atomic_t * v)
-{
-       int result;
+#define ATOMIC_OPS(op, c_op, asm_op)                                         \
+       ATOMIC_OP(op, c_op, asm_op)                                           \
+       ATOMIC_OP_RETURN(op, c_op, asm_op)
 
-       smp_mb__before_llsc();
+ATOMIC_OPS(add, +=, addu)
+ATOMIC_OPS(sub, -=, subu)
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               int temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     ll      %1, %2          # atomic_sub_return     \n"
-               "       subu    %0, %1, %3                              \n"
-               "       sc      %0, %2                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       subu    %0, %1, %3                              \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "=m" (v->counter)
-               : "Ir" (i), "m" (v->counter)
-               : "memory");
-
-               result = temp - i;
-       } else if (kernel_uses_llsc) {
-               int temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       ll      %1, %2  # atomic_sub_return     \n"
-                       "       subu    %0, %1, %3                      \n"
-                       "       sc      %0, %2                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (result), "=&r" (temp), "+m" (v->counter)
-                       : "Ir" (i));
-               } while (unlikely(!result));
-
-               result = temp - i;
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               result = v->counter;
-               result -= i;
-               v->counter = result;
-               raw_local_irq_restore(flags);
-       }
-
-       smp_llsc_mb();
-
-       return result;
-}
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
 
 /*
  * atomic_sub_if_positive - conditionally subtract integer from atomic variable
@@ -407,195 +315,104 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  */
 #define atomic64_set(v, i)     ((v)->counter = (i))
 
-/*
- * atomic64_add - add integer to atomic variable
- * @i: integer value to add
- * @v: pointer of type atomic64_t
- *
- * Atomically adds @i to @v.
- */
-static __inline__ void atomic64_add(long i, atomic64_t * v)
-{
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               long temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     lld     %0, %1          # atomic64_add          \n"
-               "       daddu   %0, %2                                  \n"
-               "       scd     %0, %1                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i));
-       } else if (kernel_uses_llsc) {
-               long temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       lld     %0, %1          # atomic64_add  \n"
-                       "       daddu   %0, %2                          \n"
-                       "       scd     %0, %1                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (v->counter)
-                       : "Ir" (i));
-               } while (unlikely(!temp));
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               v->counter += i;
-               raw_local_irq_restore(flags);
-       }
+#define ATOMIC64_OP(op, c_op, asm_op)                                        \
+static __inline__ void atomic64_##op(long i, atomic64_t * v)                 \
+{                                                                            \
+       if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
+               long temp;                                                    \
+                                                                             \
+               __asm__ __volatile__(                                         \
+               "       .set    arch=r4000                              \n"   \
+               "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
+               "       " #asm_op " %0, %2                              \n"   \
+               "       scd     %0, %1                                  \n"   \
+               "       beqzl   %0, 1b                                  \n"   \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (temp), "+m" (v->counter)                             \
+               : "Ir" (i));                                                  \
+       } else if (kernel_uses_llsc) {                                        \
+               long temp;                                                    \
+                                                                             \
+               do {                                                          \
+                       __asm__ __volatile__(                                 \
+                       "       .set    arch=r4000                      \n"   \
+                       "       lld     %0, %1          # atomic64_" #op "\n" \
+                       "       " #asm_op " %0, %2                      \n"   \
+                       "       scd     %0, %1                          \n"   \
+                       "       .set    mips0                           \n"   \
+                       : "=&r" (temp), "+m" (v->counter)                     \
+                       : "Ir" (i));                                          \
+               } while (unlikely(!temp));                                    \
+       } else {                                                              \
+               unsigned long flags;                                          \
+                                                                             \
+               raw_local_irq_save(flags);                                    \
+               v->counter c_op i;                                            \
+               raw_local_irq_restore(flags);                                 \
+       }                                                                     \
+}                                                                            \
+
+#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                 \
+static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)        \
+{                                                                            \
+       long result;                                                          \
+                                                                             \
+       smp_mb__before_llsc();                                                \
+                                                                             \
+       if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
+               long temp;                                                    \
+                                                                             \
+               __asm__ __volatile__(                                         \
+               "       .set    arch=r4000                              \n"   \
+               "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
+               "       " #asm_op " %0, %1, %3                          \n"   \
+               "       scd     %0, %2                                  \n"   \
+               "       beqzl   %0, 1b                                  \n"   \
+               "       " #asm_op " %0, %1, %3                          \n"   \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (result), "=&r" (temp), "+m" (v->counter)             \
+               : "Ir" (i));                                                  \
+       } else if (kernel_uses_llsc) {                                        \
+               long temp;                                                    \
+                                                                             \
+               do {                                                          \
+                       __asm__ __volatile__(                                 \
+                       "       .set    arch=r4000                      \n"   \
+                       "       lld     %1, %2  # atomic64_" #op "_return\n"  \
+                       "       " #asm_op " %0, %1, %3                  \n"   \
+                       "       scd     %0, %2                          \n"   \
+                       "       .set    mips0                           \n"   \
+                       : "=&r" (result), "=&r" (temp), "=m" (v->counter)     \
+                       : "Ir" (i), "m" (v->counter)                          \
+                       : "memory");                                          \
+               } while (unlikely(!result));                                  \
+                                                                             \
+               result = temp + i;                                            \
+       } else {                                                              \
+               unsigned long flags;                                          \
+                                                                             \
+               raw_local_irq_save(flags);                                    \
+               result = v->counter;                                          \
+               result c_op i;                                                \
+               v->counter = result;                                          \
+               raw_local_irq_restore(flags);                                 \
+       }                                                                     \
+                                                                             \
+       smp_llsc_mb();                                                        \
+                                                                             \
+       return result;                                                        \
 }
 
-/*
- * atomic64_sub - subtract the atomic variable
- * @i: integer value to subtract
- * @v: pointer of type atomic64_t
- *
- * Atomically subtracts @i from @v.
- */
-static __inline__ void atomic64_sub(long i, atomic64_t * v)
-{
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               long temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     lld     %0, %1          # atomic64_sub          \n"
-               "       dsubu   %0, %2                                  \n"
-               "       scd     %0, %1                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i));
-       } else if (kernel_uses_llsc) {
-               long temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       lld     %0, %1          # atomic64_sub  \n"
-                       "       dsubu   %0, %2                          \n"
-                       "       scd     %0, %1                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (v->counter)
-                       : "Ir" (i));
-               } while (unlikely(!temp));
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               v->counter -= i;
-               raw_local_irq_restore(flags);
-       }
-}
-
-/*
- * Same as above, but return the result value
- */
-static __inline__ long atomic64_add_return(long i, atomic64_t * v)
-{
-       long result;
+#define ATOMIC64_OPS(op, c_op, asm_op)                                       \
+       ATOMIC64_OP(op, c_op, asm_op)                                         \
+       ATOMIC64_OP_RETURN(op, c_op, asm_op)
 
-       smp_mb__before_llsc();
+ATOMIC64_OPS(add, +=, daddu)
+ATOMIC64_OPS(sub, -=, dsubu)
 
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               long temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     lld     %1, %2          # atomic64_add_return   \n"
-               "       daddu   %0, %1, %3                              \n"
-               "       scd     %0, %2                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       daddu   %0, %1, %3                              \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i));
-       } else if (kernel_uses_llsc) {
-               long temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       lld     %1, %2  # atomic64_add_return   \n"
-                       "       daddu   %0, %1, %3                      \n"
-                       "       scd     %0, %2                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (result), "=&r" (temp), "=m" (v->counter)
-                       : "Ir" (i), "m" (v->counter)
-                       : "memory");
-               } while (unlikely(!result));
-
-               result = temp + i;
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               result = v->counter;
-               result += i;
-               v->counter = result;
-               raw_local_irq_restore(flags);
-       }
-
-       smp_llsc_mb();
-
-       return result;
-}
-
-static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
-{
-       long result;
-
-       smp_mb__before_llsc();
-
-       if (kernel_uses_llsc && R10000_LLSC_WAR) {
-               long temp;
-
-               __asm__ __volatile__(
-               "       .set    arch=r4000                              \n"
-               "1:     lld     %1, %2          # atomic64_sub_return   \n"
-               "       dsubu   %0, %1, %3                              \n"
-               "       scd     %0, %2                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       dsubu   %0, %1, %3                              \n"
-               "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "=m" (v->counter)
-               : "Ir" (i), "m" (v->counter)
-               : "memory");
-       } else if (kernel_uses_llsc) {
-               long temp;
-
-               do {
-                       __asm__ __volatile__(
-                       "       .set    arch=r4000                      \n"
-                       "       lld     %1, %2  # atomic64_sub_return   \n"
-                       "       dsubu   %0, %1, %3                      \n"
-                       "       scd     %0, %2                          \n"
-                       "       .set    mips0                           \n"
-                       : "=&r" (result), "=&r" (temp), "=m" (v->counter)
-                       : "Ir" (i), "m" (v->counter)
-                       : "memory");
-               } while (unlikely(!result));
-
-               result = temp - i;
-       } else {
-               unsigned long flags;
-
-               raw_local_irq_save(flags);
-               result = v->counter;
-               result -= i;
-               v->counter = result;
-               raw_local_irq_restore(flags);
-       }
-
-       smp_llsc_mb();
-
-       return result;
-}
+#undef ATOMIC64_OPS
+#undef ATOMIC64_OP_RETURN
+#undef ATOMIC64_OP
 
 /*
  * atomic64_sub_if_positive - conditionally subtract integer from atomic variable