Using lwsync before the load-and-reserve (lwarx/ldarx) only provides
acquire/release semantics for these atomic operations. The other
architectures provide sequential consistency, so replace the lwsync with a
full sync, matching the C/C++11 mapping for sequentially consistent atomics
(see also http://www.cl.cam.ac.uk/~pes20/cpp/cpp0xmappings.html).
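For reference, a minimal sketch (not part of this patch; the helper name is
made up) of what the 32-bit cmpxchg sequence looks like with the full sync
in front and the isync trailer that urcu/uatomic/ppc.h already uses:

	static inline unsigned int
	seq_cst_cmpxchg_sketch(unsigned int *addr, unsigned int old,
			unsigned int newval)
	{
		unsigned int old_val;

		__asm__ __volatile__(
			"sync\n"		/* full fence before the atomic */
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"cmpw %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stwcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"		/* keep later ops from moving up */
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"(newval), "r"(old)
				: "memory", "cc");

		return old_val;
	}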
---
 urcu/uatomic/ppc.h |   18 ++++++------------
 1 files changed, 6 insertions(+), 12 deletions(-)

diff --git a/urcu/uatomic/ppc.h b/urcu/uatomic/ppc.h
index 3eb3d63..8485f67 100644
--- a/urcu/uatomic/ppc.h
+++ b/urcu/uatomic/ppc.h
@@ -27,12 +27,6 @@
 extern "C" {
 #endif 
 
-#ifdef __NO_LWSYNC__
-#define LWSYNC_OPCODE  "sync\n"
-#else
-#define LWSYNC_OPCODE  "lwsync\n"
-#endif
-
 #define ILLEGAL_INSTR  ".long  0xd00d00"
 
 /*
@@ -53,7 +47,7 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
                unsigned int result;
 
                __asm__ __volatile__(
-                       LWSYNC_OPCODE
+                       "sync\n"                /* for sequential consistency */
                "1:\t"  "lwarx %0,0,%1\n"       /* load and reserve */
                        "stwcx. %2,0,%1\n"      /* else store conditional */
                        "bne- 1b\n"             /* retry if lost reservation */
@@ -70,7 +64,7 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
                unsigned long result;
 
                __asm__ __volatile__(
-                       LWSYNC_OPCODE
+                       "sync\n"                /* for sequential consistency */
                "1:\t"  "ldarx %0,0,%1\n"       /* load and reserve */
                        "stdcx. %2,0,%1\n"      /* else store conditional */
                        "bne- 1b\n"             /* retry if lost reservation */
@@ -104,7 +98,7 @@ unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
                unsigned int old_val;
 
                __asm__ __volatile__(
-                       LWSYNC_OPCODE
+                       "sync\n"                /* for sequential consistency */
                "1:\t"  "lwarx %0,0,%1\n"       /* load and reserve */
                        "cmpw %0,%3\n"          /* if load is not equal to */
                        "bne 2f\n"              /* old, fail */
@@ -125,7 +119,7 @@ unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
                unsigned long old_val;
 
                __asm__ __volatile__(
-                       LWSYNC_OPCODE
+                       "sync\n"                /* for sequential consistency */
                "1:\t"  "ldarx %0,0,%1\n"       /* load and reserve */
                        "cmpd %0,%3\n"          /* if load is not equal to */
                        "bne 2f\n"              /* old, fail */
@@ -166,7 +160,7 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val,
                unsigned int result;
 
                __asm__ __volatile__(
-                       LWSYNC_OPCODE
+                       "sync\n"                /* for sequential consistency */
                "1:\t"  "lwarx %0,0,%1\n"       /* load and reserve */
                        "add %0,%2,%0\n"        /* add val to value loaded */
                        "stwcx. %0,0,%1\n"      /* store conditional */
@@ -184,7 +178,7 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val,
                unsigned long result;
 
                __asm__ __volatile__(
-                       LWSYNC_OPCODE
+                       "sync\n"                /* for sequential consistency */
                "1:\t"  "ldarx %0,0,%1\n"       /* load and reserve */
                        "add %0,%2,%0\n"        /* add val to value loaded */
                        "stdcx. %0,0,%1\n"      /* store conditional */
-- 
1.7.6

