X-Git-Url: https://git.liburcu.org/?a=blobdiff_plain;f=urcu%2Fuatomic_arch_ppc.h;h=2322c97f43fe8f6a5b0f99daf59de871d08ccc80;hb=14714b51fd19da8d72a3eb3367c2e52615b3baf3;hp=40de9ff9909dc10327bb75e29aca9e33f7743f41;hpb=48d848c7291ec22da6258ac03cd9c1dafec1fdfb;p=urcu.git

diff --git a/urcu/uatomic_arch_ppc.h b/urcu/uatomic_arch_ppc.h
index 40de9ff..2322c97 100644
--- a/urcu/uatomic_arch_ppc.h
+++ b/urcu/uatomic_arch_ppc.h
@@ -21,6 +21,11 @@
  */
 
 #include <urcu/compiler.h>
+#include <urcu/system.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
 
 #ifndef __SIZEOF_LONG__
 #ifdef __powerpc64__
@@ -30,18 +35,20 @@
 #endif
 #endif
 
+#ifdef __NO_LWSYNC__
+#define LWSYNC_OPCODE	"sync\n"
+#else
+#define LWSYNC_OPCODE	"lwsync\n"
+#endif
+
 #ifndef BITS_PER_LONG
 #define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
 #endif
 
 #define ILLEGAL_INSTR	".long	0xd00d00"
 
-#define uatomic_set(addr, v)				\
-do {							\
-	ACCESS_ONCE(*(addr)) = (v);			\
-} while (0)
-
-#define uatomic_read(addr)	ACCESS_ONCE(*(addr))
+#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
+#define uatomic_read(addr)	LOAD_SHARED(*(addr))
 
 /*
  * Using a isync as second barrier for exchange to provide acquire semantic.
@@ -61,7 +68,7 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 		unsigned int result;
 
 		__asm__ __volatile__(
-			"lwsync\n"
+			LWSYNC_OPCODE
 		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
 			"stwcx. %2,0,%1\n"	/* else store conditional */
 			"bne- 1b\n"		/* retry if lost reservation */
@@ -78,7 +85,7 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 		unsigned long result;
 
 		__asm__ __volatile__(
-			"lwsync\n"
+			LWSYNC_OPCODE
 		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
 			"stdcx. %2,0,%1\n"	/* else store conditional */
 			"bne- 1b\n"		/* retry if lost reservation */
@@ -112,7 +119,7 @@ unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
 		unsigned int old_val;
 
 		__asm__ __volatile__(
-			"lwsync\n"
+			LWSYNC_OPCODE
 		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
 			"cmpd %0,%3\n"		/* if load is not equal to */
 			"bne 2f\n"		/* old, fail */
@@ -133,7 +140,7 @@ unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
 		unsigned long old_val;
 
 		__asm__ __volatile__(
-			"lwsync\n"
+			LWSYNC_OPCODE
 		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
 			"cmpd %0,%3\n"		/* if load is not equal to */
 			"bne 2f\n"		/* old, fail */
@@ -174,7 +181,7 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val,
 		unsigned int result;
 
 		__asm__ __volatile__(
-			"lwsync\n"
+			LWSYNC_OPCODE
 		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
 			"add %0,%2,%0\n"	/* add val to value loaded */
 			"stwcx. %0,0,%1\n"	/* store conditional */
@@ -192,7 +199,7 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val,
 		unsigned long result;
 
 		__asm__ __volatile__(
-			"lwsync\n"
+			LWSYNC_OPCODE
 		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
 			"add %0,%2,%0\n"	/* add val to value loaded */
 			"stdcx. %0,0,%1\n"	/* store conditional */
@@ -228,4 +235,10 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val,
 #define uatomic_inc(addr)		uatomic_add((addr), 1)
 #define uatomic_dec(addr)		uatomic_add((addr), -1)
 
+#define compat_uatomic_cmpxchg(ptr, old, _new)	uatomic_cmpxchg(ptr, old, _new)
+
+#ifdef __cplusplus
+}
+#endif
+
 #endif /* _URCU_ARCH_UATOMIC_PPC_H */
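
For context, below is a minimal usage sketch of the uatomic primitives this patch touches (uatomic_set/uatomic_read now map to STORE_SHARED/LOAD_SHARED, while xchg, cmpxchg and add_return keep their lwarx/stwcx.-based implementations). It is not part of the patch; it assumes the header is reached through liburcu's per-architecture dispatch header <urcu/uatomic_arch.h> of that era (newer releases install <urcu/uatomic.h> instead), and the variable and values are purely illustrative.

/* Usage sketch only; include path is an assumption, see note above. */
#include <stdio.h>
#include <urcu/uatomic_arch.h>

static unsigned long counter;

int main(void)
{
	unsigned long old;

	uatomic_set(&counter, 0);	/* plain shared store (STORE_SHARED) */
	uatomic_add(&counter, 5);	/* lwarx/stwcx. (ldarx/stdcx. on 64-bit) loop */

	old = uatomic_cmpxchg(&counter, 5, 42);	/* returns previous value */
	if (old == 5)
		printf("cmpxchg succeeded, counter = %lu\n",
		       uatomic_read(&counter));	/* plain shared load (LOAD_SHARED) */

	printf("xchg returned %lu\n", uatomic_xchg(&counter, 7));
	return 0;
}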