X-Git-Url: http://git.liburcu.org/?a=blobdiff_plain;f=urcu%2Fuatomic%2Fgeneric.h;h=54d2a8c34a4b744fa021e03ef4fd0a78eb343465;hb=d0bbd9c2e8322f036e0a0a70091cae98cad7e390;hp=04f4afd715800dd02446183d183c4f896de142c1;hpb=e56d99bf2046a163875df80bab5195f38606dfde;p=userspace-rcu.git

diff --git a/urcu/uatomic/generic.h b/urcu/uatomic/generic.h
index 04f4afd..54d2a8c 100644
--- a/urcu/uatomic/generic.h
+++ b/urcu/uatomic/generic.h
@@ -29,7 +29,7 @@ extern "C" {
 #endif
 
 #ifndef uatomic_set
-#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
+#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
 #endif
 
 #ifndef uatomic_read
@@ -41,8 +41,10 @@ static inline __attribute__((always_inline))
 void _uatomic_link_error()
 {
 #ifdef ILLEGAL_INSTR
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__(ILLEGAL_INSTR);
 #else
 	__builtin_trap ();
@@ -122,6 +124,9 @@ void _uatomic_and(void *addr, unsigned long val,
 	(_uatomic_and((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()
+
 #endif
 
 /* uatomic_or */
@@ -159,8 +164,12 @@ void _uatomic_or(void *addr, unsigned long val,
 	(_uatomic_or((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()
+
 #endif
 
+
 /* uatomic_add_return */
 
 #ifndef uatomic_add_return
@@ -328,6 +337,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 	(_uatomic_and((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()
+
 #endif /* #ifndef uatomic_and */
 
 #ifndef uatomic_or
@@ -399,6 +411,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 	(_uatomic_or((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()
+
 #endif /* #ifndef uatomic_or */
 
 #ifndef uatomic_add_return
@@ -559,19 +574,27 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 
 #ifndef uatomic_add
 #define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
+#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
 #endif
 
 #define uatomic_sub_return(addr, v)	\
 	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
 #define uatomic_sub(addr, v)		\
 	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
+#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()
 
 #ifndef uatomic_inc
 #define uatomic_inc(addr)		uatomic_add((addr), 1)
+#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
 #endif
 
 #ifndef uatomic_dec
 #define uatomic_dec(addr)		uatomic_add((addr), -1)
+#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
 #endif
 
 #ifdef __cplusplus
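
For readers of this patch, a minimal usage sketch of the cmm_smp_mb__before_uatomic_*() / cmm_smp_mb__after_uatomic_*() pairs added above follows. The state and function names (active_flags, pending_work, example_set_flag, example_retire_work) are hypothetical and not part of userspace-rcu; the sketch only assumes the public <urcu/uatomic.h> header. In this generic header the pairs expand to cmm_barrier(), since the __sync-based fallbacks already imply a full memory barrier; the intent is that an architecture whose atomics do not imply such a barrier can override them with a real fence.

#include <urcu/uatomic.h>

/* Hypothetical example state; not part of this patch. */
static unsigned long active_flags;	/* bitmask updated with uatomic_or() */
static long pending_work;		/* counter updated with uatomic_dec() */

/* Publish a flag with full ordering around the atomic or. */
static void example_set_flag(unsigned long flag)
{
	cmm_smp_mb__before_uatomic_or();	/* order prior accesses before the or */
	uatomic_or(&active_flags, flag);
	cmm_smp_mb__after_uatomic_or();		/* order the or before later accesses */
}

/* Retire one unit of work with full ordering around the decrement. */
static void example_retire_work(void)
{
	cmm_smp_mb__before_uatomic_dec();
	uatomic_dec(&pending_work);
	cmm_smp_mb__after_uatomic_dec();
}

The convention mirrors the Linux kernel's smp_mb__before_atomic()/smp_mb__after_atomic(): callers always write the barrier pair around the atomic operation, and each architecture decides how much the pair actually costs.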