From 121a3305861b8fd69e664e2914e854f3e4853687 Mon Sep 17 00:00:00 2001
From: Mathieu Desnoyers
Date: Tue, 8 May 2012 16:47:28 -0400
Subject: [PATCH] uatomic: add memory barrier API for and/or/add/sub/inc/dec

Implement:

  cmm_smp_mb__before_and, cmm_smp_mb__after_and
  cmm_smp_mb__before_or, cmm_smp_mb__after_or
  cmm_smp_mb__before_add, cmm_smp_mb__after_add
  cmm_smp_mb__before_sub, cmm_smp_mb__after_sub
  cmm_smp_mb__before_inc, cmm_smp_mb__after_inc
  cmm_smp_mb__before_dec, cmm_smp_mb__after_dec

For generic and x86. These currently translate into simple compiler
barriers on all architectures, but they are needed because the
and/or/add/sub/inc/dec uatomics do not provide memory ordering
guarantees by themselves (only uatomic_add_return, uatomic_sub_return,
uatomic_xchg and uatomic_cmpxchg provide full memory barrier guarantees
before and after the atomic operation).
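As a minimal usage sketch, pairing one of these barriers with an
otherwise-unordered uatomic (the data_ready and count variables and the
produce() function are illustrative only, not part of this patch):

	#include <urcu/system.h>
	#include <urcu/uatomic.h>

	static int data_ready;		/* hypothetical shared flag */
	static unsigned long count;	/* hypothetical shared counter */

	static void produce(void)
	{
		/* Publish the data. */
		CMM_STORE_SHARED(data_ready, 1);
		/* Order the store above before the increment below. */
		cmm_smp_mb__before_inc();
		/* uatomic_inc() alone implies no memory ordering. */
		uatomic_inc(&count);
	}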
Signed-off-by: Mathieu Desnoyers
---
 urcu/uatomic/generic.h | 21 +++++++++++++++++++++
 urcu/uatomic/x86.h     | 15 +++++++++++++++
 2 files changed, 36 insertions(+)

diff --git a/urcu/uatomic/generic.h b/urcu/uatomic/generic.h
index 04f4afd..4ef71c7 100644
--- a/urcu/uatomic/generic.h
+++ b/urcu/uatomic/generic.h
@@ -122,6 +122,9 @@ void _uatomic_and(void *addr, unsigned long val,
 	(_uatomic_and((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_and()	cmm_barrier()
+#define cmm_smp_mb__after_and()		cmm_barrier()
+
 #endif
 
 /* uatomic_or */
@@ -159,8 +162,12 @@ void _uatomic_or(void *addr, unsigned long val,
 	(_uatomic_or((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_or()		cmm_barrier()
+#define cmm_smp_mb__after_or()		cmm_barrier()
+
 #endif
 
+
 /* uatomic_add_return */
 
 #ifndef uatomic_add_return
@@ -328,6 +335,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 	(_uatomic_and((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_and()	cmm_barrier()
+#define cmm_smp_mb__after_and()		cmm_barrier()
+
 #endif /* #ifndef uatomic_and */
 
 #ifndef uatomic_or
@@ -399,6 +409,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 	(_uatomic_or((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_or()		cmm_barrier()
+#define cmm_smp_mb__after_or()		cmm_barrier()
+
 #endif /* #ifndef uatomic_or */
 
 #ifndef uatomic_add_return
@@ -559,19 +572,27 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 
 #ifndef uatomic_add
 #define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
+#define cmm_smp_mb__before_add()	cmm_barrier()
+#define cmm_smp_mb__after_add()		cmm_barrier()
 #endif
 
 #define uatomic_sub_return(addr, v)	\
 	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
 #define uatomic_sub(addr, v)		\
 	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
+#define cmm_smp_mb__before_sub()	cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_sub()		cmm_smp_mb__after_add()
 
 #ifndef uatomic_inc
 #define uatomic_inc(addr)		uatomic_add((addr), 1)
+#define cmm_smp_mb__before_inc()	cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_inc()		cmm_smp_mb__after_add()
 #endif
 
 #ifndef uatomic_dec
 #define uatomic_dec(addr)		uatomic_add((addr), -1)
+#define cmm_smp_mb__before_dec()	cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_dec()		cmm_smp_mb__after_add()
 #endif
 
 #ifdef __cplusplus
diff --git a/urcu/uatomic/x86.h b/urcu/uatomic/x86.h
index afe6e37..99b0e6c 100644
--- a/urcu/uatomic/x86.h
+++ b/urcu/uatomic/x86.h
@@ -576,16 +576,31 @@ extern unsigned long _compat_uatomic_add_return(void *addr,
 		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
 #define uatomic_xchg(addr, v)		\
 		UATOMIC_COMPAT(xchg(addr, v))
+
 #define uatomic_and(addr, v)		\
 		UATOMIC_COMPAT(and(addr, v))
+#define cmm_smp_mb__before_and()	cmm_barrier()
+#define cmm_smp_mb__after_and()		cmm_barrier()
+
 #define uatomic_or(addr, v)		\
 		UATOMIC_COMPAT(or(addr, v))
+#define cmm_smp_mb__before_or()		cmm_barrier()
+#define cmm_smp_mb__after_or()		cmm_barrier()
+
 #define uatomic_add_return(addr, v)	\
 		UATOMIC_COMPAT(add_return(addr, v))
 
 #define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
+#define cmm_smp_mb__before_add()	cmm_barrier()
+#define cmm_smp_mb__after_add()		cmm_barrier()
+
 #define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
+#define cmm_smp_mb__before_inc()	cmm_barrier()
+#define cmm_smp_mb__after_inc()		cmm_barrier()
+
 #define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
+#define cmm_smp_mb__before_dec()	cmm_barrier()
+#define cmm_smp_mb__after_dec()		cmm_barrier()
 
 #ifdef __cplusplus
 }
-- 
2.34.1