uatomic: add memory barrier API for and/or/add/sub/inc/dec
author	Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
	Tue, 8 May 2012 20:47:28 +0000 (16:47 -0400)
committer	Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
	Tue, 8 May 2012 20:47:28 +0000 (16:47 -0400)
Implement:
cmm_smp_mb__before_and, cmm_smp_mb__after_and
cmm_smp_mb__before_or, cmm_smp_mb__after_or
cmm_smp_mb__before_add, cmm_smp_mb__after_add
cmm_smp_mb__before_sub, cmm_smp_mb__after_sub
cmm_smp_mb__before_inc, cmm_smp_mb__after_inc
cmm_smp_mb__before_dec, cmm_smp_mb__after_dec

For generic and x86.

These currently translate into simple compiler barriers on all
architectures. They are needed because the and/or/add/sub/inc/dec
uatomics do not provide memory ordering guarantees as part of their API
(only uatomic_add_return, uatomic_sub_return, uatomic_xchg, and
uatomic_cmpxchg provide full memory barrier guarantees before and after
the atomic operation). A usage sketch follows the list of modified
files below.

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
urcu/uatomic/generic.h
urcu/uatomic/x86.h
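
As an illustration (not part of the patch), here is a minimal sketch of
how a caller might pair the new macros with uatomic_inc(): a writer
fills a payload, then increments a flag that a reader polls. The
publish()/consume() helpers and the global variables are hypothetical;
only the uatomic/cmm APIs come from liburcu.

#include <urcu/arch.h>          /* cmm_smp_rmb() */
#include <urcu/uatomic.h>       /* uatomic_inc(), uatomic_read(), barrier macros */

static int payload;             /* data filled in by the writer */
static unsigned long ready;     /* flag incremented once payload is filled */

static void publish(int value)
{
        payload = value;
        /*
         * Order the payload store before the increment becomes visible;
         * plain uatomic_inc() alone does not guarantee this ordering.
         */
        cmm_smp_mb__before_inc();
        uatomic_inc(&ready);
}

static int consume(void)
{
        if (!uatomic_read(&ready))
                return -1;      /* nothing published yet */
        /* Pair with the writer's barrier before reading the payload. */
        cmm_smp_rmb();
        return payload;
}

On x86 the increment is a lock-prefixed instruction that already acts
as a full barrier, so cmm_smp_mb__before_inc() only needs to be a
compiler barrier there; the new macros give an implementation whose
uatomic_inc() does not imply ordering a place to emit a real memory
barrier instead.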

diff --git a/urcu/uatomic/generic.h b/urcu/uatomic/generic.h
index 04f4afd715800dd02446183d183c4f896de142c1..4ef71c77604eb4b96ba7913b4268aef6a3618250 100644
--- a/urcu/uatomic/generic.h
+++ b/urcu/uatomic/generic.h
@@ -122,6 +122,9 @@ void _uatomic_and(void *addr, unsigned long val,
        (_uatomic_and((addr),                   \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
+#define cmm_smp_mb__before_and()       cmm_barrier()
+#define cmm_smp_mb__after_and()                cmm_barrier()
+
 #endif
 
 /* uatomic_or */
@@ -159,8 +162,12 @@ void _uatomic_or(void *addr, unsigned long val,
        (_uatomic_or((addr),                    \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
+#define cmm_smp_mb__before_or()                cmm_barrier()
+#define cmm_smp_mb__after_or()         cmm_barrier()
+
 #endif
 
+
 /* uatomic_add_return */
 
 #ifndef uatomic_add_return
@@ -328,6 +335,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
        (_uatomic_and((addr),                   \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
+#define cmm_smp_mb__before_and()       cmm_barrier()
+#define cmm_smp_mb__after_and()                cmm_barrier()
+
 #endif /* #ifndef uatomic_and */
 
 #ifndef uatomic_or
@@ -399,6 +409,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
        (_uatomic_or((addr),                    \
                caa_cast_long_keep_sign(v),     \
                sizeof(*(addr))))
+#define cmm_smp_mb__before_or()                cmm_barrier()
+#define cmm_smp_mb__after_or()         cmm_barrier()
+
 #endif /* #ifndef uatomic_or */
 
 #ifndef uatomic_add_return
@@ -559,19 +572,27 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 
 #ifndef uatomic_add
 #define uatomic_add(addr, v)           (void)uatomic_add_return((addr), (v))
+#define cmm_smp_mb__before_add()       cmm_barrier()
+#define cmm_smp_mb__after_add()                cmm_barrier()
 #endif
 
 #define uatomic_sub_return(addr, v)    \
        uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
 #define uatomic_sub(addr, v)           \
        uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
+#define cmm_smp_mb__before_sub()       cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_sub()                cmm_smp_mb__after_add()
 
 #ifndef uatomic_inc
 #define uatomic_inc(addr)              uatomic_add((addr), 1)
+#define cmm_smp_mb__before_inc()       cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_inc()                cmm_smp_mb__after_add()
 #endif
 
 #ifndef uatomic_dec
 #define uatomic_dec(addr)              uatomic_add((addr), -1)
+#define cmm_smp_mb__before_dec()       cmm_smp_mb__before_add()
+#define cmm_smp_mb__after_dec()                cmm_smp_mb__after_add()
 #endif
 
 #ifdef __cplusplus
diff --git a/urcu/uatomic/x86.h b/urcu/uatomic/x86.h
index afe6e37a9d7bf455694077fcbbe3cb2c2dd3a8cb..99b0e6c6b405191538ff6ab50de35320c47652ea 100644
--- a/urcu/uatomic/x86.h
+++ b/urcu/uatomic/x86.h
@@ -576,16 +576,31 @@ extern unsigned long _compat_uatomic_add_return(void *addr,
                UATOMIC_COMPAT(cmpxchg(addr, old, _new))
 #define uatomic_xchg(addr, v)                  \
                UATOMIC_COMPAT(xchg(addr, v))
+
 #define uatomic_and(addr, v)           \
                UATOMIC_COMPAT(and(addr, v))
+#define cmm_smp_mb__before_and()       cmm_barrier()
+#define cmm_smp_mb__after_and()                cmm_barrier()
+
 #define uatomic_or(addr, v)            \
                UATOMIC_COMPAT(or(addr, v))
+#define cmm_smp_mb__before_or()                cmm_barrier()
+#define cmm_smp_mb__after_or()         cmm_barrier()
+
 #define uatomic_add_return(addr, v)            \
                UATOMIC_COMPAT(add_return(addr, v))
 
 #define uatomic_add(addr, v)   UATOMIC_COMPAT(add(addr, v))
+#define cmm_smp_mb__before_add()       cmm_barrier()
+#define cmm_smp_mb__after_add()                cmm_barrier()
+
 #define uatomic_inc(addr)      UATOMIC_COMPAT(inc(addr))
+#define cmm_smp_mb__before_inc()       cmm_barrier()
+#define cmm_smp_mb__after_inc()                cmm_barrier()
+
 #define uatomic_dec(addr)      UATOMIC_COMPAT(dec(addr))
+#define cmm_smp_mb__before_dec()       cmm_barrier()
+#define cmm_smp_mb__after_dec()                cmm_barrier()
 
 #ifdef __cplusplus 
 }