X-Git-Url: http://git.liburcu.org/?p=urcu.git;a=blobdiff_plain;f=urcu%2Fuatomic%2Fgeneric.h;h=5bb0d4f983c3b96c920827b991b6798fadc5cbbd;hp=04f4afd715800dd02446183d183c4f896de142c1;hb=2917c006f87be3f55bd7ba2119d7fbc79f7677d7;hpb=e56d99bf2046a163875df80bab5195f38606dfde

diff --git a/urcu/uatomic/generic.h b/urcu/uatomic/generic.h
index 04f4afd..5bb0d4f 100644
--- a/urcu/uatomic/generic.h
+++ b/urcu/uatomic/generic.h
@@ -21,6 +21,7 @@
  * Boehm-Demers-Weiser conservative garbage collector.
  */
 
+#include <stdint.h>
 #include <urcu/compiler.h>
 #include <urcu/system.h>
 
@@ -29,7 +30,7 @@ extern "C" {
 #endif
 
 #ifndef uatomic_set
-#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
+#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
 #endif
 
 #ifndef uatomic_read
@@ -38,19 +39,21 @@ extern "C" {
 
 #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
 static inline __attribute__((always_inline))
-void _uatomic_link_error()
+void _uatomic_link_error(void)
 {
 #ifdef ILLEGAL_INSTR
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__(ILLEGAL_INSTR);
 #else
-	__builtin_trap ();
+	__builtin_trap();
 #endif
 }
 
 #else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
-extern void _uatomic_link_error ();
+extern void _uatomic_link_error(void);
 #endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
 
 /* cmpxchg */
@@ -63,17 +66,21 @@ unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
 	switch (len) {
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
-		return __sync_val_compare_and_swap_1(addr, old, _new);
+		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
+				_new);
 #endif
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
-		return __sync_val_compare_and_swap_2(addr, old, _new);
+		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
+				_new);
 #endif
 	case 4:
-		return __sync_val_compare_and_swap_4(addr, old, _new);
+		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
+				_new);
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
-		return __sync_val_compare_and_swap_8(addr, old, _new);
+		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
+				_new);
 #endif
 	}
 	_uatomic_link_error();
@@ -98,20 +105,20 @@ void _uatomic_and(void *addr, unsigned long val,
 	switch (len) {
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
-		__sync_and_and_fetch_1(addr, val);
+		__sync_and_and_fetch_1((uint8_t *) addr, val);
 		return;
 #endif
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
-		__sync_and_and_fetch_2(addr, val);
+		__sync_and_and_fetch_2((uint16_t *) addr, val);
 		return;
 #endif
 	case 4:
-		__sync_and_and_fetch_4(addr, val);
+		__sync_and_and_fetch_4((uint32_t *) addr, val);
 		return;
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
-		__sync_and_and_fetch_8(addr, val);
+		__sync_and_and_fetch_8((uint64_t *) addr, val);
 		return;
 #endif
 	}
@@ -122,6 +129,9 @@ void _uatomic_and(void *addr, unsigned long val,
 	(_uatomic_and((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()
+
 #endif
 
 /* uatomic_or */
@@ -134,20 +144,20 @@ void _uatomic_or(void *addr, unsigned long val,
 	switch (len) {
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
-		__sync_or_and_fetch_1(addr, val);
+		__sync_or_and_fetch_1((uint8_t *) addr, val);
 		return;
 #endif
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
-		__sync_or_and_fetch_2(addr, val);
+		__sync_or_and_fetch_2((uint16_t *) addr, val);
 		return;
 #endif
 	case 4:
-		__sync_or_and_fetch_4(addr, val);
+		__sync_or_and_fetch_4((uint32_t *) addr, val);
 		return;
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
-		__sync_or_and_fetch_8(addr, val);
+		__sync_or_and_fetch_8((uint64_t *) addr, val);
 		return;
 #endif
 	}
@@ -159,8 +169,12 @@ void _uatomic_or(void *addr, unsigned long val,
 	(_uatomic_or((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()
+
 #endif
 
+
 /* uatomic_add_return */
 
 #ifndef uatomic_add_return
@@ -171,17 +185,17 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val,
 	switch (len) {
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
-		return __sync_add_and_fetch_1(addr, val);
+		return __sync_add_and_fetch_1((uint8_t *) addr, val);
 #endif
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
-		return __sync_add_and_fetch_2(addr, val);
+		return __sync_add_and_fetch_2((uint16_t *) addr, val);
 #endif
 	case 4:
-		return __sync_add_and_fetch_4(addr, val);
+		return __sync_add_and_fetch_4((uint32_t *) addr, val);
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
-		return __sync_add_and_fetch_8(addr, val);
+		return __sync_add_and_fetch_8((uint64_t *) addr, val);
 #endif
 	}
 	_uatomic_link_error();
@@ -205,11 +219,12 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
 	{
-		unsigned char old;
+		uint8_t old;
 
 		do {
-			old = uatomic_read((unsigned char *)addr);
-		} while (!__sync_bool_compare_and_swap_1(addr, old, val));
+			old = uatomic_read((uint8_t *) addr);
+		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
+				old, val));
 
 		return old;
 	}
@@ -217,33 +232,36 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
 	{
-		unsigned short old;
+		uint16_t old;
 
 		do {
-			old = uatomic_read((unsigned short *)addr);
-		} while (!__sync_bool_compare_and_swap_2(addr, old, val));
+			old = uatomic_read((uint16_t *) addr);
+		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
+				old, val));
 
 		return old;
 	}
 #endif
 	case 4:
 	{
-		unsigned int old;
+		uint32_t old;
 
 		do {
-			old = uatomic_read((unsigned int *)addr);
-		} while (!__sync_bool_compare_and_swap_4(addr, old, val));
+			old = uatomic_read((uint32_t *) addr);
+		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
+				old, val));
 
 		return old;
 	}
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
 	{
-		unsigned long old;
+		uint64_t old;
 
 		do {
-			old = uatomic_read((unsigned long *)addr);
-		} while (!__sync_bool_compare_and_swap_8(addr, old, val));
+			old = uatomic_read((uint64_t *) addr);
+		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
+				old, val));
 
 		return old;
 	}
@@ -271,9 +289,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
 	{
-		unsigned char old, oldt;
+		uint8_t old, oldt;
 
-		oldt = uatomic_read((unsigned char *)addr);
+		oldt = uatomic_read((uint8_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
@@ -285,9 +303,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
 	{
-		unsigned short old, oldt;
+		uint16_t old, oldt;
 
-		oldt = uatomic_read((unsigned short *)addr);
+		oldt = uatomic_read((uint16_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
@@ -296,9 +314,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 #endif
 	case 4:
 	{
-		unsigned int old, oldt;
+		uint32_t old, oldt;
 
-		oldt = uatomic_read((unsigned int *)addr);
+		oldt = uatomic_read((uint32_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
@@ -309,9 +327,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
 	{
-		unsigned long old, oldt;
+		uint64_t old, oldt;
 
-		oldt = uatomic_read((unsigned long *)addr);
+		oldt = uatomic_read((uint64_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
@@ -328,6 +346,9 @@ void _uatomic_and(void *addr, unsigned long val, int len)
 	(_uatomic_and((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()
+
 #endif /* #ifndef uatomic_and */
 
 #ifndef uatomic_or
@@ -340,9 +361,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
 	{
-		unsigned char old, oldt;
+		uint8_t old, oldt;
 
-		oldt = uatomic_read((unsigned char *)addr);
+		oldt = uatomic_read((uint8_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
@@ -354,9 +375,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
 	{
-		unsigned short old, oldt;
+		uint16_t old, oldt;
 
-		oldt = uatomic_read((unsigned short *)addr);
+		oldt = uatomic_read((uint16_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
@@ -367,9 +388,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 #endif
 	case 4:
 	{
-		unsigned int old, oldt;
+		uint32_t old, oldt;
 
-		oldt = uatomic_read((unsigned int *)addr);
+		oldt = uatomic_read((uint32_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
@@ -380,9 +401,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
 	{
-		unsigned long old, oldt;
+		uint64_t old, oldt;
 
-		oldt = uatomic_read((unsigned long *)addr);
+		oldt = uatomic_read((uint64_t *) addr);
 		do {
 			old = oldt;
 			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
@@ -399,6 +420,9 @@ void _uatomic_or(void *addr, unsigned long val, int len)
 	(_uatomic_or((addr),			\
 		caa_cast_long_keep_sign(v),	\
 		sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()
+
 #endif /* #ifndef uatomic_or */
 
 #ifndef uatomic_add_return
@@ -411,12 +435,12 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
 	{
-		unsigned char old, oldt;
+		uint8_t old, oldt;
 
-		oldt = uatomic_read((unsigned char *)addr);
+		oldt = uatomic_read((uint8_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned char *)addr,
+			oldt = uatomic_cmpxchg((uint8_t *) addr,
 				old, old + val);
 		} while (oldt != old);
 
@@ -426,12 +450,12 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
 	{
-		unsigned short old, oldt;
+		uint16_t old, oldt;
 
-		oldt = uatomic_read((unsigned short *)addr);
+		oldt = uatomic_read((uint16_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned short *)addr,
+			oldt = uatomic_cmpxchg((uint16_t *) addr,
 				old, old + val);
 		} while (oldt != old);
 
@@ -440,12 +464,12 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
 #endif
 	case 4:
 	{
-		unsigned int old, oldt;
+		uint32_t old, oldt;
 
-		oldt = uatomic_read((unsigned int *)addr);
+		oldt = uatomic_read((uint32_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned int *)addr,
+			oldt = uatomic_cmpxchg((uint32_t *) addr,
 				old, old + val);
 		} while (oldt != old);
 
@@ -454,12 +478,12 @@ unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
 	{
-		unsigned long old, oldt;
+		uint64_t old, oldt;
 
-		oldt = uatomic_read((unsigned long *)addr);
+		oldt = uatomic_read((uint64_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned long *)addr,
+			oldt = uatomic_cmpxchg((uint64_t *) addr,
 				old, old + val);
 		} while (oldt != old);
 
@@ -487,12 +511,12 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_BYTE
 	case 1:
 	{
-		unsigned char old, oldt;
+		uint8_t old, oldt;
 
-		oldt = uatomic_read((unsigned char *)addr);
+		oldt = uatomic_read((uint8_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned char *)addr,
+			oldt = uatomic_cmpxchg((uint8_t *) addr,
 				old, val);
 		} while (oldt != old);
 
@@ -502,12 +526,12 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 #ifdef UATOMIC_HAS_ATOMIC_SHORT
 	case 2:
 	{
-		unsigned short old, oldt;
+		uint16_t old, oldt;
 
-		oldt = uatomic_read((unsigned short *)addr);
+		oldt = uatomic_read((uint16_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned short *)addr,
+			oldt = uatomic_cmpxchg((uint16_t *) addr,
 				old, val);
 		} while (oldt != old);
 
@@ -516,12 +540,12 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 #endif
 	case 4:
 	{
-		unsigned int old, oldt;
+		uint32_t old, oldt;
 
-		oldt = uatomic_read((unsigned int *)addr);
+		oldt = uatomic_read((uint32_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned int *)addr,
+			oldt = uatomic_cmpxchg((uint32_t *) addr,
 				old, val);
 		} while (oldt != old);
 
@@ -530,12 +554,12 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 #if (CAA_BITS_PER_LONG == 64)
 	case 8:
 	{
-		unsigned long old, oldt;
+		uint64_t old, oldt;
 
-		oldt = uatomic_read((unsigned long *)addr);
+		oldt = uatomic_read((uint64_t *) addr);
 		do {
 			old = oldt;
-			oldt = uatomic_cmpxchg((unsigned long *)addr,
+			oldt = uatomic_cmpxchg((uint64_t *) addr,
 				old, val);
 		} while (oldt != old);
 
@@ -559,19 +583,27 @@ unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
 
 #ifndef uatomic_add
 #define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
+#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
 #endif
 
 #define uatomic_sub_return(addr, v)	\
 	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
 #define uatomic_sub(addr, v)		\
 	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
+#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()
 
 #ifndef uatomic_inc
 #define uatomic_inc(addr)		uatomic_add((addr), 1)
+#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
 #endif
 
 #ifndef uatomic_dec
 #define uatomic_dec(addr)		uatomic_add((addr), -1)
+#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
 #endif
 
 #ifdef __cplusplus
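---

Note (commentary, not part of the patch above): the generic fallbacks in this diff all follow the same compare-and-swap retry pattern. The standalone sketch below reproduces that pattern with the same GCC __sync builtins the header relies on; the file name and the helpers xchg_u32()/add_return_u32() are illustrative names, not liburcu API.

/*
 * uatomic_casloop_demo.c -- illustrative sketch only, not liburcu code.
 * Mimics the generic _uatomic_exchange()/_uatomic_add_return() fallbacks
 * above using GCC __sync builtins. Build (assumed): gcc -O2 uatomic_casloop_demo.c
 */
#include <stdint.h>
#include <stdio.h>

/* Exchange: re-read the current value until the CAS installs val. */
static uint32_t xchg_u32(uint32_t *addr, uint32_t val)
{
	uint32_t old;

	do {
		old = *(volatile uint32_t *) addr;	/* stands in for uatomic_read() */
	} while (!__sync_bool_compare_and_swap(addr, old, val));

	return old;	/* previous value, as _uatomic_exchange() returns */
}

/* Add-and-return: retry until the CAS succeeds on an unchanged value. */
static uint32_t add_return_u32(uint32_t *addr, uint32_t val)
{
	uint32_t old, oldt;

	oldt = *(volatile uint32_t *) addr;
	do {
		old = oldt;
		/*
		 * __sync_val_compare_and_swap() returns the prior value;
		 * a mismatch means another thread won the race: retry.
		 */
		oldt = __sync_val_compare_and_swap(addr, old, old + val);
	} while (oldt != old);

	return old + val;
}

int main(void)
{
	uint32_t counter = 40;

	printf("add_return: %u\n", add_return_u32(&counter, 2));	/* 42 */
	printf("xchg old:   %u\n", xchg_u32(&counter, 7));		/* 42 */
	printf("final:      %u\n", counter);				/* 7 */
	return 0;
}

Because the __sync builtins act as full memory barriers, the cmm_smp_mb__before_uatomic_*()/cmm_smp_mb__after_uatomic_*() macros added by this patch can expand to a plain compiler barrier (cmm_barrier()) in this generic header; architectures whose uatomic operations do not imply such ordering would define them more strongly.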