X-Git-Url: http://git.liburcu.org/?a=blobdiff_plain;f=urcu%2Fuatomic%2Fx86.h;h=ce1ce5e9b48b75f193d9c80f81977c2005888492;hb=d0bbd9c2e8322f036e0a0a70091cae98cad7e390;hp=9b67f1608d365830bd951382781ada35f91b6150;hpb=cc6b0c208a9707f8a64609a51167290caeace288;p=urcu.git

diff --git a/urcu/uatomic/x86.h b/urcu/uatomic/x86.h
index 9b67f16..ce1ce5e 100644
--- a/urcu/uatomic/x86.h
+++ b/urcu/uatomic/x86.h
@@ -95,15 +95,18 @@ unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return 0;
 }
 
 #define _uatomic_cmpxchg(addr, old, _new)				\
-	((__typeof__(*(addr))) __uatomic_cmpxchg((addr), (unsigned long)(old),\
-						(unsigned long)(_new),	\
+	((__typeof__(*(addr))) __uatomic_cmpxchg((addr),		\
+						caa_cast_long_keep_sign(old), \
+						caa_cast_long_keep_sign(_new),\
 						sizeof(*(addr))))
 
 /* xchg */
@@ -156,14 +159,17 @@ unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return 0;
 }
 
 #define _uatomic_xchg(addr, v)						\
-	((__typeof__(*(addr))) __uatomic_exchange((addr), (unsigned long)(v), \
+	((__typeof__(*(addr))) __uatomic_exchange((addr),		\
+						caa_cast_long_keep_sign(v), \
 						sizeof(*(addr))))
 
 /* uatomic_add_return */
@@ -220,16 +226,18 @@ unsigned long __uatomic_add_return(void *addr, unsigned long val,
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return 0;
 }
 
-#define _uatomic_add_return(addr, v)					\
-	((__typeof__(*(addr))) __uatomic_add_return((addr),		\
-						  (unsigned long)(v),	\
-						  sizeof(*(addr))))
+#define _uatomic_add_return(addr, v)					\
+	((__typeof__(*(addr))) __uatomic_add_return((addr),		\
+						  caa_cast_long_keep_sign(v), \
+						  sizeof(*(addr))))
 
 /* uatomic_and */
 
@@ -276,14 +284,16 @@ void __uatomic_and(void *addr, unsigned long val, int len)
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return;
 }
 
 #define _uatomic_and(addr, v)						\
-	(__uatomic_and((addr), (unsigned long)(v), sizeof(*(addr))))
+	(__uatomic_and((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
 
 /* uatomic_or */
 
@@ -330,14 +340,16 @@ void __uatomic_or(void *addr, unsigned long val, int len)
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return;
 }
 
 #define _uatomic_or(addr, v)						\
-	(__uatomic_or((addr), (unsigned long)(v), sizeof(*(addr))))
+	(__uatomic_or((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
 
 /* uatomic_add */
 
@@ -384,14 +396,16 @@ void __uatomic_add(void *addr, unsigned long val, int len)
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return;
 }
 
 #define _uatomic_add(addr, v)						\
-	(__uatomic_add((addr), (unsigned long)(v), sizeof(*(addr))))
+	(__uatomic_add((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
 
 /* uatomic_inc */
 
@@ -492,8 +506,10 @@ void __uatomic_dec(void *addr, int len)
 		}
 #endif
 	}
-	/* generate an illegal instruction. Cannot catch this with linker tricks
-	 * when optimizations are disabled. */
+	/*
+	 * generate an illegal instruction. Cannot catch this with
+	 * linker tricks when optimizations are disabled.
+	 */
 	__asm__ __volatile__("ud2");
 	return;
 }
@@ -517,7 +533,7 @@ extern unsigned long _compat_uatomic_set(void *addr,
 					 unsigned long _new, int len);
 #define compat_uatomic_set(addr, _new)					\
 	((__typeof__(*(addr))) _compat_uatomic_set((addr),		\
-						(unsigned long)(_new),	\
+						caa_cast_long_keep_sign(_new), \
 						sizeof(*(addr))))
 
 
@@ -525,35 +541,35 @@ extern unsigned long _compat_uatomic_xchg(void *addr,
 					  unsigned long _new, int len);
 #define compat_uatomic_xchg(addr, _new)					\
 	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		\
-						(unsigned long)(_new),	\
+						caa_cast_long_keep_sign(_new), \
 						sizeof(*(addr))))
 
 extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
 					     unsigned long _new, int len);
 #define compat_uatomic_cmpxchg(addr, old, _new)				\
 	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		\
-						(unsigned long)(old),	\
-						(unsigned long)(_new),	\
+						caa_cast_long_keep_sign(old),  \
+						caa_cast_long_keep_sign(_new), \
 						sizeof(*(addr))))
 
 extern void _compat_uatomic_and(void *addr, unsigned long _new, int len);
 #define compat_uatomic_and(addr, v)					\
 	(_compat_uatomic_and((addr),					\
-			(unsigned long)(v),				\
+			caa_cast_long_keep_sign(v),			\
 			sizeof(*(addr))))
 
 extern void _compat_uatomic_or(void *addr, unsigned long _new, int len);
 #define compat_uatomic_or(addr, v)					\
 	(_compat_uatomic_or((addr),					\
-			(unsigned long)(v),				\
+			caa_cast_long_keep_sign(v),			\
 			sizeof(*(addr))))
 
 extern unsigned long _compat_uatomic_add_return(void *addr,
 						unsigned long _new, int len);
-#define compat_uatomic_add_return(addr, v)				\
-	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	\
-						(unsigned long)(v),	\
-						sizeof(*(addr))))
+#define compat_uatomic_add_return(addr, v)				\
+	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	\
+					caa_cast_long_keep_sign(v),	\
+					sizeof(*(addr))))
 
 #define compat_uatomic_add(addr, v)					\
 		((void)compat_uatomic_add_return((addr), (v)))
@@ -574,16 +590,31 @@ extern unsigned long _compat_uatomic_add_return(void *addr,
 		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
 #define uatomic_xchg(addr, v)			\
 		UATOMIC_COMPAT(xchg(addr, v))
+
 #define uatomic_and(addr, v)		\
 		UATOMIC_COMPAT(and(addr, v))
+#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()
+
 #define uatomic_or(addr, v)		\
 		UATOMIC_COMPAT(or(addr, v))
+#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()
+
 #define uatomic_add_return(addr, v)		\
 		UATOMIC_COMPAT(add_return(addr, v))
 
 #define uatomic_add(addr, v)		UATOMIC_COMPAT(add(addr, v))
+#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
+
 #define uatomic_inc(addr)		UATOMIC_COMPAT(inc(addr))
+#define cmm_smp_mb__before_uatomic_inc()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_inc()		cmm_barrier()
+
 #define uatomic_dec(addr)		UATOMIC_COMPAT(dec(addr))
+#define cmm_smp_mb__before_uatomic_dec()	cmm_barrier()
+#define cmm_smp_mb__after_uatomic_dec()		cmm_barrier()
 
 #ifdef __cplusplus
 }
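
Note on the API touched above: the cmm_smp_mb__before_uatomic_*() / cmm_smp_mb__after_uatomic_*() macros added at the end of this diff are ordering hooks that pair with the corresponding uatomic operation. On x86 they reduce to cmm_barrier() (a compiler barrier only), because the lock-prefixed instructions already order memory; on architectures whose atomic instructions do not imply a full barrier, the same hooks expand to a real memory fence, so portable callers should still invoke them. The caa_cast_long_keep_sign() helper that replaces the bare (unsigned long) casts lives with the other caa_* helpers (urcu/compiler.h) and makes the sign-extension of signed operands explicit when they are widened to unsigned long.

A minimal usage sketch of the barrier hooks, assuming the public urcu/uatomic.h wrapper header; the hits/ready variables and record_hit() function are made up for illustration:

	#include <urcu/uatomic.h>

	static unsigned long hits;	/* shared counter (illustrative) */
	static int ready;		/* shared flag written before counting (illustrative) */

	static void record_hit(void)
	{
		ready = 1;				/* plain store to shared data */
		cmm_smp_mb__before_uatomic_add();	/* order the store above before the add */
		uatomic_add(&hits, 1);			/* atomic increment of the counter */
		cmm_smp_mb__after_uatomic_add();	/* order the add before later accesses */
	}

On x86 both hook invocations compile down to compiler barriers, so the sketch costs nothing beyond the lock-prefixed add itself.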