#ifndef _URCU_ARCH_UATOMIC_X86_H
#define _URCU_ARCH_UATOMIC_X86_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <urcu/compiler.h>
#include <urcu/system.h>

#define UATOMIC_HAS_ATOMIC_BYTE
#define UATOMIC_HAS_ATOMIC_SHORT
/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */
struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))
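/*
 * Note: __hp() casts the target address to a pointer to the oversized
 * dummy struct above, so that the "m"/"+m" operands in the asm statements
 * below name the memory location being updated, whatever its actual width,
 * and the compiler treats that memory as read and clobbered by the asm.
 */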
#define _uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
static inline __attribute__((always_inline))
unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
		unsigned long _new, int len)
		unsigned char result = old;
		"lock; cmpxchgb %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "q"((unsigned char)_new)
		unsigned short result = old;
		"lock; cmpxchgw %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned short)_new)
		unsigned int result = old;
		"lock; cmpxchgl %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned int)_new)
#if (CAA_BITS_PER_LONG == 64)
		unsigned long result = old;
		"lock; cmpxchgq %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned long)_new)
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) __uatomic_cmpxchg((addr),		      \
					caa_cast_long_keep_sign(old),	      \
					caa_cast_long_keep_sign(_new),	      \
					sizeof(*(addr))))
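/*
 * Illustrative sketch (not part of the original header): a typical
 * compare-and-swap retry loop built on the uatomic_cmpxchg() wrapper
 * defined later in this file, which returns the value previously found
 * in memory. The "counter" variable and the update rule are hypothetical.
 *
 *	static unsigned long counter;
 *
 *	void add_if_below_limit(unsigned long limit)
 *	{
 *		unsigned long old, cur;
 *
 *		cur = uatomic_read(&counter);
 *		do {
 *			old = cur;
 *			if (old >= limit)
 *				return;
 *			cur = uatomic_cmpxchg(&counter, old, old + 1);
 *		} while (cur != old);
 *	}
 */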
static inline __attribute__((always_inline))
unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
	/* Note: the "xchg" instruction does not need a "lock" prefix. */
		unsigned char result;
		__asm__ __volatile__(
			: "=q"(result), "+m"(*__hp(addr))
			: "0" ((unsigned char)val)
		unsigned short result;
		__asm__ __volatile__(
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned short)val)
		unsigned int result;
		__asm__ __volatile__(
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned int)val)
#if (CAA_BITS_PER_LONG == 64)
		unsigned long result;
		__asm__ __volatile__(
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned long)val)
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) __uatomic_exchange((addr),		      \
						caa_cast_long_keep_sign(v),   \
						sizeof(*(addr))))
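/*
 * Illustrative sketch (not part of the original header): uatomic_xchg()
 * atomically stores a new value and returns the previous one, which makes
 * it a convenient way to claim a pending flag exactly once. The "pending"
 * variable and the handler function are hypothetical.
 *
 *	static int pending;
 *
 *	void consume_pending(void)
 *	{
 *		if (uatomic_xchg(&pending, 0))
 *			handle_pending_work();
 *	}
 */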
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long __uatomic_add_return(void *addr, unsigned long val,
		int len)
		unsigned char result = val;
		__asm__ __volatile__(
			: "+m"(*__hp(addr)), "+q" (result)
		return result + (unsigned char)val;
		unsigned short result = val;
		__asm__ __volatile__(
			: "+m"(*__hp(addr)), "+r" (result)
		return result + (unsigned short)val;
		unsigned int result = val;
		__asm__ __volatile__(
			: "+m"(*__hp(addr)), "+r" (result)
		return result + (unsigned int)val;
#if (CAA_BITS_PER_LONG == 64)
		unsigned long result = val;
		__asm__ __volatile__(
			: "+m"(*__hp(addr)), "+r" (result)
		return result + (unsigned long)val;
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) __uatomic_add_return((addr),		      \
						caa_cast_long_keep_sign(v),   \
						sizeof(*(addr))))
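/*
 * Illustrative sketch (not part of the original header):
 * uatomic_add_return() returns the value of the target *after* the
 * addition, so it can detect release of the last reference. The structure
 * and the free function are hypothetical.
 *
 *	struct resource {
 *		long refcount;
 *	};
 *
 *	void resource_put(struct resource *res)
 *	{
 *		if (uatomic_add_return(&res->refcount, -1) == 0)
 *			free_resource(res);
 *	}
 */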
static inline __attribute__((always_inline))
void __uatomic_and(void *addr, unsigned long val, int len)
		__asm__ __volatile__(
			: "iq" ((unsigned char)val)
		__asm__ __volatile__(
			: "ir" ((unsigned short)val)
		__asm__ __volatile__(
			: "ir" ((unsigned int)val)
#if (CAA_BITS_PER_LONG == 64)
		__asm__ __volatile__(
			: "er" ((unsigned long)val)
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_and(addr, v)						      \
	(__uatomic_and((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
static inline __attribute__((always_inline))
void __uatomic_or(void *addr, unsigned long val, int len)
		__asm__ __volatile__(
			: "iq" ((unsigned char)val)
		__asm__ __volatile__(
			: "ir" ((unsigned short)val)
		__asm__ __volatile__(
			: "ir" ((unsigned int)val)
#if (CAA_BITS_PER_LONG == 64)
		__asm__ __volatile__(
			: "er" ((unsigned long)val)
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_or(addr, v)						      \
	(__uatomic_or((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
static inline __attribute__((always_inline))
void __uatomic_add(void *addr, unsigned long val, int len)
		__asm__ __volatile__(
			: "iq" ((unsigned char)val)
		__asm__ __volatile__(
			: "ir" ((unsigned short)val)
		__asm__ __volatile__(
			: "ir" ((unsigned int)val)
#if (CAA_BITS_PER_LONG == 64)
		__asm__ __volatile__(
			: "er" ((unsigned long)val)
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_add(addr, v)						      \
	(__uatomic_add((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))
static inline __attribute__((always_inline))
void __uatomic_inc(void *addr, int len)
		__asm__ __volatile__(
		__asm__ __volatile__(
		__asm__ __volatile__(
#if (CAA_BITS_PER_LONG == 64)
		__asm__ __volatile__(
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
#define _uatomic_inc(addr)	(__uatomic_inc((addr), sizeof(*(addr))))
static inline __attribute__((always_inline))
void __uatomic_dec(void *addr, int len)
		__asm__ __volatile__(
		__asm__ __volatile__(
		__asm__ __volatile__(
#if (CAA_BITS_PER_LONG == 64)
		__asm__ __volatile__(
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
#define _uatomic_dec(addr)	(__uatomic_dec((addr), sizeof(*(addr))))
#if ((CAA_BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
extern int __rcu_cas_avail;
extern int __rcu_cas_init(void);
#define UATOMIC_COMPAT(insn)						      \
	((caa_likely(__rcu_cas_avail > 0))				      \
	? (_uatomic_##insn)						      \
	: ((caa_unlikely(__rcu_cas_avail < 0)				      \
		? ((__rcu_cas_init() > 0)				      \
			? (_uatomic_##insn)				      \
			: (compat_uatomic_##insn))			      \
		: (compat_uatomic_##insn))))
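/*
 * How the dispatch above behaves (derived from the macro, illustrative
 * only): __rcu_cas_avail < 0 means "not probed yet", so the first caller
 * runs __rcu_cas_init() to detect whether the CPU supports cmpxchg;
 * afterwards a positive value selects the native _uatomic_* fast path and
 * zero selects the compat_uatomic_* fallback. For example,
 *
 *	uatomic_cmpxchg(&x, old, _new)
 *
 * expands to UATOMIC_COMPAT(cmpxchg(&x, old, _new)) and therefore ends up
 * calling either _uatomic_cmpxchg() or compat_uatomic_cmpxchg() at run time.
 */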
/*
 * We leave the return value so we don't break the ABI, but remove the
 * return value from the API.
 */
extern unsigned long _compat_uatomic_set(void *addr,
					unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)					      \
	((void) _compat_uatomic_set((addr),				      \
				caa_cast_long_keep_sign(_new),		      \
				sizeof(*(addr))))
extern unsigned long _compat_uatomic_xchg(void *addr,
					unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)					      \
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		      \
					caa_cast_long_keep_sign(_new),	      \
					sizeof(*(addr))))
extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		      \
					caa_cast_long_keep_sign(old),	      \
					caa_cast_long_keep_sign(_new),	      \
					sizeof(*(addr))))
extern void _compat_uatomic_and(void *addr, unsigned long _new, int len);
#define compat_uatomic_and(addr, v)					      \
	(_compat_uatomic_and((addr),					      \
			caa_cast_long_keep_sign(v),			      \
			sizeof(*(addr))))
extern void _compat_uatomic_or(void *addr, unsigned long _new, int len);
#define compat_uatomic_or(addr, v)					      \
	(_compat_uatomic_or((addr),					      \
			caa_cast_long_keep_sign(v),			      \
			sizeof(*(addr))))
extern unsigned long _compat_uatomic_add_return(void *addr,
					unsigned long _new, int len);
#define compat_uatomic_add_return(addr, v)				      \
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	      \
					caa_cast_long_keep_sign(v),	      \
					sizeof(*(addr))))
#define compat_uatomic_add(addr, v)					      \
	((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_inc(addr)					      \
	(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)					      \
	(compat_uatomic_add((addr), -1))
#else
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif
/* Read is atomic even in compat mode */
#define uatomic_set(addr, v)						      \
	UATOMIC_COMPAT(set(addr, v))
#define uatomic_cmpxchg(addr, old, _new)				      \
	UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)						      \
	UATOMIC_COMPAT(xchg(addr, v))
#define uatomic_and(addr, v)						      \
	UATOMIC_COMPAT(and(addr, v))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#define uatomic_or(addr, v)						      \
	UATOMIC_COMPAT(or(addr, v))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#define uatomic_add_return(addr, v)					      \
	UATOMIC_COMPAT(add_return(addr, v))

#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()

#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define cmm_smp_mb__before_uatomic_inc()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_inc()		cmm_barrier()

#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
#define cmm_smp_mb__before_uatomic_dec()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_dec()		cmm_barrier()
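/*
 * Illustrative sketch (not part of the original header): portable callers
 * pair the uatomic operations with the cmm_smp_mb__before/after helpers.
 * On x86 these are only compiler barriers, because the lock-prefixed
 * instructions already order memory, but other architectures may need a
 * real fence. The "data" and "ready" variables are hypothetical.
 *
 *	static int data;
 *	static int ready;
 *
 *	void publish(int v)
 *	{
 *		data = v;
 *		cmm_smp_mb__before_uatomic_inc();
 *		uatomic_inc(&ready);
 *	}
 */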
#include <urcu/uatomic/generic.h>

#endif /* _URCU_ARCH_UATOMIC_X86_H */