#ifndef _URCU_ARCH_UATOMIC_X86_H
#define _URCU_ARCH_UATOMIC_X86_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <urcu/compiler.h>
#include <urcu/system.h>

#define UATOMIC_HAS_ATOMIC_BYTE
#define UATOMIC_HAS_ATOMIC_SHORT
/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */

struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))
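/*
 * Note (added; not in the original header): casting the target address
 * through struct __uatomic_dummy in the "m" asm operands below appears
 * intended to make the memory operand cover a region larger than any
 * supported operand size, so the compiler treats the whole target object
 * as read/written by the atomic instruction rather than a single byte.
 */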

#define _uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))

/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = old;

		__asm__ __volatile__(
		"lock; cmpxchgb %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "q"((unsigned char)_new)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result = old;

		__asm__ __volatile__(
		"lock; cmpxchgw %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned short)_new)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result = old;

		__asm__ __volatile__(
		"lock; cmpxchgl %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned int)_new)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = old;

		__asm__ __volatile__(
		"lock; cmpxchgq %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned long)_new)
			: "memory");
		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) __uatomic_cmpxchg((addr),		      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))
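
/*
 * Example (added for illustration; not part of the original header, and
 * "counter" is a hypothetical variable): uatomic_cmpxchg() returns the
 * value *addr held before the operation, so a typical CAS retry loop
 * compares that return value against the expected old value.
 *
 *	static unsigned long counter;
 *
 *	static void add_clamped(unsigned long inc, unsigned long max)
 *	{
 *		unsigned long oldv, newv;
 *
 *		do {
 *			oldv = uatomic_read(&counter);
 *			newv = oldv + inc;
 *			if (newv > max)
 *				newv = max;
 *		} while (uatomic_cmpxchg(&counter, oldv, newv) != oldv);
 *	}
 */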

/* xchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
{
	/* Note: the "xchg" instruction does not need a "lock" prefix. */
	switch (len) {
	case 1:
	{
		unsigned char result;
		__asm__ __volatile__(
		"xchgb %0, %1"
			: "=q"(result), "+m"(*__hp(addr))
			: "0" ((unsigned char)val)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result;
		__asm__ __volatile__(
		"xchgw %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned short)val)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result;
		__asm__ __volatile__(
		"xchgl %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned int)val)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;
		__asm__ __volatile__(
		"xchgq %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned long)val)
			: "memory");
		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) __uatomic_exchange((addr),		      \
						caa_cast_long_keep_sign(v),   \
						sizeof(*(addr))))
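
/*
 * Example (added for illustration; not part of the original header, and
 * "current_buf" is a hypothetical variable): atomically swapping a
 * pointer to take ownership of a published buffer.
 *
 *	static struct buf *current_buf;
 *
 *	static struct buf *take_buffer(void)
 *	{
 *		return uatomic_xchg(&current_buf, NULL);
 *	}
 *
 * On x86, "xchg" with a memory operand is implicitly locked, which is
 * why __uatomic_exchange() above omits the "lock" prefix.
 */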

/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long __uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = val;

		__asm__ __volatile__(
		"lock; xaddb %1, %0"
			: "+m"(*__hp(addr)), "+q" (result)
			:
			: "memory");
		return result + (unsigned char)val;
	}
	case 2:
	{
		unsigned short result = val;

		__asm__ __volatile__(
		"lock; xaddw %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned short)val;
	}
	case 4:
	{
		unsigned int result = val;

		__asm__ __volatile__(
		"lock; xaddl %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned int)val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = val;

		__asm__ __volatile__(
		"lock; xaddq %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned long)val;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) __uatomic_add_return((addr),		    \
						  caa_cast_long_keep_sign(v), \
						  sizeof(*(addr))))
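
/*
 * Note (added; not in the original header): "xadd" leaves the previous
 * value of *addr in "result", so returning result + val yields the value
 * after the addition; uatomic_add_return() therefore returns the new
 * value. Hypothetical example:
 *
 *	static unsigned long refcount;
 *	...
 *	if (uatomic_add_return(&refcount, 1) == 1)
 *		first_reference_taken();
 */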

/* uatomic_and */

static inline __attribute__((always_inline))
void __uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; andb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; andw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; andl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; andq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
}

#define _uatomic_and(addr, v)						   \
	(__uatomic_and((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))

/* uatomic_or */

static inline __attribute__((always_inline))
void __uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; orb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; orw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; orl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; orq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
}

#define _uatomic_or(addr, v)						   \
	(__uatomic_or((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))

/* uatomic_add */

static inline __attribute__((always_inline))
void __uatomic_add(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; addb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; addw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; addl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; addq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
}

#define _uatomic_add(addr, v)						   \
	(__uatomic_add((addr), caa_cast_long_keep_sign(v), sizeof(*(addr))))

/* uatomic_inc */

static inline __attribute__((always_inline))
void __uatomic_inc(void *addr, int len)
{
	switch (len) {
	case 1:
		__asm__ __volatile__("lock; incb %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 2:
		__asm__ __volatile__("lock; incw %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 4:
		__asm__ __volatile__("lock; incl %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__asm__ __volatile__("lock; incq %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
}

#define _uatomic_inc(addr)	(__uatomic_inc((addr), sizeof(*(addr))))

/* uatomic_dec */

static inline __attribute__((always_inline))
void __uatomic_dec(void *addr, int len)
{
	switch (len) {
	case 1:
		__asm__ __volatile__("lock; decb %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 2:
		__asm__ __volatile__("lock; decw %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 4:
		__asm__ __volatile__("lock; decl %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__asm__ __volatile__("lock; decq %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
}

#define _uatomic_dec(addr)	(__uatomic_dec((addr), sizeof(*(addr))))

#if ((CAA_BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
extern int __rcu_cas_avail;
extern int __rcu_cas_init(void);

#define UATOMIC_COMPAT(insn)						    \
	((caa_likely(__rcu_cas_avail > 0))				    \
	? (_uatomic_##insn)						    \
		: ((caa_unlikely(__rcu_cas_avail < 0)			    \
			? ((__rcu_cas_init() > 0)			    \
				? (_uatomic_##insn)			    \
				: (compat_uatomic_##insn))		    \
			: (compat_uatomic_##insn))))
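
/*
 * Explanatory note (added; not part of the original header): on 32-bit
 * x86 builds with CONFIG_RCU_COMPAT_ARCH, UATOMIC_COMPAT() chooses at run
 * time between the native lock-prefixed implementation (_uatomic_*) and
 * an out-of-line fallback (compat_uatomic_*). __rcu_cas_avail starts out
 * negative ("unknown"), so the first use calls __rcu_cas_init(), which is
 * expected to probe whether cmpxchg is usable on the running CPU; after
 * that, a positive value selects _uatomic_*() and zero selects
 * compat_uatomic_*(). For example, uatomic_xchg(addr, v) then expands to
 * either _uatomic_xchg(addr, v) or compat_uatomic_xchg(addr, v).
 */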

extern unsigned long _compat_uatomic_set(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)				       \
	((__typeof__(*(addr))) _compat_uatomic_set((addr),	       \
				caa_cast_long_keep_sign(_new),	       \
				sizeof(*(addr))))

extern unsigned long _compat_uatomic_xchg(void *addr,
					  unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)				       \
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),	       \
				caa_cast_long_keep_sign(_new),	       \
				sizeof(*(addr))))

extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					     unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)			       \
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),	       \
				caa_cast_long_keep_sign(old),	       \
				caa_cast_long_keep_sign(_new),	       \
				sizeof(*(addr))))

extern void _compat_uatomic_and(void *addr, unsigned long _new, int len);
#define compat_uatomic_and(addr, v)				       \
	(_compat_uatomic_and((addr),				       \
			caa_cast_long_keep_sign(v),		       \
			sizeof(*(addr))))

extern void _compat_uatomic_or(void *addr, unsigned long _new, int len);
#define compat_uatomic_or(addr, v)				       \
	(_compat_uatomic_or((addr),				       \
			caa_cast_long_keep_sign(v),		       \
			sizeof(*(addr))))

extern unsigned long _compat_uatomic_add_return(void *addr,
						unsigned long _new, int len);
#define compat_uatomic_add_return(addr, v)			       \
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),     \
				caa_cast_long_keep_sign(v),	       \
				sizeof(*(addr))))

#define compat_uatomic_add(addr, v)				       \
		((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_inc(addr)				       \
		(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)				       \
		(compat_uatomic_add((addr), -1))

#else
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif

/* Read is atomic even in compat mode */
#define uatomic_set(addr, v)			\
		UATOMIC_COMPAT(set(addr, v))

#define uatomic_cmpxchg(addr, old, _new)	\
		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)			\
		UATOMIC_COMPAT(xchg(addr, v))

#define uatomic_and(addr, v)		\
		UATOMIC_COMPAT(and(addr, v))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#define uatomic_or(addr, v)		\
		UATOMIC_COMPAT(or(addr, v))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#define uatomic_add_return(addr, v)		\
		UATOMIC_COMPAT(add_return(addr, v))

#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()

#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define cmm_smp_mb__before_uatomic_inc()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_inc()		cmm_barrier()

#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
#define cmm_smp_mb__before_uatomic_dec()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_dec()		cmm_barrier()
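
/*
 * Usage sketch (added for illustration; not part of the original header,
 * and the variable/function names are hypothetical). uatomic_and(),
 * uatomic_or(), uatomic_add(), uatomic_inc() and uatomic_dec() do not
 * guarantee memory ordering by themselves in the portable API, so code
 * that needs ordering pairs them with the cmm_smp_mb__before/after_*()
 * macros defined above:
 *
 *	#include <urcu/uatomic.h>
 *
 *	static unsigned long hits;
 *	static unsigned long flags;
 *
 *	static void record_hit(void)
 *	{
 *		uatomic_inc(&hits);
 *		cmm_smp_mb__after_uatomic_inc();
 *		uatomic_or(&flags, 0x1UL);
 *	}
 */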

#include <urcu/uatomic/generic.h>

#endif /* _URCU_ARCH_UATOMIC_X86_H */