#ifndef _URCU_ARCH_UATOMIC_X86_H
#define _URCU_ARCH_UATOMIC_X86_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifndef __SIZEOF_LONG__
#if defined(__x86_64__) || defined(__amd64__)
#define __SIZEOF_LONG__ 8
#else
#define __SIZEOF_LONG__ 4
#endif
#endif

#ifndef BITS_PER_LONG
#define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#endif

/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 * __hp() casts through a wide dummy struct so that the "m" asm
 * constraints below cover the whole addressed object.
 */
struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))

#define _uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))

/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = old;

		__asm__ __volatile__(
		"lock; cmpxchgb %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "q"((unsigned char)_new)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result = old;

		__asm__ __volatile__(
		"lock; cmpxchgw %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned short)_new)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result = old;

		__asm__ __volatile__(
		"lock; cmpxchgl %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned int)_new)
			: "memory");
		return result;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = old;

		__asm__ __volatile__(
		"lock; cmpxchgq %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned long)_new)
			: "memory");
		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) __uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	      \
						sizeof(*(addr))))
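
/*
 * Illustrative usage (a sketch, not an API addition): the classic
 * compare-and-swap retry loop built on uatomic_cmpxchg(). "counter" is
 * a hypothetical unsigned long; uatomic_read() comes from the generic
 * layer included at the end of this file.
 *
 *	unsigned long old, new;
 *
 *	do {
 *		old = uatomic_read(&counter);
 *		new = old + 1;
 *	} while (uatomic_cmpxchg(&counter, old, new) != old);
 *
 * cmpxchg returns the prior memory value: equal to "old" on success,
 * or the conflicting value stored by another CPU on failure.
 */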

/* xchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
{
	/* Note: the "xchg" instruction does not need a "lock" prefix. */
	switch (len) {
	case 1:
	{
		unsigned char result;

		__asm__ __volatile__(
		"xchgb %0, %1"
			: "=q"(result), "+m"(*__hp(addr))
			: "0" ((unsigned char)val)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result;

		__asm__ __volatile__(
		"xchgw %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned short)val)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
		"xchgl %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned int)val)
			: "memory");
		return result;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
		"xchgq %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned long)val)
			: "memory");
		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) __uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
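
/*
 * Illustrative usage (a sketch): atomically publishing a new value
 * while retrieving the previous one, e.g. draining a single-slot
 * hand-off. "slot" and consume() are hypothetical.
 *
 *	struct item *prev;
 *
 *	prev = uatomic_xchg(&slot, NULL);
 *	if (prev)
 *		consume(prev);
 *
 * On x86, xchg with a memory operand asserts the bus lock implicitly,
 * which is why the assembly above carries no "lock" prefix.
 */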

/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long __uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = val;

		__asm__ __volatile__(
		"lock; xaddb %1, %0"
			: "+m"(*__hp(addr)), "+q" (result)
			:
			: "memory");
		return result + (unsigned char)val;
	}
	case 2:
	{
		unsigned short result = val;

		__asm__ __volatile__(
		"lock; xaddw %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned short)val;
	}
	case 4:
	{
		unsigned int result = val;

		__asm__ __volatile__(
		"lock; xaddl %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned int)val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = val;

		__asm__ __volatile__(
		"lock; xaddq %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned long)val;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) __uatomic_add_return((addr),		\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))
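
/*
 * Illustrative usage (a sketch): because xadd returns the prior value,
 * each concurrent updater observes a distinct sum, so a threshold can
 * be detected exactly once. "refcount" and release() are hypothetical.
 *
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		release();
 */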

/* uatomic_add */

static inline __attribute__((always_inline))
void __uatomic_add(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
		"lock; addb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
		"lock; addw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
		"lock; addl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
		"lock; addq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_add(addr, v)						   \
	(__uatomic_add((addr), (unsigned long)(v), sizeof(*(addr))))
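
/*
 * Design note: unlike the xadd-based add_return above, "lock; add"
 * does not produce the resulting value, so it ties up no result
 * register; prefer uatomic_add() when the sum is not needed.
 */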

/* uatomic_inc */

static inline __attribute__((always_inline))
void __uatomic_inc(void *addr, int len)
{
	switch (len) {
	case 1:
		__asm__ __volatile__("lock; incb %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 2:
		__asm__ __volatile__("lock; incw %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 4:
		__asm__ __volatile__("lock; incl %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#if (BITS_PER_LONG == 64)
	case 8:
		__asm__ __volatile__("lock; incq %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_inc(addr)	(__uatomic_inc((addr), sizeof(*(addr))))

/* uatomic_dec */

static inline __attribute__((always_inline))
void __uatomic_dec(void *addr, int len)
{
	switch (len) {
	case 1:
		__asm__ __volatile__("lock; decb %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 2:
		__asm__ __volatile__("lock; decw %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
	case 4:
		__asm__ __volatile__("lock; decl %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#if (BITS_PER_LONG == 64)
	case 8:
		__asm__ __volatile__("lock; decq %0"
			: "=m"(*__hp(addr)) : : "memory");
		return;
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_dec(addr)	(__uatomic_dec((addr), sizeof(*(addr))))

#if ((BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
extern int __rcu_cas_avail;
extern int __rcu_cas_init(void);

#define UATOMIC_COMPAT(insn)						      \
	((likely(__rcu_cas_avail > 0))					      \
	? (_uatomic_##insn)						      \
		: ((unlikely(__rcu_cas_avail < 0)			      \
			? ((__rcu_cas_init() > 0)			      \
				? (_uatomic_##insn)			      \
				: (compat_uatomic_##insn))		      \
			: (compat_uatomic_##insn))))
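
/*
 * Dispatch sketch: UATOMIC_COMPAT(cmpxchg(a, o, n)) resolves at runtime
 * to _uatomic_cmpxchg() when cmpxchg support was detected
 * (__rcu_cas_avail > 0), probes once via __rcu_cas_init() on first use
 * (__rcu_cas_avail < 0), and otherwise falls back to the
 * compat_uatomic_cmpxchg() emulation for CPUs lacking the instruction
 * (pre-i486).
 */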

extern unsigned long _compat_uatomic_set(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)					       \
	((__typeof__(*(addr))) _compat_uatomic_set((addr),		       \
						(unsigned long)(_new),	       \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_xchg(void *addr,
					  unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)					       \
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		       \
						(unsigned long)(_new),	       \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					     unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)				       \
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		       \
						(unsigned long)(old),	       \
						(unsigned long)(_new),	       \
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_add_return(void *addr,
						unsigned long v, int len);
#define compat_uatomic_add_return(addr, v)				       \
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	       \
						(unsigned long)(v),	       \
						sizeof(*(addr))))

#define compat_uatomic_add(addr, v)					       \
		((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_inc(addr)					       \
		(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)					       \
		(compat_uatomic_add((addr), -1))

#else
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif

/* Read is atomic even in compat mode */
#define uatomic_set(addr, v)			\
		UATOMIC_COMPAT(set(addr, v))

#define uatomic_cmpxchg(addr, old, _new)	\
		UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)			\
		UATOMIC_COMPAT(xchg(addr, v))
#define uatomic_add_return(addr, v)		\
		UATOMIC_COMPAT(add_return(addr, v))

#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))
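
/*
 * Illustrative usage (a sketch): client code uses only the uatomic_*
 * entry points above; "hits" is a hypothetical counter.
 *
 *	static unsigned long hits;
 *
 *	uatomic_set(&hits, 0);
 *	uatomic_inc(&hits);
 *	uatomic_add(&hits, 10);
 */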

#include <urcu/uatomic_generic.h>

#endif /* _URCU_ARCH_UATOMIC_X86_H */