1 #ifndef _URCU_UATOMIC_GENERIC_H
2 #define _URCU_UATOMIC_GENERIC_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose,  provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
24 #include <urcu/compiler.h>
25 #include <urcu/system.h>
/*
 * uatomic_set: store v into *addr through STORE_SHARED (from urcu/system.h,
 * included above) — presumably a volatile store that prevents the compiler
 * from caching/tearing the write; confirm against urcu/system.h.
 */
#define uatomic_set(addr, v) STORE_SHARED(*(addr), (v))

/*
 * uatomic_read: load *addr through LOAD_SHARED (urcu/system.h) — the
 * matching volatile-read primitive.
 */
#define uatomic_read(addr) LOAD_SHARED(*(addr))
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
/*
 * _uatomic_link_error: reached when an atomic primitive is invoked with an
 * unsupported operand size.  When optimizing, the extern declaration below
 * turns that into a link-time error instead; without optimization the call
 * cannot be elided, so we trap at runtime.
 */
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction.  Cannot catch this with linker
	 * tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	/* Portable fallback for arches that define no ILLEGAL_INSTR. */
	__builtin_trap();
#endif
}
#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
58 #ifndef uatomic_cmpxchg
59 static inline __attribute__((always_inline
))
60 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
61 unsigned long _new
, int len
)
64 #ifdef UATOMIC_HAS_ATOMIC_BYTE
66 return __sync_val_compare_and_swap_1(addr
, old
, _new
);
68 #ifdef UATOMIC_HAS_ATOMIC_SHORT
70 return __sync_val_compare_and_swap_2(addr
, old
, _new
);
73 return __sync_val_compare_and_swap_4(addr
, old
, _new
);
74 #if (BITS_PER_LONG == 64)
76 return __sync_val_compare_and_swap_8(addr
, old
, _new
);
79 _uatomic_link_error();
84 #define uatomic_cmpxchg(addr, old, _new) \
85 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
86 (unsigned long)(_new), \
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Generic atomic add-and-fetch built on the GCC __sync builtins, dispatched
 * on operand size.  Returns the NEW value (after the addition).  Full
 * memory barrier semantics come from the __sync builtins.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	/* Unsupported operand size: link error or runtime trap. */
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: derives the operand size from *addr. */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						  (unsigned long)(v),	    \
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* uatomic_xchg */

#ifndef uatomic_xchg
/*
 * Generic atomic exchange: emulated with a compare-and-swap retry loop
 * (the __sync builtin family provides no direct exchange).  Returns the
 * value that was stored at addr before val was written.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	/* Unsupported operand size: link error or runtime trap. */
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: derives the operand size from *addr. */
#define uatomic_xchg(addr, v)						     \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
187 #else /* #ifndef uatomic_cmpxchg */
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Fallback atomic add-and-fetch for platforms providing their own
 * uatomic_cmpxchg: emulated with a cmpxchg retry loop.  Returns the NEW
 * value (old + val).
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 1);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 2);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	/* Unsupported operand size: link error or runtime trap. */
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: derives the operand size from *addr. */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						  (unsigned long)(v),	    \
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* uatomic_xchg */

#ifndef uatomic_xchg
/*
 * Fallback atomic exchange for platforms providing their own
 * uatomic_cmpxchg: emulated with a cmpxchg retry loop.  Returns the value
 * that was stored at addr before val was written.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 1);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 2);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 4);
		} while (oldt != old);

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 8);
		} while (oldt != old);

		return old;
	}
#endif
	}
	/* Unsupported operand size: link error or runtime trap. */
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: derives the operand size from *addr. */
#define uatomic_xchg(addr, v)						     \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
332 #endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/* Add without returning the result (cast to void discards it). */
#define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))

/* Subtraction expressed as addition of the negated value. */
#define uatomic_sub_return(addr, v) uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v) uatomic_add((addr), -(v))

/* Increment / decrement by one. */
#define uatomic_inc(addr) uatomic_add((addr), 1)

#define uatomic_dec(addr) uatomic_add((addr), -1)
355 #endif /* _URCU_UATOMIC_GENERIC_H */