1 #ifndef _URCU_UATOMIC_GENERIC_H
2 #define _URCU_UATOMIC_GENERIC_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
24 #include <urcu/compiler.h>
25 #include <urcu/system.h>
/* Store v to *addr through CMM_STORE_SHARED (see urcu/system.h). */
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))

/* Load *addr through CMM_LOAD_SHARED (see urcu/system.h). */
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
39 #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
40 static inline __attribute__((always_inline
))
41 void _uatomic_link_error()
44 /* generate an illegal instruction. Cannot catch this with linker tricks
45 * when optimizations are disabled. */
46 __asm__
__volatile__(ILLEGAL_INSTR
);
52 #else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
53 extern void _uatomic_link_error ();
54 #endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
58 #ifndef uatomic_cmpxchg
59 static inline __attribute__((always_inline
))
60 unsigned long _uatomic_cmpxchg(void *addr
, unsigned long old
,
61 unsigned long _new
, int len
)
64 #ifdef UATOMIC_HAS_ATOMIC_BYTE
66 return __sync_val_compare_and_swap_1(addr
, old
, _new
);
68 #ifdef UATOMIC_HAS_ATOMIC_SHORT
70 return __sync_val_compare_and_swap_2(addr
, old
, _new
);
73 return __sync_val_compare_and_swap_4(addr
, old
, _new
);
74 #if (CAA_BITS_PER_LONG == 64)
76 return __sync_val_compare_and_swap_8(addr
, old
, _new
);
79 _uatomic_link_error();
84 #define uatomic_cmpxchg(addr, old, _new) \
85 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
86 (unsigned long)(_new), \
/* uatomic_and */

#ifndef uatomic_and
/*
 * Atomically AND val into *addr. Dispatches on operand size to the
 * matching __sync builtin; unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		(unsigned long)(v),		\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
/*
 * Atomically OR val into *addr. Dispatches on operand size to the
 * matching __sync builtin; unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		(unsigned long)(v),		\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Atomically add val to *addr and return the resulting (new) value.
 * Dispatches on operand size to the matching __sync builtin;
 * unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; silences missing-return warnings */
}

#define uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		      \
						  (unsigned long)(v),	      \
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Atomically replace *addr with val and return the previous value.
 * Implemented as a compare-and-swap retry loop over the __sync
 * builtins; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; silences missing-return warnings */
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v),  \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
260 #else /* #ifndef uatomic_cmpxchg */
/* uatomic_and */

#ifndef uatomic_and
/*
 * Atomically AND val into *addr, built on the architecture-provided
 * _uatomic_cmpxchg: re-read the current value and retry the
 * compare-and-swap until no concurrent update intervenes.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		(unsigned long)(v),		\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
/* uatomic_or */

#ifndef uatomic_or
/*
 * Atomically OR val into *addr, built on the architecture-provided
 * _uatomic_cmpxchg: re-read the current value and retry the
 * compare-and-swap until no concurrent update intervenes.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		(unsigned long)(v),		\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Atomically add val to *addr and return the resulting (new) value,
 * built as a compare-and-swap retry loop over uatomic_cmpxchg.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; silences missing-return warnings */
}

#define uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		      \
						  (unsigned long)(v),	      \
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
/* xchg */

#ifndef uatomic_xchg
/*
 * Atomically replace *addr with val and return the previous value,
 * built as a compare-and-swap retry loop over uatomic_cmpxchg.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; silences missing-return warnings */
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v),  \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
553 #endif /* #else #ifndef uatomic_cmpxchg */
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/* Atomic add, discarding the new value returned by uatomic_add_return. */
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))

/* Atomic subtract, expressed as adding the negated value. */
#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

/* Atomic increment by one. */
#define uatomic_inc(addr)		uatomic_add((addr), 1)

/* Atomic decrement by one. */
#define uatomic_dec(addr)		uatomic_add((addr), -1)
576 #endif /* _URCU_UATOMIC_GENERIC_H */