/*
 * Document uatomic operations
 * urcu.git / urcu/uatomic/generic.h
 */
1#ifndef _URCU_UATOMIC_GENERIC_H
2#define _URCU_UATOMIC_GENERIC_H
3
4/*
5 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
6 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
7 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
8 * Copyright (c) 2009 Mathieu Desnoyers
9 * Copyright (c) 2010 Paolo Bonzini
10 *
11 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
12 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
13 *
14 * Permission is hereby granted to use or copy this program
15 * for any purpose, provided the above notices are retained on all copies.
16 * Permission to modify the code and to distribute modified code is granted,
17 * provided the above notices are retained, and a notice that the code was
18 * modified is included with the above copyright notice.
19 *
20 * Code inspired from libuatomic_ops-1.2, inherited in part from the
21 * Boehm-Demers-Weiser conservative garbage collector.
22 */
23
24#include <urcu/compiler.h>
25#include <urcu/system.h>
26
27#ifdef __cplusplus
28extern "C" {
29#endif
30
8760d94e 31#ifndef uatomic_set
3daae22a 32#define uatomic_set(addr, v) ((void) CMM_STORE_SHARED(*(addr), (v)))
8760d94e
PB
33#endif
34
35#ifndef uatomic_read
6cf3827c 36#define uatomic_read(addr) CMM_LOAD_SHARED(*(addr))
8760d94e
PB
37#endif
38
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
/*
 * Called for unsupported operand sizes.  Without optimization the
 * link-error trick below cannot work (the dead call is not removed),
 * so trap at run time instead.
 *
 * Fix: declare the function with a (void) prototype; an empty
 * parameter list declares no prototype and is invalid in C23.
 */
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap ();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
/*
 * With optimization enabled, calls for unsupported sizes are proven
 * unreachable and removed, so leaving the symbol undefined turns any
 * misuse into a link-time error.
 */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
55
56/* cmpxchg */
57
58#ifndef uatomic_cmpxchg
59static inline __attribute__((always_inline))
60unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
61 unsigned long _new, int len)
62{
63 switch (len) {
f469d839
PB
64#ifdef UATOMIC_HAS_ATOMIC_BYTE
65 case 1:
66 return __sync_val_compare_and_swap_1(addr, old, _new);
67#endif
68#ifdef UATOMIC_HAS_ATOMIC_SHORT
69 case 2:
70 return __sync_val_compare_and_swap_2(addr, old, _new);
71#endif
8760d94e
PB
72 case 4:
73 return __sync_val_compare_and_swap_4(addr, old, _new);
b39e1761 74#if (CAA_BITS_PER_LONG == 64)
8760d94e
PB
75 case 8:
76 return __sync_val_compare_and_swap_8(addr, old, _new);
77#endif
78 }
79 _uatomic_link_error();
80 return 0;
81}
82
83
e56d99bf
MD
84#define uatomic_cmpxchg(addr, old, _new) \
85 ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
86 caa_cast_long_keep_sign(old), \
87 caa_cast_long_keep_sign(_new),\
8760d94e
PB
88 sizeof(*(addr))))
89
90
bf33aaea
PB
/* uatomic_and */

#ifndef uatomic_and
/*
 * Atomically AND "val" into the "len"-byte integer at "addr", using
 * the gcc __sync fetch-and-and builtins.  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper dispatching on the pointed-to type's size. */
#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
129
985b35b1
PB
/* uatomic_or */

#ifndef uatomic_or
/*
 * Atomically OR "val" into the "len"-byte integer at "addr", using
 * the gcc __sync fetch-and-or builtins.  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper dispatching on the pointed-to type's size. */
#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
169
2812a2d2 170
8760d94e
PB
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Atomically add "val" to the "len"-byte integer at "addr" and return
 * the resulting (post-add) value, using the gcc __sync add-and-fetch
 * builtins.  Unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: result is cast back to the pointed-to type. */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
204
#ifndef uatomic_xchg
/* xchg */

/*
 * Atomically replace the "len"-byte integer at "addr" with "val" and
 * return the previous value.  Implemented as a compare-and-swap retry
 * loop over the gcc __sync builtins.  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char prev;

		/* Retry until no other writer raced the store. */
		do {
			prev = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, prev, val));

		return prev;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short prev;

		do {
			prev = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, prev, val));

		return prev;
	}
#endif
	case 4:
	{
		unsigned int prev;

		do {
			prev = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, prev, val));

		return prev;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long prev;

		do {
			prev = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, prev, val));

		return prev;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: result is cast back to the pointed-to type. */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
268
269#else /* #ifndef uatomic_cmpxchg */
270
bf33aaea
PB
#ifndef uatomic_and
/* uatomic_and */

/*
 * Atomically AND "val" into the "len"-byte integer at "addr",
 * implemented as a cmpxchg retry loop (fallback for architectures
 * without native fetch-and-and).  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		/*
		 * Fix: the original was missing this return and fell
		 * through into the 4-byte case, performing a second,
		 * wider atomic AND on adjacent memory.
		 */
		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper dispatching on the pointed-to type's size. */
#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
342
985b35b1
PB
#ifndef uatomic_or
/* uatomic_or */

/*
 * Atomically OR "val" into the "len"-byte integer at "addr",
 * implemented as a cmpxchg retry loop (fallback for architectures
 * without native fetch-and-or).  Unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char prev, cur;

		cur = uatomic_read((unsigned char *)addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 1);
		} while (cur != prev);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short prev, cur;

		cur = uatomic_read((unsigned short *)addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 2);
		} while (cur != prev);

		return;
	}
#endif
	case 4:
	{
		unsigned int prev, cur;

		cur = uatomic_read((unsigned int *)addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 4);
		} while (cur != prev);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long prev, cur;

		cur = uatomic_read((unsigned long *)addr);
		do {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev | val, 8);
		} while (cur != prev);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper dispatching on the pointed-to type's size. */
#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
416
8760d94e
PB
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Atomically add "val" to the "len"-byte integer at "addr" and return
 * the resulting (post-add) value, implemented as a cmpxchg retry loop
 * (fallback for architectures without native add-and-fetch).
 * Unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char prev, cur;

		cur = uatomic_read((unsigned char *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned char *)addr,
				prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short prev, cur;

		cur = uatomic_read((unsigned short *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned short *)addr,
				prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#endif
	case 4:
	{
		unsigned int prev, cur;

		cur = uatomic_read((unsigned int *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned int *)addr,
				prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long prev, cur;

		cur = uatomic_read((unsigned long *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned long *)addr,
				prev, prev + val);
		} while (cur != prev);

		return prev + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: result is cast back to the pointed-to type. */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
492
#ifndef uatomic_xchg
/* xchg */

/*
 * Atomically replace the "len"-byte integer at "addr" with "val" and
 * return the previous value, implemented as a cmpxchg retry loop
 * (fallback for architectures without native xchg).  Unsupported
 * sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char prev, cur;

		cur = uatomic_read((unsigned char *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned char *)addr,
				prev, val);
		} while (cur != prev);

		return prev;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short prev, cur;

		cur = uatomic_read((unsigned short *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned short *)addr,
				prev, val);
		} while (cur != prev);

		return prev;
	}
#endif
	case 4:
	{
		unsigned int prev, cur;

		cur = uatomic_read((unsigned int *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned int *)addr,
				prev, val);
		} while (cur != prev);

		return prev;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long prev, cur;

		cur = uatomic_read((unsigned long *)addr);
		do {
			prev = cur;
			cur = uatomic_cmpxchg((unsigned long *)addr,
				prev, val);
		} while (cur != prev);

		return prev;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: result is cast back to the pointed-to type. */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
568
569#endif /* #else #ifndef uatomic_cmpxchg */
570
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/* Derived operations, all expressed in terms of uatomic_add_return(). */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

/* Subtraction is addition of the (sign-preserving) negated value. */
#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
597
598#ifdef __cplusplus
599}
600#endif
601
602#endif /* _URCU_UATOMIC_GENERIC_H */
/* This page took 0.04857 seconds and 4 git commands to generate. */