Cleanup: remove leftover manual pthread detection
[urcu.git] / urcu / uatomic / generic.h
CommitLineData
8760d94e
PB
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose,  provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <stdint.h>
#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif
31
/*
 * Generic fallbacks for the basic store/load primitives. Architecture
 * headers that provide their own versions define these macros before
 * including this generic header, hence the #ifndef guards.
 */
#ifndef uatomic_set
/* Store v into *addr via CMM_STORE_SHARED (urcu/system.h); result discarded. */
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
/* Load *addr via CMM_LOAD_SHARED (urcu/system.h). */
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
39
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
/*
 * Report use of an unsupported operand size. Without optimization the
 * dead call in the switch default path cannot be eliminated, so a
 * link-time error is not achievable; trap at run time instead.
 */
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
/*
 * Under optimization the call is only emitted for unsupported sizes, so
 * leaving the symbol undefined turns such uses into link-time errors.
 */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
58
/* cmpxchg */

#ifndef uatomic_cmpxchg
/*
 * Atomic compare-and-exchange on the object at addr: if it equals old,
 * store _new; return the value read from addr (== old on success).
 * len is the operand size in bytes; 1- and 2-byte variants are only
 * available when the architecture advertises them. Unsupported sizes
 * reach _uatomic_link_error() (link error or run-time trap).
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))
96
97
bf33aaea
PB
/* uatomic_and */

#ifndef uatomic_and
/*
 * Atomically AND val into the object at addr using the gcc
 * __sync_and_and_fetch builtins. len is the operand size in bytes;
 * unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))

/* Compiler barriers only; the atomic op itself provides ordering here. */
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
136
985b35b1
PB
/* uatomic_or */

#ifndef uatomic_or
/*
 * Atomically OR val into the object at addr using the gcc
 * __sync_or_and_fetch builtins. len is the operand size in bytes;
 * unsupported sizes reach _uatomic_link_error().
 *
 * Fix: dropped the redundant trailing "return;" that followed
 * _uatomic_link_error() at the end of this void function, for
 * consistency with _uatomic_and() above.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))

/* Compiler barriers only; the atomic op itself provides ordering here. */
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
176
2812a2d2 177
8760d94e
PB
/* uatomic_add_return */

#ifndef uatomic_add_return
/*
 * Atomically add val to the object at addr and return the resulting
 * (post-add) value, using the gcc __sync_add_and_fetch builtins.
 * len is the operand size in bytes; unsupported sizes reach
 * _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
211
#ifndef uatomic_xchg
/* xchg */

/*
 * Atomically store val into the object at addr and return the value that
 * was there before. Built as a read + compare-and-swap retry loop on the
 * gcc __sync_bool_compare_and_swap builtins. len is the operand size in
 * bytes; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t prev;

		/* Retry until the CAS succeeds against an unchanged prev. */
		for (;;) {
			prev = uatomic_read((uint8_t *) addr);
			if (__sync_bool_compare_and_swap_1((uint8_t *) addr,
					prev, val))
				return prev;
		}
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t prev;

		for (;;) {
			prev = uatomic_read((uint16_t *) addr);
			if (__sync_bool_compare_and_swap_2((uint16_t *) addr,
					prev, val))
				return prev;
		}
	}
#endif
	case 4:
	{
		uint32_t prev;

		for (;;) {
			prev = uatomic_read((uint32_t *) addr);
			if (__sync_bool_compare_and_swap_4((uint32_t *) addr,
					prev, val))
				return prev;
		}
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t prev;

		for (;;) {
			prev = uatomic_read((uint64_t *) addr);
			if (__sync_bool_compare_and_swap_8((uint64_t *) addr,
					prev, val))
				return prev;
		}
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
279
280#else /* #ifndef uatomic_cmpxchg */
281
bf33aaea
PB
#ifndef uatomic_and
/* uatomic_and */

/*
 * Atomically AND val into the object at addr, emulated as a
 * compare-and-swap retry loop over _uatomic_cmpxchg(). len is the
 * operand size in bytes; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		/* Retry until the cmpxchg observes the value the AND was based on. */
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		/*
		 * Fix: this return was missing, letting the 2-byte case fall
		 * through into the 4-byte case and perform a spurious 32-bit
		 * cmpxchg on a 16-bit object, corrupting the adjacent bytes.
		 */
		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))

/* Compiler barriers only; cmpxchg provides the ordering here. */
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */
353
985b35b1
PB
#ifndef uatomic_or
/* uatomic_or */

/*
 * Atomically OR val into the object at addr, emulated as a
 * compare-and-swap retry loop over _uatomic_cmpxchg(). len is the
 * operand size in bytes; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		/* Retry until the cmpxchg observes the value the OR was based on. */
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))

/* Compiler barriers only; cmpxchg provides the ordering here. */
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */
427
8760d94e
PB
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * Atomically add val to the object at addr and return the resulting
 * (post-add) value, emulated as a compare-and-swap retry loop over the
 * architecture-provided uatomic_cmpxchg(). len is the operand size in
 * bytes; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		/* Retry until the cmpxchg observes the value the sum was based on. */
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
				old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
				old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
				old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
				old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
503
#ifndef uatomic_xchg
/* xchg */

/*
 * Atomically store val into the object at addr and return the previous
 * value, emulated as a compare-and-swap retry loop over the
 * architecture-provided uatomic_cmpxchg(). len is the operand size in
 * bytes; unsupported sizes reach _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		/* Retry until the cmpxchg observes the value we read. */
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
				old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
				old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
				old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
				old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

/* Type-generic wrapper: operand size is inferred from *addr. */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
579
580#endif /* #else #ifndef uatomic_cmpxchg */
581
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
/* Add without using the returned value. */
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

/* Subtraction is addition of the negated, sign-preserved operand. */
#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
/* Increment by one, built on uatomic_add. */
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
/* Decrement by one, built on uatomic_add. */
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
608
#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */
This page took 0.062483 seconds and 4 git commands to generate.