// SPDX-FileCopyrightText: 1991-1994 by Xerox Corporation. All rights reserved.
// SPDX-FileCopyrightText: 1996-1999 by Silicon Graphics. All rights reserved.
// SPDX-FileCopyrightText: 1999-2004 Hewlett-Packard Development Company, L.P.
// SPDX-FileCopyrightText: 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
// SPDX-FileCopyrightText: 2010 Paolo Bonzini
//
// SPDX-License-Identifier: LicenseRef-Boehm-GC

#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <stdint.h>
#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

/*
 * Can be defined for the architecture.
 *
 * What needs to be emitted _before_ the `operation' with memory ordering `mo'.
 */
#ifndef _cmm_compat_c11_smp_mb__before_mo
# define _cmm_compat_c11_smp_mb__before_mo(operation, mo) cmm_smp_mb()
#endif

/*
 * Can be defined for the architecture.
 *
 * What needs to be emitted _after_ the `operation' with memory ordering `mo'.
 */
#ifndef _cmm_compat_c11_smp_mb__after_mo
# define _cmm_compat_c11_smp_mb__after_mo(operation, mo) cmm_smp_mb()
#endif

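/*
 * For instance, a minimal sketch (not part of this header) of how an
 * architecture whose atomic instructions are already strongly ordered
 * could override these hooks, assuming the CMM_RELAXED constant from
 * <urcu/uatomic.h>: skip the fence for relaxed accesses and keep the
 * default full barrier otherwise. Since `mo' is a compile-time constant,
 * the branch folds away.
 *
 *	#define _cmm_compat_c11_smp_mb__before_mo(operation, mo)	\
 *		do {							\
 *			if ((mo) != CMM_RELAXED)			\
 *				cmm_smp_mb();				\
 *		} while (0)
 */
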
#define uatomic_load_store_return_op(op, addr, v, mo)		\
	__extension__						\
	({							\
		_cmm_compat_c11_smp_mb__before_mo(op, mo);	\
		__typeof__(*(addr)) _value = op(addr, v);	\
		_cmm_compat_c11_smp_mb__after_mo(op, mo);	\
								\
		_value;						\
	})

#define uatomic_load_store_op(op, addr, v, mo)			\
	do {							\
		_cmm_compat_c11_smp_mb__before_mo(op, mo);	\
		op(addr, v);					\
		_cmm_compat_c11_smp_mb__after_mo(op, mo);	\
	} while (0)

#define uatomic_store(addr, v, mo)					\
	do {								\
		_cmm_compat_c11_smp_mb__before_mo(uatomic_set, mo);	\
		uatomic_set(addr, v);					\
		_cmm_compat_c11_smp_mb__after_mo(uatomic_set, mo);	\
	} while (0)

#define uatomic_and_mo(addr, v, mo)			\
	uatomic_load_store_op(uatomic_and, addr, v, mo)

#define uatomic_or_mo(addr, v, mo)			\
	uatomic_load_store_op(uatomic_or, addr, v, mo)

#define uatomic_add_mo(addr, v, mo)			\
	uatomic_load_store_op(uatomic_add, addr, v, mo)

#define uatomic_sub_mo(addr, v, mo)			\
	uatomic_load_store_op(uatomic_sub, addr, v, mo)

#define uatomic_inc_mo(addr, mo)			\
	uatomic_load_store_op(uatomic_add, addr, 1, mo)

#define uatomic_dec_mo(addr, mo)			\
	uatomic_load_store_op(uatomic_add, addr, -1, mo)

/*
 * NOTE: We cannot just do "switch (_value == (old) ? mos : mof)", otherwise
 * the compiler emits a -Wduplicated-cond warning.
 */
#define uatomic_cmpxchg_mo(addr, old, new, mos, mof)			\
	__extension__							\
	({								\
		_cmm_compat_c11_smp_mb__before_mo(uatomic_cmpxchg, mos); \
		__typeof__(*(addr)) _value = uatomic_cmpxchg(addr, old, \
							new);		\
									\
		if (_value == (old)) {					\
			_cmm_compat_c11_smp_mb__after_mo(uatomic_cmpxchg, mos); \
		} else {						\
			_cmm_compat_c11_smp_mb__after_mo(uatomic_cmpxchg, mof); \
		}							\
		_value;							\
	})

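/*
 * Usage sketch (not part of this header), assuming the CMM_SEQ_CST and
 * CMM_RELAXED constants from <urcu/uatomic.h>: full ordering is only
 * required when the compare-exchange succeeds; a failed attempt can be
 * relaxed, since the caller will simply retry.
 *
 *	static int try_acquire(unsigned long *token)
 *	{
 *		return uatomic_cmpxchg_mo(token, 0UL, 1UL,
 *					CMM_SEQ_CST, CMM_RELAXED) == 0UL;
 *	}
 */
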
#define uatomic_xchg_mo(addr, v, mo)				\
	uatomic_load_store_return_op(uatomic_xchg, addr, v, mo)

#define uatomic_add_return_mo(addr, v, mo)			\
	uatomic_load_store_return_op(uatomic_add_return, addr, v, mo)

#define uatomic_sub_return_mo(addr, v, mo)			\
	uatomic_load_store_return_op(uatomic_sub_return, addr, v, mo)

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif

#define uatomic_load(addr, mo)						\
	__extension__							\
	({								\
		_cmm_compat_c11_smp_mb__before_mo(uatomic_read, mo);	\
		__typeof__(*(addr)) _rcu_value = uatomic_read(addr);	\
		_cmm_compat_c11_smp_mb__after_mo(uatomic_read, mo);	\
									\
		_rcu_value;						\
	})

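/*
 * Usage sketch (not part of this header), assuming the CMM_RELEASE and
 * CMM_ACQUIRE constants from <urcu/uatomic.h> and assert() from
 * <assert.h>: the classic message-passing pattern. Data written before
 * the release store is guaranteed to be visible to a thread that
 * observes the flag through an acquire load.
 *
 *	static int payload;
 *	static int ready;
 *
 *	static void producer(void)
 *	{
 *		payload = 42;
 *		uatomic_store(&ready, 1, CMM_RELEASE);
 *	}
 *
 *	static void consumer(void)
 *	{
 *		if (uatomic_load(&ready, CMM_ACQUIRE))
 *			assert(payload == 42);
 *	}
 */
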
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
#ifdef ILLEGAL_INSTR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
}
#else
static inline __attribute__((always_inline, __noreturn__))
void _uatomic_link_error(void)
{
	__builtin_trap();
}
#endif

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

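/*
 * How the fallback above works: with optimizations enabled, the size
 * switches below are folded at compile time, so only an unsupported size
 * leaves behind a call to the undefined extern _uatomic_link_error(),
 * which fails at link time. Without optimizations the call cannot be
 * folded away, so a run-time trap is generated instead. For example, on
 * an architecture without UATOMIC_HAS_ATOMIC_SHORT, a (hypothetical)
 * caller doing:
 *
 *	uint16_t x = 0;
 *	(void) uatomic_cmpxchg(&x, 0, 1);
 *
 * would match no case of the size switch and end up in
 * _uatomic_link_error().
 */
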
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
		unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1((uint8_t *) addr, old,
				_new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2((uint16_t *) addr, old,
				_new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4((uint32_t *) addr, old,
				_new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8((uint64_t *) addr, old,
				_new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))

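/*
 * Typical retry-loop use of uatomic_cmpxchg() (illustrative sketch, not
 * part of this header): increment a counter, but never past a limit.
 *
 *	static unsigned long saturating_inc(unsigned long *ctr,
 *			unsigned long max)
 *	{
 *		unsigned long old, oldt;
 *
 *		oldt = uatomic_read(ctr);
 *		do {
 *			old = oldt;
 *			if (old == max)
 *				break;
 *			oldt = uatomic_cmpxchg(ctr, old, old + 1);
 *		} while (oldt != old);
 *
 *		return old;
 *	}
 */
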
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1((uint8_t *) addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2((uint16_t *) addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4((uint32_t *) addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8((uint64_t *) addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif

/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
		int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old;

		do {
			old = uatomic_read((uint8_t *) addr);
		} while (!__sync_bool_compare_and_swap_1((uint8_t *) addr,
				old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old;

		do {
			old = uatomic_read((uint16_t *) addr);
		} while (!__sync_bool_compare_and_swap_2((uint16_t *) addr,
				old, val));

		return old;
	}
#endif
	case 4:
	{
		uint32_t old;

		do {
			old = uatomic_read((uint32_t *) addr);
		} while (!__sync_bool_compare_and_swap_4((uint32_t *) addr,
				old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old;

		do {
			old = uatomic_read((uint64_t *) addr);
		} while (!__sync_bool_compare_and_swap_8((uint64_t *) addr,
				old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		uint8_t old, oldt;

		oldt = uatomic_read((uint8_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint8_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		uint16_t old, oldt;

		oldt = uatomic_read((uint16_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint16_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		uint32_t old, oldt;

		oldt = uatomic_read((uint32_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint32_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		uint64_t old, oldt;

		oldt = uatomic_read((uint64_t *) addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((uint64_t *) addr,
					old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

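/*
 * Usage sketch (illustrative, not part of this header): uatomic_xchg()
 * implies a full memory barrier, so either implementation above can act
 * as a test-and-set primitive. Assumes the caa_cpu_relax() helper from
 * the urcu arch headers and the CMM_RELEASE constant from
 * <urcu/uatomic.h>.
 *
 *	static void example_spin_lock(int *lock)
 *	{
 *		while (uatomic_xchg(lock, 1))
 *			caa_cpu_relax();
 *	}
 *
 *	static void example_spin_unlock(int *lock)
 *	{
 *		uatomic_store(lock, 0, CMM_RELEASE);
 *	}
 */
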
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		((void) uatomic_add_return((addr), (v)))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif

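/*
 * Usage sketch (illustrative, not part of this header): a simple
 * reference count. uatomic_inc() and uatomic_dec() return no value, so
 * the release path uses uatomic_sub_return() to detect the final drop.
 *
 *	static void ref_get(long *refcount)
 *	{
 *		uatomic_inc(refcount);
 *	}
 *
 *	static int ref_put(long *refcount)
 *	{
 *		return uatomic_sub_return(refcount, 1) == 0;
 *	}
 */
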
#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */