// SPDX-FileCopyrightText: 2023 Olivier Dion <odion@efficios.com>
//
// SPDX-License-Identifier: LGPL-2.1-or-later

/*
 * urcu/uatomic/builtins-generic.h
 */

#ifndef _URCU_UATOMIC_BUILTINS_GENERIC_H
#define _URCU_UATOMIC_BUILTINS_GENERIC_H

#include <urcu/compiler.h>
#include <urcu/system.h>

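/*
 * Convention used below: each operation has a `_mo' variant taking an
 * explicit CMM memory order, translated to its C11 counterpart by
 * cmm_to_c11(), plus a short form that supplies a default order.
 * cmm_seq_cst_fence_after_atomic(mo) is understood to emit a sequentially
 * consistent fence only when `mo' is CMM_SEQ_CST_FENCE, and to expand to
 * nothing otherwise.
 */
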
#define uatomic_store(addr, v, mo)					\
	do {								\
		__atomic_store_n(cmm_cast_volatile(addr), v,		\
				cmm_to_c11(mo));			\
		cmm_seq_cst_fence_after_atomic(mo);			\
	} while (0)

#define uatomic_set(addr, v)					\
	do {							\
		uatomic_store(addr, v, CMM_RELAXED);		\
	} while (0)

#define uatomic_load(addr, mo)						\
	__extension__							\
	({								\
		__typeof__(*(addr)) _value =				\
			__atomic_load_n(cmm_cast_volatile(addr),	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
									\
		_value;							\
	})

#define uatomic_read(addr)			\
	uatomic_load(addr, CMM_RELAXED)
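
/*
 * Illustration only, with a hypothetical counter: uatomic_set() and
 * uatomic_read() are the relaxed short forms of uatomic_store() and
 * uatomic_load().
 *
 *	static unsigned long count;
 *
 *	uatomic_set(&count, 0);
 *	unsigned long v = uatomic_read(&count);
 */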

#define uatomic_cmpxchg_mo(addr, old, new, mos, mof)			\
	__extension__							\
	({								\
		__typeof__(*(addr)) _old = (__typeof__(*(addr)))(old);	\
									\
		if (__atomic_compare_exchange_n(cmm_cast_volatile(addr), \
						&_old, new, 0,		\
						cmm_to_c11(mos),	\
						cmm_to_c11(mof))) {	\
			cmm_seq_cst_fence_after_atomic(mos);		\
		} else {						\
			cmm_seq_cst_fence_after_atomic(mof);		\
		}							\
		_old;							\
	})

#define uatomic_cmpxchg(addr, old, new)					\
	uatomic_cmpxchg_mo(addr, old, new, CMM_SEQ_CST_FENCE, CMM_RELAXED)
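
/*
 * Illustration only: a classic compare-and-swap loop. uatomic_cmpxchg()
 * returns the value `*addr' held before the operation, so the swap
 * succeeded iff the return value equals `old'.
 *
 *	unsigned long old, ret;
 *
 *	do {
 *		old = uatomic_read(&count);
 *		ret = uatomic_cmpxchg(&count, old, old * 2);
 *	} while (ret != old);
 */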

#define uatomic_xchg_mo(addr, v, mo)					\
	__extension__							\
	({								\
		__typeof__(*(addr)) _old =				\
			__atomic_exchange_n(cmm_cast_volatile(addr), v,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
		_old;							\
	})

#define uatomic_xchg(addr, v)					\
	uatomic_xchg_mo(addr, v, CMM_SEQ_CST_FENCE)
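
/*
 * Illustration only: atomically publish a new pointer and obtain the
 * previous one, with full ordering.
 *
 *	struct node *prev = uatomic_xchg(&head, new_node);
 */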

#define uatomic_add_return_mo(addr, v, mo)				\
	__extension__							\
	({								\
		__typeof__(*(addr)) _old =				\
			__atomic_add_fetch(cmm_cast_volatile(addr), v,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
		_old;							\
	})

#define uatomic_add_return(addr, v)				\
	uatomic_add_return_mo(addr, v, CMM_SEQ_CST_FENCE)

#define uatomic_sub_return_mo(addr, v, mo)				\
	__extension__							\
	({								\
		__typeof__(*(addr)) _old =				\
			__atomic_sub_fetch(cmm_cast_volatile(addr), v,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
		_old;							\
	})

#define uatomic_sub_return(addr, v)				\
	uatomic_sub_return_mo(addr, v, CMM_SEQ_CST_FENCE)
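
/*
 * Note: despite the `_old' spelling of the macro-local variable, the
 * add_return/sub_return family uses __atomic_add_fetch() and
 * __atomic_sub_fetch() and thus returns the *updated* value, which is
 * what a reference-count release wants to test. Illustration only:
 *
 *	if (uatomic_sub_return(&refcount, 1) == 0)
 *		free_resource();
 */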

#define uatomic_and_mo(addr, mask, mo)					\
	do {								\
		(void) __atomic_and_fetch(cmm_cast_volatile(addr), mask, \
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
	} while (0)

#define uatomic_and(addr, mask)				\
	uatomic_and_mo(addr, mask, CMM_SEQ_CST)

#define uatomic_or_mo(addr, mask, mo)					\
	do {								\
		(void) __atomic_or_fetch(cmm_cast_volatile(addr), mask,	\
					cmm_to_c11(mo));		\
		cmm_seq_cst_fence_after_atomic(mo);			\
	} while (0)

#define uatomic_or(addr, mask)				\
	uatomic_or_mo(addr, mask, CMM_RELAXED)

#define uatomic_add_mo(addr, v, mo)			\
	(void) uatomic_add_return_mo(addr, v, mo)

#define uatomic_add(addr, v)				\
	uatomic_add_mo(addr, v, CMM_RELAXED)

#define uatomic_sub_mo(addr, v, mo)			\
	(void) uatomic_sub_return_mo(addr, v, mo)

#define uatomic_sub(addr, v)				\
	uatomic_sub_mo(addr, v, CMM_RELAXED)

#define uatomic_inc_mo(addr, mo)			\
	uatomic_add_mo(addr, 1, mo)

#define uatomic_inc(addr)				\
	uatomic_inc_mo(addr, CMM_RELAXED)

#define uatomic_dec_mo(addr, mo)			\
	uatomic_sub_mo(addr, 1, mo)

#define uatomic_dec(addr)				\
	uatomic_dec_mo(addr, CMM_RELAXED)

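/*
 * The barrier-less short forms above (uatomic_or(), uatomic_add(),
 * uatomic_sub(), uatomic_inc(), uatomic_dec()) default to CMM_RELAXED;
 * the macros below supply the explicit full barriers callers pair
 * around them. The uatomic_and() pair is kept even though uatomic_and()
 * defaults to CMM_SEQ_CST here, presumably for parity with the other
 * uatomic backends. Illustration only:
 *
 *	cmm_smp_mb__before_uatomic_inc();
 *	uatomic_inc(&count);
 *	cmm_smp_mb__after_uatomic_inc();
 */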
#define cmm_smp_mb__before_uatomic_and()	cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_and()		cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_or()		cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_or()		cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_add()	cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_add()		cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb()

#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb()

#endif /* _URCU_UATOMIC_BUILTINS_GENERIC_H */