[urcu.git] / urcu / uatomic_generic.h
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 * Copyright (c) 2010      Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif

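/*
 * Usage sketch (illustrative comment, not part of the original header):
 * uatomic_set() and uatomic_read() are single-access store/load wrappers
 * built on CMM_STORE_SHARED()/CMM_LOAD_SHARED(); they keep the compiler
 * from tearing or refetching the access but do not, by themselves, imply
 * memory-ordering barriers. The "ready" flag and do_work() below are
 * hypothetical names used only for the example:
 *
 *	static int ready;
 *
 *	uatomic_set(&ready, 1);
 *	if (uatomic_read(&ready))
 *		do_work();
 */
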
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error();
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old), \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

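/*
 * Usage sketch (illustrative comment, not part of the original header):
 * uatomic_cmpxchg() returns the value previously stored at addr; the swap
 * took place only if that value equals the expected "old". A typical retry
 * loop, with "count" as a hypothetical shared counter:
 *
 *	static unsigned long count;
 *
 *	unsigned long old, ret;
 *
 *	old = uatomic_read(&count);
 *	for (;;) {
 *		ret = uatomic_cmpxchg(&count, old, old + 1);
 *		if (ret == old)
 *			break;		(swap succeeded)
 *		old = ret;		(lost a race, retry with current value)
 *	}
 */
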
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		      \
						   (unsigned long)(v),	      \
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

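/*
 * Usage sketch (illustrative comment, not part of the original header):
 * uatomic_add_return() atomically adds "v" to *addr and returns the
 * resulting (new) value. With a hypothetical reference count and release
 * function:
 *
 *	static long refcount = 1;
 *
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		release_object();	(hypothetical)
 */
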
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

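/*
 * Usage sketch (illustrative comment, not part of the original header):
 * uatomic_xchg() atomically stores the new value and returns the previous
 * one, e.g. to steal a pointer in one step. "head" and "struct node" are
 * hypothetical names used only for the example:
 *
 *	static struct node *head;
 *
 *	struct node *stolen = uatomic_xchg(&head, NULL);
 */
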
#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 1);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 2);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					      \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		      \
						   (unsigned long)(v),	      \
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 1);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 2);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 4);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 8);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif

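/*
 * Usage sketch (illustrative comment, not part of the original header):
 * the helpers above all reduce to uatomic_add_return()/uatomic_add().
 * With a hypothetical event counter:
 *
 *	static unsigned long nr_events;
 *
 *	uatomic_inc(&nr_events);			(adds 1 atomically)
 *	uatomic_sub(&nr_events, 2);			(subtracts 2 atomically)
 *	if (uatomic_sub_return(&nr_events, 1) == 0)
 *		handle_empty();				(hypothetical)
 */
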
#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */