uatomic: add uatomic_or
[urcu.git] / urcu / uatomic_generic.h
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif

#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error();
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

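/*
 * Note (explanatory, not from the original source): with optimizations
 * enabled, calls to _uatomic_link_error() for unsupported operand sizes
 * are expected to be eliminated as dead code; any call that survives
 * references an undefined extern symbol and fails at link time. Without
 * optimizations the call cannot be removed, so an inline definition is
 * provided that traps at run time instead.
 */
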
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	      \
						sizeof(*(addr))))

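/*
 * Usage sketch (illustrative only, not part of the original header):
 * uatomic_cmpxchg() returns the value that was in *addr before the
 * operation, so success is detected by comparing the return value with
 * the expected "old" value.
 *
 *	static unsigned long state;
 *
 *	if (uatomic_cmpxchg(&state, 0UL, 1UL) == 0UL)
 *		...;	(we won the 0 -> 1 transition)
 */
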
/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		     (unsigned long)(v),	\
		     sizeof(*(addr))))
#endif

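/*
 * Usage sketch (illustrative only): atomically set flag bits in a shared
 * word; uatomic_or() has no return value, readers observe the bits with
 * uatomic_read().
 *
 *	static unsigned long flags;
 *
 *	uatomic_or(&flags, 0x4UL);	(set bit 2)
 *	if (uatomic_read(&flags) & 0x4UL)
 *		...;			(bit observed as set)
 */
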
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						   (unsigned long)(v),	    \
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

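/*
 * Usage sketch (illustrative only): uatomic_add_return() yields the value
 * of the target *after* the addition, which suits reference-count style
 * checks.
 *
 *	static long refcount;
 *
 *	if (uatomic_add_return(&refcount, -1) == 0)
 *		...;	(last reference dropped; reclaim once it is safe,
 *			 e.g. after an RCU grace period)
 */
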
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

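/*
 * Usage sketch (illustrative only; "struct item" is a placeholder type):
 * uatomic_xchg() stores the new value and returns the previous one, e.g.
 * to atomically take ownership of a pointer held in a shared slot.
 *
 *	static struct item *slot;
 *	struct item *taken;
 *
 *	taken = uatomic_xchg(&slot, NULL);
 *	if (taken)
 *		...;	(we now own "taken"; the slot is empty)
 */
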
#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		     (unsigned long)(v),	\
		     sizeof(*(addr))))
#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 1);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 2);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						   (unsigned long)(v),	    \
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 1);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 2);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 4);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 8);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						      \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif

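/*
 * Usage sketch (illustrative only): the convenience wrappers discard the
 * return value and are typically used for simple statistics counters.
 *
 *	static unsigned long nr_events;
 *
 *	uatomic_inc(&nr_events);
 *	uatomic_add(&nr_events, 16);
 *	uatomic_dec(&nr_events);
 */
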
#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */