Merge branch 'master' into urcu/ht
[urcu.git] / urcu / uatomic / generic.h
1 #ifndef _URCU_UATOMIC_GENERIC_H
2 #define _URCU_UATOMIC_GENERIC_H
3
4 /*
5 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
6 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
7 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
8 * Copyright (c) 2009 Mathieu Desnoyers
9 * Copyright (c) 2010 Paolo Bonzini
10 *
11 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
12 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
13 *
14 * Permission is hereby granted to use or copy this program
15 * for any purpose, provided the above notices are retained on all copies.
16 * Permission to modify the code and to distribute modified code is granted,
17 * provided the above notices are retained, and a notice that the code was
18 * modified is included with the above copyright notice.
19 *
20 * Code inspired from libuatomic_ops-1.2, inherited in part from the
21 * Boehm-Demers-Weiser conservative garbage collector.
22 */
23
24 #include <urcu/compiler.h>
25 #include <urcu/system.h>
26
27 #ifdef __cplusplus
28 extern "C" {
29 #endif
30
/*
 * Fallback definitions of uatomic_set()/uatomic_read(): plain shared-memory
 * store/load wrappers delegating to CMM_STORE_SHARED()/CMM_LOAD_SHARED()
 * from <urcu/system.h>. They are not read-modify-write operations.
 * Architecture headers may override them by defining the macros before
 * including this file (hence the #ifndef guards).
 */
#ifndef uatomic_set
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
38
/*
 * _uatomic_link_error(): diagnoses an atomic operation attempted on an
 * unsupported operand size.
 *
 * When optimizing, only an extern declaration is provided and no
 * definition exists anywhere: any call that survives dead-code
 * elimination (i.e. a genuinely reachable unsupported-size path) is
 * reported at link time. Without optimization (or when
 * UATOMIC_NO_LINK_ERROR is defined) the dead call cannot be optimized
 * away, so we trap at run time instead — via an architecture-provided
 * illegal instruction when ILLEGAL_INSTR is defined, otherwise via
 * __builtin_trap().
 */
#if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap ();
#endif
}

#else /* #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error ();
#endif /* #else #if !defined __OPTIMIZE__  || defined UATOMIC_NO_LINK_ERROR */
55
56 /* cmpxchg */
57
58 #ifndef uatomic_cmpxchg
/*
 * _uatomic_cmpxchg(): atomic compare-and-swap.
 *
 * If *addr equals @old, atomically store @_new into *addr. Returns the
 * value of *addr observed before the operation (equal to @old iff the
 * swap took place), as provided by the gcc
 * __sync_val_compare_and_swap_N() primitives.
 *
 * @addr: address of the operand.
 * @old:  expected current value.
 * @_new: value to store on match.
 * @len:  operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *        1 and 2 only when the architecture defines
 *        UATOMIC_HAS_ATOMIC_BYTE / UATOMIC_HAS_ATOMIC_SHORT.
 *
 * Any other size falls through to _uatomic_link_error() (link-time
 * error when optimizing, run-time trap otherwise).
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; keeps the compiler quiet */
}


/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old), \
						(unsigned long)(_new),	      \
						sizeof(*(addr))))
88
89
90 /* uatomic_and */
91
#ifndef uatomic_and
/*
 * _uatomic_and(): atomically AND @val into the operand at @addr, using
 * the gcc __sync_and_and_fetch_N() primitives. The resulting value is
 * discarded.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 *
 * Fix: each supported case now returns after performing its operation.
 * Previously every case fell through into the next one (e.g. a 4-byte
 * AND would also run the 8-byte primitive on the same address) and then
 * into _uatomic_link_error(); the function also ended with "return 0;"
 * although its return type is void.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_and(addr, v)		\
	(_uatomic_and((addr),		\
		(unsigned long)(v),	\
		sizeof(*(addr))))
#endif
122
123 /* uatomic_or */
124
#ifndef uatomic_or
/*
 * _uatomic_or(): atomically OR @val into the operand at @addr, using
 * the gcc __sync_or_and_fetch_N() primitives. The resulting value is
 * discarded.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 *
 * Fix: each supported case now returns after performing its operation.
 * Previously every case fell through into the next one (e.g. a 4-byte
 * OR would also run the 8-byte primitive on the same address) and then
 * into _uatomic_link_error(); the function also ended with "return 0;"
 * although its return type is void.
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_or(addr, v)		\
	(_uatomic_or((addr),		\
		(unsigned long)(v),	\
		sizeof(*(addr))))
#endif
155
156 /* uatomic_add_return */
157
#ifndef uatomic_add_return
/*
 * _uatomic_add_return(): atomically add @val to the operand at @addr
 * and return the resulting (post-add) value, via the gcc
 * __sync_add_and_fetch_N() primitives.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; keeps the compiler quiet */
}


/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
189
#ifndef uatomic_xchg
/* xchg */

/*
 * _uatomic_exchange(): atomically store @val into *addr and return the
 * previous value.
 *
 * gcc's __sync interface has no value-exchange primitive, so each size
 * emulates it with a compare-and-swap retry loop: read the current
 * value, then attempt __sync_bool_compare_and_swap_N(); if another
 * thread changed *addr in between, the CAS fails and the loop re-reads
 * and retries.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; keeps the compiler quiet */
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
252
253 #else /* #ifndef uatomic_cmpxchg */
254
#ifndef uatomic_and
/* uatomic_and */

/*
 * _uatomic_and(): atomically AND @val into the operand at @addr,
 * emulated with a _uatomic_cmpxchg() retry loop (used when the
 * architecture supplies its own uatomic_cmpxchg but no native AND):
 * read the current value, attempt to swap in (old & val), and retry
 * until no concurrent update interferes.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 *
 * Fix: the 2-byte case was missing its "return;", so a 2-byte AND fell
 * through into the 4-byte case and performed an additional 4-byte
 * cmpxchg loop on the same address, corrupting the adjacent bytes.
 */
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;		/* fix: was missing, fell through to case 4 */
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_and(addr, v)		\
	(_uatomic_and((addr),		\
		(unsigned long)(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */
323
#ifndef uatomic_or
/* uatomic_or */

/*
 * _uatomic_or(): atomically OR @val into the operand at @addr,
 * emulated with a _uatomic_cmpxchg() retry loop (used when the
 * architecture supplies its own uatomic_cmpxchg but no native OR):
 * read the current value, attempt to swap in (old | val), and retry
 * until no concurrent update interferes.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 */
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	/* Unsupported operand size. */
	_uatomic_link_error();
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_or(addr, v)		\
	(_uatomic_or((addr),		\
		(unsigned long)(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */
394
#ifndef uatomic_add_return
/* uatomic_add_return */

/*
 * _uatomic_add_return(): atomically add @val to the operand at @addr
 * and return the resulting (post-add) value, emulated with a
 * uatomic_cmpxchg() retry loop (used when the architecture supplies
 * its own uatomic_cmpxchg but no native add-return): read the current
 * value, attempt to swap in (old + val), and retry until no concurrent
 * update interferes.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
						old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
						old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
						old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
						old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; keeps the compiler quiet */
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
470
#ifndef uatomic_xchg
/* xchg */

/*
 * _uatomic_exchange(): atomically store @val into *addr and return the
 * previous value, emulated with a uatomic_cmpxchg() retry loop (used
 * when the architecture supplies its own uatomic_cmpxchg but no native
 * exchange): read the current value, attempt to swap in @val, and
 * retry until no concurrent update interferes.
 *
 * @len: operand size in bytes: 4 always, 8 when CAA_BITS_PER_LONG == 64,
 *       1 and 2 only with UATOMIC_HAS_ATOMIC_BYTE /
 *       UATOMIC_HAS_ATOMIC_SHORT. Unsupported sizes are diagnosed via
 *       _uatomic_link_error().
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
						old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
						old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
						old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
						old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;	/* unreachable; keeps the compiler quiet */
}

/* Type-generic wrapper: derives the operand size from *(addr). */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
545
546 #endif /* #else #ifndef uatomic_cmpxchg */
547
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/*
 * Derived operations, expressed in terms of uatomic_add_return() /
 * uatomic_add() with a negated (or unit) operand. The #ifndef guards
 * let architecture headers supply native versions instead.
 */
#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

/* Subtraction: addition of the negated operand. */
#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif
564
565 #ifdef __cplusplus
566 }
567 #endif
568
569 #endif /* _URCU_UATOMIC_GENERIC_H */
This page took 0.040154 seconds and 5 git commands to generate.