Update return value of "set" operations
[urcu.git] / urcu / uatomic / generic.h
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	((void) CMM_STORE_SHARED(*(addr), (v)))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif
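
/*
 * Example usage sketch (illustrative; "active" is a caller-defined shared
 * variable, not part of this header): uatomic_set() and uatomic_read() are
 * plain atomic store/load wrappers and do not imply any memory barrier.
 *
 *	static int active;
 *
 *	uatomic_set(&active, 1);
 *	if (uatomic_read(&active))
 *		...
 */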

#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction. Cannot catch this with linker
	 * tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))

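/*
 * Example usage sketch (illustrative; "refcount" is a caller-defined
 * variable, not part of this header): take a reference only while the
 * count is non-zero. uatomic_cmpxchg() returns the value found in memory,
 * so the update took effect iff that value equals the expected one.
 *
 *	unsigned long ref, old;
 *
 *	ref = uatomic_read(&refcount);
 *	while (ref != 0) {
 *		old = uatomic_cmpxchg(&refcount, ref, ref + 1);
 *		if (old == ref)
 *			break;
 *		ref = old;
 *	}
 */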

/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif
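
/*
 * Example usage sketch (illustrative; "flags" and FLAG_PENDING are
 * caller-defined, not part of this header): atomically clear a bit.
 * Use the cmm_smp_mb__before/after_uatomic_and() helpers when ordering
 * against surrounding accesses is required.
 *
 *	uatomic_and(&flags, ~FLAG_PENDING);
 */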

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
	return;
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif
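
/*
 * Example usage sketch (illustrative; same caller-defined "flags" and
 * FLAG_PENDING as above): atomically set a bit, the counterpart of the
 * uatomic_and() example.
 *
 *	uatomic_or(&flags, FLAG_PENDING);
 */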


/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
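
/*
 * Example usage sketch (illustrative; "nr_events" is a caller-defined
 * counter, not part of this header): add to a counter and use the value
 * returned after the addition, e.g. to detect the first event.
 *
 *	unsigned long count;
 *
 *	count = uatomic_add_return(&nr_events, 1);
 *	if (count == 1)
 *		...
 */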

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
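
/*
 * Example usage sketch (illustrative; "pending_head" and "struct node"
 * are caller-defined, not part of this header): atomically grab the
 * current list head, replacing it with NULL, to drain it in one swap.
 *
 *	struct node *head;
 *
 *	head = uatomic_xchg(&pending_head, NULL);
 *	...
 */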

#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_and()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_and()		cmm_barrier()

#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#define cmm_smp_mb__before_uatomic_or()		cmm_barrier()
#define cmm_smp_mb__after_uatomic_or()		cmm_barrier()

#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					    \
	((__typeof__(*(addr))) _uatomic_add_return((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define cmm_smp_mb__before_uatomic_add()	cmm_barrier()
#define cmm_smp_mb__after_uatomic_add()		cmm_barrier()
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
#define cmm_smp_mb__before_uatomic_sub()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_sub()		cmm_smp_mb__after_uatomic_add()

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define cmm_smp_mb__before_uatomic_inc()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_inc()		cmm_smp_mb__after_uatomic_add()
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#define cmm_smp_mb__before_uatomic_dec()	cmm_smp_mb__before_uatomic_add()
#define cmm_smp_mb__after_uatomic_dec()		cmm_smp_mb__after_uatomic_add()
#endif
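
/*
 * Example usage sketch (illustrative; "nr_users" is a caller-defined
 * counter, not part of this header): the derived helpers above expand to
 * uatomic_add_return()/uatomic_add() with the appropriate operand.
 *
 *	uatomic_inc(&nr_users);
 *	...
 *	if (uatomic_sub_return(&nr_users, 1) == 0)
 *		...
 */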

#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */