Fix uatomic sign cast
urcu/uatomic/generic.h
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	CMM_LOAD_SHARED(*(addr))
#endif

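/*
 * Usage sketch (illustrative addition, not part of the original header):
 * uatomic_set() and uatomic_read() expand to CMM_STORE_SHARED() and
 * CMM_LOAD_SHARED(), so they give plain atomic stores/loads of naturally
 * aligned variables without any memory-ordering guarantee. The variable
 * name "active_readers" below is hypothetical.
 *
 *	static unsigned long active_readers;
 *
 *	uatomic_set(&active_readers, 0UL);
 *	if (uatomic_read(&active_readers) == 0)
 *		do_cleanup();	// do_cleanup() is hypothetical
 */
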
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_val_compare_and_swap_1(addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_val_compare_and_swap_2(addr, old, _new);
#endif
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)			\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),		\
			caa_cast_long_keep_sign(old),		\
			caa_cast_long_keep_sign(_new),		\
			sizeof(*(addr))))

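/*
 * Usage sketch (illustrative addition, not part of the original header):
 * uatomic_cmpxchg() returns the value that was in *addr before the operation,
 * so a successful swap is detected by comparing the return value with the
 * expected old value. The "lock" variable below is hypothetical.
 *
 *	static int lock;	// 0 = unlocked, 1 = locked
 *
 *	if (uatomic_cmpxchg(&lock, 0, 1) == 0) {
 *		// the compare-and-swap succeeded: we own the lock
 *	}
 */
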
/* uatomic_and */

#ifndef uatomic_and
static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val,
		  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_and_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_and_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_and_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_and_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif

/* uatomic_or */

#ifndef uatomic_or
static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val,
		 int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		__sync_or_and_fetch_1(addr, val);
		return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		__sync_or_and_fetch_2(addr, val);
		return;
#endif
	case 4:
		__sync_or_and_fetch_4(addr, val);
		return;
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		__sync_or_and_fetch_8(addr, val);
		return;
#endif
	}
	_uatomic_link_error();
	return;
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif

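/*
 * Usage sketch (illustrative addition, not part of the original header):
 * uatomic_and() and uatomic_or() update *addr atomically but return void,
 * so they suit flag manipulation where the previous value is not needed.
 * The flag names and the "flags" variable are hypothetical.
 *
 *	#define FLAG_READY	(1U << 0)
 *	#define FLAG_SHUTDOWN	(1U << 1)
 *	static unsigned int flags;
 *
 *	uatomic_or(&flags, FLAG_READY);		// atomically set a bit
 *	uatomic_and(&flags, ~FLAG_SHUTDOWN);	// atomically clear a bit
 */
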
/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
		return __sync_add_and_fetch_1(addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
		return __sync_add_and_fetch_2(addr, val);
#endif
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (CAA_BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
		caa_cast_long_keep_sign(v),			\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

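/*
 * Usage sketch (illustrative addition, not part of the original header):
 * uatomic_add_return() returns the new value after the addition, which
 * makes it convenient for reference counting. The "refcount" field is
 * hypothetical.
 *
 *	static long refcount = 1;
 *
 *	if (uatomic_add_return(&refcount, -1) == 0) {
 *		// last reference dropped, safe to release the object
 *	}
 */
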
#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old;

		do {
			old = uatomic_read((unsigned char *)addr);
		} while (!__sync_bool_compare_and_swap_1(addr, old, val));

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old;

		do {
			old = uatomic_read((unsigned short *)addr);
		} while (!__sync_bool_compare_and_swap_2(addr, old, val));

		return old;
	}
#endif
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)					\
	((__typeof__(*(addr))) _uatomic_exchange((addr),	\
		caa_cast_long_keep_sign(v),			\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

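/*
 * Usage sketch (illustrative addition, not part of the original header):
 * uatomic_xchg() stores the new value and returns the previous content of
 * *addr, a common way to steal a pending pointer exactly once. The "pending"
 * pointer and process() are hypothetical.
 *
 *	static struct work *pending;
 *
 *	struct work *w = uatomic_xchg(&pending, (struct work *) NULL);
 *	if (w)
 *		process(w);	// only one thread obtains the old pointer
 */
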
#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_and
/* uatomic_and */

static inline __attribute__((always_inline))
void _uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_and(addr, v)			\
	(_uatomic_and((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_and */

#ifndef uatomic_or
/* uatomic_or */

static inline __attribute__((always_inline))
void _uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
		} while (oldt != old);

		return;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
		} while (oldt != old);

		return;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
		} while (oldt != old);

		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
		} while (oldt != old);

		return;
	}
#endif
	}
	_uatomic_link_error();
}

#define uatomic_or(addr, v)			\
	(_uatomic_or((addr),			\
		caa_cast_long_keep_sign(v),	\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_or */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, old + val);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _uatomic_add_return((addr),	\
		caa_cast_long_keep_sign(v),			\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
	case 1:
	{
		unsigned char old, oldt;

		oldt = uatomic_read((unsigned char *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned char *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
	case 2:
	{
		unsigned short old, oldt;

		oldt = uatomic_read((unsigned short *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned short *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned int *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = uatomic_cmpxchg((unsigned long *)addr,
					       old, val);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)					\
	((__typeof__(*(addr))) _uatomic_exchange((addr),	\
		caa_cast_long_keep_sign(v),			\
		sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)	(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	\
	uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
#define uatomic_sub(addr, v)		\
	uatomic_add((addr), -(caa_cast_long_keep_sign(v)))

#ifndef uatomic_inc
#define uatomic_inc(addr)	uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)	uatomic_add((addr), -1)
#endif
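
/*
 * Usage sketch (illustrative addition, not part of the original header): the
 * helpers above are thin wrappers around uatomic_add_return() that discard
 * the returned value. The counter name "nr_events" is hypothetical.
 *
 *	static unsigned long nr_events;
 *
 *	uatomic_inc(&nr_events);	// nr_events += 1, atomically
 *	uatomic_add(&nr_events, 16);	// nr_events += 16
 *	uatomic_dec(&nr_events);	// nr_events -= 1
 *	uatomic_sub(&nr_events, 4);	// nr_events -= 4
 */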

#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */