#ifndef _URCU_ARCH_UATOMIC_X86_H
#define _URCU_ARCH_UATOMIC_X86_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#define UATOMIC_HAS_ATOMIC_BYTE
#define UATOMIC_HAS_ATOMIC_SHORT

#ifdef __cplusplus
extern "C" {
#endif

/*
 * Derived from AO_compare_and_swap() and AO_test_and_set_full().
 */

struct __uatomic_dummy {
	unsigned long v[10];
};
#define __hp(x)	((struct __uatomic_dummy *)(x))
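
/*
 * Note (editorial, not in the original source): __hp() casts any address
 * to a pointer to the oversized dummy struct above; the intent appears to
 * be that the "m" operands in the asm blocks below then name a memory
 * object wide enough to cover the whole addressed location, whatever the
 * caller's pointer type was.
 */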

#define _uatomic_set(addr, v)	CMM_STORE_SHARED(*(addr), (v))

/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_cmpxchg(void *addr, unsigned long old,
				unsigned long _new, int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = old;

		__asm__ __volatile__(
			"lock; cmpxchgb %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "q"((unsigned char)_new)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result = old;

		__asm__ __volatile__(
			"lock; cmpxchgw %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned short)_new)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result = old;

		__asm__ __volatile__(
			"lock; cmpxchgl %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned int)_new)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = old;

		__asm__ __volatile__(
			"lock; cmpxchgq %2, %1"
			: "+a"(result), "+m"(*__hp(addr))
			: "r"((unsigned long)_new)
			: "memory");
		return result;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) __uatomic_cmpxchg((addr),		\
						(unsigned long)(old),	\
						(unsigned long)(_new),	\
						sizeof(*(addr))))

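/*
 * Illustrative sketch (not part of the original source): through the
 * public uatomic_cmpxchg() wrapper defined at the end of this file, a
 * zero-initialized "int lockvar" (hypothetical) can serve as a trylock,
 * since the old value is returned:
 *
 *	if (uatomic_cmpxchg(&lockvar, 0, 1) == 0) {
 *		// old value was 0, so we now own the lock
 *	}
 */
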
/* xchg */

static inline __attribute__((always_inline))
unsigned long __uatomic_exchange(void *addr, unsigned long val, int len)
{
	/*
	 * Note: the "xchg" instruction does not need a "lock" prefix,
	 * because it is implicitly locked when one operand is in memory.
	 */
	switch (len) {
	case 1:
	{
		unsigned char result;

		__asm__ __volatile__(
			"xchgb %0, %1"
			: "=q"(result), "+m"(*__hp(addr))
			: "0" ((unsigned char)val)
			: "memory");
		return result;
	}
	case 2:
	{
		unsigned short result;

		__asm__ __volatile__(
			"xchgw %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned short)val)
			: "memory");
		return result;
	}
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
			"xchgl %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned int)val)
			: "memory");
		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			"xchgq %0, %1"
			: "=r"(result), "+m"(*__hp(addr))
			: "0" ((unsigned long)val)
			: "memory");
		return result;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) __uatomic_exchange((addr),		\
						(unsigned long)(v),	\
						sizeof(*(addr))))

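/*
 * Illustrative sketch (not part of the original source): xchg is handy
 * for atomically stealing a value, e.g. draining a single-slot mailbox
 * through the public uatomic_xchg() wrapper defined at the end of this
 * file ("mailbox" and "process" are hypothetical names):
 *
 *	struct msg *m = uatomic_xchg(&mailbox, NULL);
 *	if (m)
 *		process(m);
 */
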
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long __uatomic_add_return(void *addr, unsigned long val,
				   int len)
{
	switch (len) {
	case 1:
	{
		unsigned char result = val;

		__asm__ __volatile__(
			"lock; xaddb %1, %0"
			: "+m"(*__hp(addr)), "+q" (result)
			:
			: "memory");
		return result + (unsigned char)val;
	}
	case 2:
	{
		unsigned short result = val;

		__asm__ __volatile__(
			"lock; xaddw %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned short)val;
	}
	case 4:
	{
		unsigned int result = val;

		__asm__ __volatile__(
			"lock; xaddl %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned int)val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result = val;

		__asm__ __volatile__(
			"lock; xaddq %1, %0"
			: "+m"(*__hp(addr)), "+r" (result)
			:
			: "memory");
		return result + (unsigned long)val;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}

#define _uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) __uatomic_add_return((addr),		\
						(unsigned long)(v),	\
						sizeof(*(addr))))

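/*
 * Illustrative sketch (not part of the original source): xadd returns the
 * value *before* the addition, which is why the cases above add "val" a
 * second time to yield the post-add value. A fetch-and-add ticket counter
 * via the public uatomic_add_return() wrapper ("next_ticket" is a
 * hypothetical name):
 *
 *	unsigned long my_ticket = uatomic_add_return(&next_ticket, 1) - 1;
 */
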
/* uatomic_and */

static inline __attribute__((always_inline))
void __uatomic_and(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
			"lock; andb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
			"lock; andw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
			"lock; andl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
			"lock; andq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_and(addr, v)						\
	(__uatomic_and((addr), (unsigned long)(v), sizeof(*(addr))))

/* uatomic_or */

static inline __attribute__((always_inline))
void __uatomic_or(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
			"lock; orb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
			"lock; orw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
			"lock; orl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
			"lock; orq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_or(addr, v)						\
	(__uatomic_or((addr), (unsigned long)(v), sizeof(*(addr))))

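/*
 * Illustrative sketch (not part of the original source): the and/or
 * primitives are natural for flag words, via the public uatomic_and()
 * and uatomic_or() wrappers defined at the end of this file ("flags"
 * and FLAG_READY are hypothetical names):
 *
 *	uatomic_or(&flags, FLAG_READY);		// set a bit
 *	uatomic_and(&flags, ~FLAG_READY);	// clear it again
 */
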
/* uatomic_add */

static inline __attribute__((always_inline))
void __uatomic_add(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
			"lock; addb %1, %0"
			: "=m"(*__hp(addr))
			: "iq" ((unsigned char)val)
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
			"lock; addw %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned short)val)
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
			"lock; addl %1, %0"
			: "=m"(*__hp(addr))
			: "ir" ((unsigned int)val)
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
			"lock; addq %1, %0"
			: "=m"(*__hp(addr))
			: "er" ((unsigned long)val)
			: "memory");
		return;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_add(addr, v)						\
	(__uatomic_add((addr), (unsigned long)(v), sizeof(*(addr))))

/* uatomic_inc */

static inline __attribute__((always_inline))
void __uatomic_inc(void *addr, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
			"lock; incb %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
			"lock; incw %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
			"lock; incl %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
			"lock; incq %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_inc(addr)	(__uatomic_inc((addr), sizeof(*(addr))))

/* uatomic_dec */

static inline __attribute__((always_inline))
void __uatomic_dec(void *addr, int len)
{
	switch (len) {
	case 1:
	{
		__asm__ __volatile__(
			"lock; decb %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 2:
	{
		__asm__ __volatile__(
			"lock; decw %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
	case 4:
	{
		__asm__ __volatile__(
			"lock; decl %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
			"lock; decq %0"
			: "=m"(*__hp(addr))
			:
			: "memory");
		return;
	}
#endif
	}
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__("ud2");
	return;
}

#define _uatomic_dec(addr)	(__uatomic_dec((addr), sizeof(*(addr))))

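/*
 * Illustrative sketch (not part of the original source): the void-returning
 * add/inc/dec variants map to single locked instructions and are the cheap
 * choice when the updated value is not needed, e.g. via the public wrappers
 * defined at the end of this file ("nr_readers" is a hypothetical counter):
 *
 *	uatomic_inc(&nr_readers);	// result unused, no xadd needed
 *	uatomic_dec(&nr_readers);
 */
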
#if ((CAA_BITS_PER_LONG != 64) && defined(CONFIG_RCU_COMPAT_ARCH))
extern int __rcu_cas_avail;
extern int __rcu_cas_init(void);

#define UATOMIC_COMPAT(insn)						\
	((caa_likely(__rcu_cas_avail > 0))				\
	? (_uatomic_##insn)						\
	: ((caa_unlikely(__rcu_cas_avail < 0)				\
		? ((__rcu_cas_init() > 0)				\
			? (_uatomic_##insn)				\
			: (compat_uatomic_##insn))			\
		: (compat_uatomic_##insn))))

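/*
 * Editorial note (inferred from the macro above, not stated in the
 * original source): __rcu_cas_avail is presumably a tri-state flag
 * (negative: not probed yet, zero: cmpxchg unavailable, positive:
 * available), so UATOMIC_COMPAT() lazily probes once via __rcu_cas_init()
 * and then routes each operation either to the lock-prefixed fast path or
 * to the compat_uatomic_* fallbacks for CPUs lacking the cmpxchg
 * instruction (pre-i486).
 */
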
extern unsigned long _compat_uatomic_set(void *addr,
					 unsigned long _new, int len);
#define compat_uatomic_set(addr, _new)					\
	((__typeof__(*(addr))) _compat_uatomic_set((addr),		\
						(unsigned long)(_new),	\
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_xchg(void *addr,
					  unsigned long _new, int len);
#define compat_uatomic_xchg(addr, _new)					\
	((__typeof__(*(addr))) _compat_uatomic_xchg((addr),		\
						(unsigned long)(_new),	\
						sizeof(*(addr))))

extern unsigned long _compat_uatomic_cmpxchg(void *addr, unsigned long old,
					     unsigned long _new, int len);
#define compat_uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _compat_uatomic_cmpxchg((addr),		\
						(unsigned long)(old),	\
						(unsigned long)(_new),	\
						sizeof(*(addr))))

extern void _compat_uatomic_and(void *addr, unsigned long _new, int len);
#define compat_uatomic_and(addr, v)					\
	(_compat_uatomic_and((addr),					\
			(unsigned long)(v),				\
			sizeof(*(addr))))

extern void _compat_uatomic_or(void *addr, unsigned long _new, int len);
#define compat_uatomic_or(addr, v)					\
	(_compat_uatomic_or((addr),					\
			(unsigned long)(v),				\
			sizeof(*(addr))))

extern unsigned long _compat_uatomic_add_return(void *addr,
						unsigned long _new, int len);
#define compat_uatomic_add_return(addr, v)				\
	((__typeof__(*(addr))) _compat_uatomic_add_return((addr),	\
						(unsigned long)(v),	\
						sizeof(*(addr))))

#define compat_uatomic_add(addr, v)					\
	((void)compat_uatomic_add_return((addr), (v)))
#define compat_uatomic_inc(addr)					\
	(compat_uatomic_add((addr), 1))
#define compat_uatomic_dec(addr)					\
	(compat_uatomic_add((addr), -1))

#else
#define UATOMIC_COMPAT(insn)	(_uatomic_##insn)
#endif

/* Read is atomic even in compat mode */
#define uatomic_set(addr, v)						\
	UATOMIC_COMPAT(set(addr, v))

#define uatomic_cmpxchg(addr, old, _new)				\
	UATOMIC_COMPAT(cmpxchg(addr, old, _new))
#define uatomic_xchg(addr, v)						\
	UATOMIC_COMPAT(xchg(addr, v))
#define uatomic_and(addr, v)						\
	UATOMIC_COMPAT(and(addr, v))
#define uatomic_or(addr, v)						\
	UATOMIC_COMPAT(or(addr, v))
#define uatomic_add_return(addr, v)					\
	UATOMIC_COMPAT(add_return(addr, v))

#define uatomic_add(addr, v)	UATOMIC_COMPAT(add(addr, v))
#define uatomic_inc(addr)	UATOMIC_COMPAT(inc(addr))
#define uatomic_dec(addr)	UATOMIC_COMPAT(dec(addr))

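/*
 * Illustrative sketch (not part of the original source): typical use of
 * the public API defined above, together with uatomic_read() from
 * <urcu/uatomic/generic.h>, included below ("count" is a hypothetical
 * shared counter):
 *
 *	static unsigned long count;
 *
 *	uatomic_set(&count, 0);
 *	uatomic_add(&count, 42);
 *	if (uatomic_read(&count) >= 42)
 *		uatomic_dec(&count);
 */
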
#ifdef __cplusplus
}
#endif

#include <urcu/uatomic/generic.h>

#endif /* _URCU_ARCH_UATOMIC_X86_H */