sparc,ppc,s390: uatomic ops update
[urcu.git] / urcu / uatomic_arch_sparc64.h
1 #ifndef _URCU_ARCH_UATOMIC_SPARC64_H
2 #define _URCU_ARCH_UATOMIC_SPARC64_H
3
4 /*
5 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
6 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
7 * Copyright (c) 1999-2003 by Hewlett-Packard Company. All rights reserved.
8 * Copyright (c) 2009 Mathieu Desnoyers
9 *
10 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
11 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
12 *
13 * Permission is hereby granted to use or copy this program
14 * for any purpose, provided the above notices are retained on all copies.
15 * Permission to modify the code and to distribute modified code is granted,
16 * provided the above notices are retained, and a notice that the code was
17 * modified is included with the above copyright notice.
18 *
19 * Code inspired from libuatomic_ops-1.2, inherited in part from the
20 * Boehm-Demers-Weiser conservative garbage collector.
21 */
22
23 #include <urcu/compiler.h>
24 #include <urcu/system.h>
25
26 #ifndef __SIZEOF_LONG__
27 #ifdef __LP64__
28 #define __SIZEOF_LONG__ 8
29 #else
30 #define __SIZEOF_LONG__ 4
31 #endif
32 #endif
33
34 #ifndef BITS_PER_LONG
35 #define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
36 #endif
37
/*
 * Plain atomic store/load, expressed via the STORE_SHARED/LOAD_SHARED
 * primitives (presumably from <urcu/system.h>, included above — they add
 * the compiler annotations needed for shared-memory access).
 */
#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#define uatomic_read(addr)	LOAD_SHARED(*(addr))
40
41 /* cmpxchg */
42
/*
 * Atomic compare-and-swap on a 4-byte object (or, when BITS_PER_LONG == 64,
 * an 8-byte object).
 *
 * Compares *addr with @old; if they are equal, stores @_new into *addr.
 * Returns the value *addr held before the operation: the SPARC V9 cas/casx
 * instruction always loads the previous memory contents into the register
 * that supplied the new value (here %0, i.e. _new, hence the "+&r"
 * read-write, early-clobber constraint).
 *
 * The membar before the cas orders earlier loads and stores against it;
 * the membar after orders it against later loads and stores, so the whole
 * operation acts as a memory barrier.  The "memory" clobber keeps the
 * compiler from caching shared values across the asm.
 *
 * Any other @len traps at runtime — size-dispatch errors fail loudly.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		__asm__ __volatile__ (
			"membar #StoreLoad | #LoadLoad\n\t"
			"cas [%1],%2,%0\n\t"
			"membar #StoreLoad | #StoreStore\n\t"
			: "+&r" (_new)
			: "r" (addr), "r" (old)
			: "memory");

		return _new;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__ (
			"membar #StoreLoad | #LoadLoad\n\t"
			"casx [%1],%2,%0\n\t"
			"membar #StoreLoad | #StoreStore\n\t"
			: "+&r" (_new)
			: "r" (addr), "r" (old)
			: "memory");

		return _new;
	}
#endif
	}
	__builtin_trap();
	return 0;
}
78
79
/*
 * Type-generic compare-and-swap: dispatches on sizeof(*(addr)) and casts
 * the previous value of *addr back to the pointed-to type.
 */
#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new), 	      \
						sizeof(*(addr))))
84
85 /* xchg */
86
/*
 * Atomically replace the 4-byte (or, on 64-bit, 8-byte) object at @addr
 * with @val, returning the value it held just before the swap.
 *
 * Built as a CAS retry loop: each failed _uatomic_cmpxchg hands back the
 * value currently in memory, which becomes the expected value for the
 * next attempt, so no extra reload is needed between rounds.
 *
 * Unsupported @len values trap at runtime.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int expected, seen;

		seen = uatomic_read((unsigned int *)addr);
		for (;;) {
			expected = seen;
			seen = _uatomic_cmpxchg(addr, expected, val, 4);
			if (seen == expected)
				return expected;
		}
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long expected, seen;

		seen = uatomic_read((unsigned long *)addr);
		for (;;) {
			expected = seen;
			seen = _uatomic_cmpxchg(addr, expected, val, 8);
			if (seen == expected)
				return expected;
		}
	}
#endif
	}
	/* Size-dispatch error: fail loudly rather than corrupt memory. */
	__builtin_trap();
	return 0;
}
121
/*
 * Type-generic atomic exchange: stores v into *addr and returns the
 * previous value, cast back to the pointed-to type.
 */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
125
126 /* uatomic_add_return */
127
/*
 * Atomically add @val to the 4-byte (or, on 64-bit, 8-byte) object at
 * @addr and return the NEW value (previous value + @val).
 *
 * Built as a CAS retry loop: a failed _uatomic_cmpxchg returns the value
 * currently in memory, which seeds the next attempt, so the loop only
 * performs one explicit read up front.
 *
 * Unsupported @len values trap at runtime.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int expected, seen;

		seen = uatomic_read((unsigned int *)addr);
		for (;;) {
			expected = seen;
			seen = _uatomic_cmpxchg(addr, expected,
						expected + val, 4);
			if (seen == expected)
				return expected + val;
		}
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long expected, seen;

		seen = uatomic_read((unsigned long *)addr);
		for (;;) {
			expected = seen;
			seen = _uatomic_cmpxchg(addr, expected,
						expected + val, 8);
			if (seen == expected)
				return expected + val;
		}
	}
#endif
	}
	/* Size-dispatch error: fail loudly rather than corrupt memory. */
	__builtin_trap();
	return 0;
}
162
/*
 * Type-generic atomic add-and-return: adds v to *addr and returns the
 * resulting (new) value, cast back to the pointed-to type.
 */
#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))
167
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/* Subtraction is addition of the negated operand. */
#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))

/* Add/sub variants that discard the resulting value. */
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define uatomic_sub(addr, v)		(void)uatomic_sub_return((addr), (v))

#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define uatomic_dec(addr)		uatomic_add((addr), -1)

/*
 * This architecture's cmpxchg handles every supported size natively, so
 * the compat fallback is just an alias for the regular cmpxchg.
 */
#define compat_uatomic_cmpxchg(ptr, old, _new) uatomic_cmpxchg(ptr, old, _new)
179
#endif /* _URCU_ARCH_UATOMIC_SPARC64_H */
This page took 0.034292 seconds and 4 git commands to generate.