add uatomic_generic.h, use it for common definitions
[urcu.git] / urcu / uatomic_generic.h
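This header supplies generic fallback definitions for the uatomic_* primitives. Every operation is wrapped in #ifndef, so an architecture-specific header can define whatever it implements natively and then include this file to pick up the common definitions for the rest. A minimal sketch of that pattern follows; the macro body and its placement in a hypothetical arch header are illustrative assumptions, not code from this commit:

/* Hypothetical arch header: define the natively supported primitives... */
#define uatomic_cmpxchg(addr, old, _new)	\
	__sync_val_compare_and_swap((addr), (old), (_new))

/* ...then pull in the generic fallbacks for everything still undefined
 * (xchg, add_return, add, sub, inc, dec, set, read). */
#include <urcu/uatomic_generic.h>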
#ifndef _URCU_UATOMIC_GENERIC_H
#define _URCU_UATOMIC_GENERIC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 * Copyright (c) 2010 Paolo Bonzini
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef BITS_PER_LONG
#define BITS_PER_LONG		(__SIZEOF_LONG__ * 8)
#endif

#ifndef uatomic_set
#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#endif

#ifndef uatomic_read
#define uatomic_read(addr)	LOAD_SHARED(*(addr))
#endif

#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
	/*
	 * Generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
#else
	__builtin_trap();
#endif
}

#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
extern void _uatomic_link_error();
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */

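/*
 * Note on the fallback above: the size-dispatching helpers below call
 * _uatomic_link_error() for operand sizes they do not support. With
 * optimizations enabled, the switch on the compile-time constant
 * sizeof(*(addr)) is folded away, so supported sizes never reference the
 * extern declaration and unsupported sizes show up as an undefined
 * reference at link time. Without optimizations (or with
 * UATOMIC_NO_LINK_ERROR), the call may survive dead-code elimination,
 * so the inline version traps at run time instead.
 */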
/* cmpxchg */

#ifndef uatomic_cmpxchg
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
	case 4:
		return __sync_val_compare_and_swap_4(addr, old, _new);
#if (BITS_PER_LONG == 64)
	case 8:
		return __sync_val_compare_and_swap_8(addr, old, _new);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old), \
						(unsigned long)(_new),	\
						sizeof(*(addr))))


/* uatomic_add_return */

#ifndef uatomic_add_return
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
	case 4:
		return __sync_add_and_fetch_4(addr, val);
#if (BITS_PER_LONG == 64)
	case 8:
		return __sync_add_and_fetch_8(addr, val);
#endif
	}
	_uatomic_link_error();
	return 0;
}


#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						   (unsigned long)(v),	\
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */
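/*
 * GCC's __sync family has no full-barrier exchange builtin
 * (__sync_lock_test_and_set is only an acquire barrier and, on some
 * targets, may only store the value 1), so exchange is emulated with a
 * compare-and-swap retry loop.
 */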
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old;

		do {
			old = uatomic_read((unsigned int *)addr);
		} while (!__sync_bool_compare_and_swap_4(addr, old, val));

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old;

		do {
			old = uatomic_read((unsigned long *)addr);
		} while (!__sync_bool_compare_and_swap_8(addr, old, val));

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#else /* #ifndef uatomic_cmpxchg */

#ifndef uatomic_add_return
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 4);
		} while (oldt != old);

		return old + val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, old + val, 8);
		} while (oldt != old);

		return old + val;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						   (unsigned long)(v),	\
						   sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */

#ifndef uatomic_xchg
/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old, oldt;

		oldt = uatomic_read((unsigned int *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 4);
		} while (oldt != old);

		return old;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old, oldt;

		oldt = uatomic_read((unsigned long *)addr);
		do {
			old = oldt;
			oldt = _uatomic_cmpxchg(addr, old, val, 8);
		} while (oldt != old);

		return old;
	}
#endif
	}
	_uatomic_link_error();
	return 0;
}

#define uatomic_xchg(addr, v)						\
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						 sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */

#endif /* #else #ifndef uatomic_cmpxchg */

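/*
 * The remaining operations are derived from uatomic_add_return() and
 * uatomic_add(): subtraction adds the negated (two's complement) operand,
 * and inc/dec add +1/-1.
 */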
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#ifndef uatomic_add
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#endif

#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))
#define uatomic_sub(addr, v)		uatomic_add((addr), -(v))

#ifndef uatomic_inc
#define uatomic_inc(addr)		uatomic_add((addr), 1)
#endif

#ifndef uatomic_dec
#define uatomic_dec(addr)		uatomic_add((addr), -1)
#endif

#ifdef __cplusplus
}
#endif

#endif /* _URCU_UATOMIC_GENERIC_H */
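For reference, a hypothetical usage sketch of the resulting API. The urcu/uatomic_arch.h include name is an assumption about the surrounding tree; the uatomic_* calls are the macros defined above:

#include <urcu/uatomic_arch.h>	/* assumed arch header that includes the generic one */

static unsigned long count;

static void uatomic_example(void)
{
	unsigned long v, prev;

	uatomic_set(&count, 0UL);
	uatomic_inc(&count);			/* count == 1 */
	v = uatomic_add_return(&count, 9UL);	/* returns the new value: v == 10 */
	prev = uatomic_xchg(&count, 42UL);	/* returns the old value: prev == 10 */
	if (uatomic_cmpxchg(&count, 42UL, 0UL) == 42UL) {
		/* CAS succeeded: count is 0 again */
	}
	prev = uatomic_sub_return(&count, 1UL);	/* unsigned wrap-around: ULONG_MAX */
	(void) v;
	(void) prev;
}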