#ifndef _URCU_ARCH_UATOMIC_PPC_H
#define _URCU_ARCH_UATOMIC_PPC_H
/*
 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009 Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */
#include <urcu/compiler.h>
#include <urcu/system.h>
#ifndef __SIZEOF_LONG__
#ifdef __powerpc64__
#define __SIZEOF_LONG__ 8
#else
#define __SIZEOF_LONG__ 4
#endif
#endif

#ifndef BITS_PER_LONG
#define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#endif
#define ILLEGAL_INSTR	".long	0xd00d00"

#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#define uatomic_read(addr)	LOAD_SHARED(*(addr))
/*
 * Using an isync as the second barrier for exchange, to provide acquire
 * semantics. According to uatomic_ops/sysdeps/gcc/powerpc.h, the
 * documentation is "fairly explicit that this also has acquire semantics."
 * Derived from AO_compare_and_swap(), but with the comparison removed.
 */

/* xchg */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
			"lwsync\n"
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"stwcx. %2,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			"lwsync\n"
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"stdcx. %2,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
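
/*
 * Illustrative sketch, not part of the original header: a minimal
 * test-and-set spinlock built on uatomic_xchg(). The trailing isync in
 * _uatomic_exchange() gives the exchange acquire semantics, so accesses in
 * the critical section cannot be reordered before the lock is taken. The
 * example_* names are hypothetical.
 */
static inline void example_spin_lock(int *lock)
{
	/* Atomically swap in 1; loop while the previous value shows it held. */
	while (uatomic_xchg(lock, 1) != 0)
		;	/* busy-wait */
}

static inline void example_spin_unlock(int *lock)
{
	/* Release barrier: order critical-section accesses before the store. */
	__asm__ __volatile__("lwsync" : : : "memory");
	uatomic_set(lock, 0);
}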
/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			      unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val;
		__asm__ __volatile__(
			"lwsync\n"
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"cmpw %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stwcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"((unsigned int)_new),
				  "r"((unsigned int)old)
				: "memory", "cc");

		return (unsigned int)old_val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old_val;

		__asm__ __volatile__(
			"lwsync\n"
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"cmpd %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stdcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"((unsigned long)_new),
				  "r"((unsigned long)old)
				: "memory", "cc");

		return old_val;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}
#define uatomic_cmpxchg(addr, old, _new)				    \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	    \
						sizeof(*(addr))))
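
/*
 * Illustrative sketch, not part of the original header: the classic
 * compare-and-swap retry loop built on uatomic_cmpxchg(), here adding 1 to
 * a counter but never past a caller-supplied cap. cmpxchg returns the value
 * it observed in memory, so the update succeeded iff that value equals the
 * "old" we passed in. The example_* name is hypothetical.
 */
static inline long example_add_capped(long *addr, long cap)
{
	long old, newval;

	do {
		old = uatomic_read(addr);
		if (old >= cap)
			return old;	/* already at the cap, no update */
		newval = old + 1;
	} while (uatomic_cmpxchg(addr, old, newval) != old);
	return newval;
}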
/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				 int len)
{
	switch (len) {
	case 4:
	{
		unsigned int result;
		__asm__ __volatile__(
			"lwsync\n"
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"add %0,%2,%0\n"	/* add val to value loaded */
			"stwcx. %0,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			"lwsync\n"
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"add %0,%2,%0\n"	/* add val to value loaded */
			"stdcx. %0,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}
#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))
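
/*
 * Illustrative sketch, not part of the original header: allocating unique
 * sequence numbers with uatomic_add_return(). The primitive returns the
 * post-increment value, so concurrent callers each observe a distinct id
 * without any retry loop. The example_* name is hypothetical.
 */
static inline unsigned long example_next_id(unsigned long *seq)
{
	return uatomic_add_return(seq, 1);
}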
/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))

#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define uatomic_sub(addr, v)		(void)uatomic_sub_return((addr), (v))

#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define uatomic_dec(addr)		uatomic_add((addr), -1)
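
/*
 * Illustrative sketch, not part of the original header: a minimal reference
 * counter on top of the derived helpers. The release side must use
 * uatomic_sub_return() rather than uatomic_sub(), because only the _return
 * variant lets the caller observe the transition to zero. The example_*
 * names are hypothetical.
 */
struct example_ref {
	long refcount;
};

static inline void example_ref_get(struct example_ref *ref)
{
	uatomic_inc(&ref->refcount);
}

static inline int example_ref_put(struct example_ref *ref)
{
	/* Returns nonzero when the last reference was just dropped. */
	return uatomic_sub_return(&ref->refcount, 1) == 0;
}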
#define URCU_CAS_AVAIL()	1
#define compat_uatomic_cmpxchg(ptr, old, _new)	uatomic_cmpxchg(ptr, old, _new)

#endif /* _URCU_ARCH_UATOMIC_PPC_H */