#ifndef _URCU_ARCH_UATOMIC_PPC_H
#define _URCU_ARCH_UATOMIC_PPC_H

/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2004 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2009      Mathieu Desnoyers
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifdef __NO_LWSYNC__
#define LWSYNC_OPCODE	"sync\n"
#else
#define LWSYNC_OPCODE	"lwsync\n"
#endif

#define ILLEGAL_INSTR	".long	0xd00d00"

/*
 * Using an isync as the second barrier for exchange to provide acquire
 * semantics. According to uatomic_ops/sysdeps/gcc/powerpc.h, the
 * documentation is "fairly explicit that this also has acquire semantics."
 * Derived from AO_compare_and_swap(), but removed the comparison.
 */

/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"stwcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"stdcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
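
/*
 * Usage sketch (illustrative only, not taken from the original header):
 * uatomic_xchg() atomically stores the new value into *addr and returns the
 * value that was there before. The names below (lock_taken, example_lock)
 * are hypothetical.
 *
 *	static unsigned long lock_taken;	// hypothetical lock word
 *
 *	void example_lock(void)			// hypothetical helper
 *	{
 *		// spin until the previous value was 0, i.e. we took the lock
 *		while (uatomic_xchg(&lock_taken, 1UL) != 0)
 *			continue;
 *	}
 */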

/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"cmpw %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stwcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"((unsigned int)_new),
				  "r"((unsigned int)old)
				: "memory", "cc");

		return old_val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old_val;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"cmpd %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stdcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"((unsigned long)_new),
				  "r"((unsigned long)old)
				: "memory", "cc");

		return old_val;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				    \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	    \
						sizeof(*(addr))))
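
/*
 * Usage sketch (illustrative only, not taken from the original header):
 * uatomic_cmpxchg() atomically compares *addr with old and, only if they
 * match, stores _new. It always returns the value that was read, so the
 * swap succeeded exactly when the return value equals old. The names below
 * (refcount, example_try_update) are hypothetical.
 *
 *	static unsigned long refcount = 1;	// hypothetical counter
 *
 *	// returns nonzero when the swap from old to new_val actually happened
 *	int example_try_update(unsigned long old, unsigned long new_val)
 *	{
 *		return uatomic_cmpxchg(&refcount, old, new_val) == old;
 *	}
 */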

/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
				  int len)
{
	switch (len) {
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"add %0,%2,%0\n"	/* add val to value loaded */
			"stwcx. %0,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"add %0,%2,%0\n"	/* add val to value loaded */
			"stdcx. %0,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"isync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#endif
	}
	/* generate an illegal instruction. Cannot catch this with linker tricks
	 * when optimizations are disabled. */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}

#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))
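
/*
 * Usage sketch (illustrative only, not taken from the original header):
 * uatomic_add_return() atomically adds v to *addr and returns the updated
 * value. The names below (nr_events, example_account_event) are hypothetical.
 *
 *	static unsigned long nr_events;		// hypothetical counter
 *
 *	unsigned long example_account_event(void)
 *	{
 *		// returns the count after this increment, e.g. to detect
 *		// crossing a threshold exactly once
 *		return uatomic_add_return(&nr_events, 1UL);
 *	}
 */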

#ifdef __cplusplus
}
#endif

#include <urcu/uatomic_generic.h>

#endif /* _URCU_ARCH_UATOMIC_PPC_H */