// SPDX-FileCopyrightText: 1991-1994 by Xerox Corporation. All rights reserved.
// SPDX-FileCopyrightText: 1996-1999 by Silicon Graphics. All rights reserved.
// SPDX-FileCopyrightText: 1999-2004 Hewlett-Packard Development Company, L.P.
// SPDX-FileCopyrightText: 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
//
// SPDX-License-Identifier: LicenseRef-Boehm-GC

#ifndef _URCU_ARCH_UATOMIC_PPC_H
#define _URCU_ARCH_UATOMIC_PPC_H

/*
 * Code inspired from libuatomic_ops-1.2, inherited in part from the
 * Boehm-Demers-Weiser conservative garbage collector.
 */

#include <urcu/compiler.h>
#include <urcu/system.h>

#ifdef __cplusplus
extern "C" {
#endif

#define ILLEGAL_INSTR	".long	0xd00d00"

/*
 * Providing sequential consistency semantic with respect to other
 * instructions for cmpxchg and add_return family of atomic primitives.
 *
 * This is achieved with:
 *    lwsync (prior stores can be reordered after following loads)
 *    lwarx
 *    stwcx.
 *    test if success (retry)
 *    sync
 *
 * Explanation of the sequential consistency provided by this scheme
 * from Paul E. McKenney:
 *
 * The reason we can get away with the lwsync before is that if a prior
 * store reorders with the lwarx, then you have to store to the atomic
 * variable from some other CPU to detect it.
 *
 * And if you do that, the lwarx will lose its reservation, so the stwcx
 * will fail. The atomic operation will retry, so that the caller won't be
 * able to see the misordering.
 */

/* xchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"stwcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"sync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"stdcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"sync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}

#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr),		    \
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
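
/*
 * Illustrative usage sketch, not part of the original header: uatomic_xchg()
 * atomically stores the new value into *addr and returns the previous value,
 * with full ordering around the exchange as documented above. The variable
 * and function names below are hypothetical.
 *
 *	static unsigned long wake_pending;
 *
 *	// Consume a pending wakeup at most once across concurrent callers.
 *	if (uatomic_xchg(&wake_pending, 0) != 0)
 *		do_wakeup();
 */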

/* cmpxchg */

static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"cmpw %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stwcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"sync\n"
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"((unsigned int)_new),
				  "r"((unsigned int)old)
				: "memory", "cc");

		return old_val;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old_val;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"cmpd %0,%3\n"		/* if load is not equal to */
			"bne 2f\n"		/* old, fail */
			"stdcx. %2,0,%1\n"	/* else store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"sync\n"
		"2:\n"
				: "=&r"(old_val)
				: "r"(addr), "r"((unsigned long)_new),
				  "r"((unsigned long)old)
				: "memory", "cc");

		return old_val;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}

#define uatomic_cmpxchg(addr, old, _new)				      \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			      \
						caa_cast_long_keep_sign(old), \
						caa_cast_long_keep_sign(_new),\
						sizeof(*(addr))))
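
/*
 * Illustrative usage sketch, not part of the original header: the usual
 * compare-and-swap retry loop. uatomic_cmpxchg() returns the value found in
 * *addr; the store happened only if that value equals the expected old value.
 * The counter name is hypothetical; uatomic_read() comes from the generic
 * uatomic layer included at the end of this header.
 *
 *	static unsigned long refcount;
 *	unsigned long old, newval;
 *
 *	do {
 *		old = uatomic_read(&refcount);
 *		newval = old + 1;
 *	} while (uatomic_cmpxchg(&refcount, old, newval) != old);
 */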

/* uatomic_add_return */

static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val,
			int len)
{
	switch (len) {
	case 4:
	{
		unsigned int result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"lwarx %0,0,%1\n"	/* load and reserve */
			"add %0,%2,%0\n"	/* add val to value loaded */
			"stwcx. %0,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"sync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#if (CAA_BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long result;

		__asm__ __volatile__(
			LWSYNC_OPCODE
		"1:\t"	"ldarx %0,0,%1\n"	/* load and reserve */
			"add %0,%2,%0\n"	/* add val to value loaded */
			"stdcx. %0,0,%1\n"	/* store conditional */
			"bne- 1b\n"		/* retry if lost reservation */
			"sync\n"
				: "=&r"(result)
				: "r"(addr), "r"(val)
				: "memory", "cc");

		return result;
	}
#endif
	}
	/*
	 * generate an illegal instruction. Cannot catch this with
	 * linker tricks when optimizations are disabled.
	 */
	__asm__ __volatile__(ILLEGAL_INSTR);
	return 0;
}

#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						caa_cast_long_keep_sign(v), \
						sizeof(*(addr))))
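
/*
 * Illustrative usage sketch, not part of the original header:
 * uatomic_add_return() adds the operand and returns the resulting value, so a
 * caller can detect a drop to zero. The names below are hypothetical.
 *
 *	static unsigned long nr_users;
 *
 *	static void put_user_ref(void)
 *	{
 *		if (uatomic_add_return(&nr_users, -1) == 0)
 *			cleanup_resources();
 *	}
 */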

#ifdef __cplusplus
}
#endif

#include <urcu/uatomic/generic.h>

#endif /* _URCU_ARCH_UATOMIC_PPC_H */