This is set by gcc when the target does not support the lwsync
opcode. This is the case for e500 core based CPUs.
I haven't touched tests/api_ppc.h because I don't see any users.
Signed-off-by: Sebastian Andrzej Siewior <bigeasy@linutronix.de>
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
+#ifdef __NO_LWSYNC__
+#define LWSYNC_OPCODE "sync\n"
+#else
+#define LWSYNC_OPCODE "lwsync\n"
+#endif
+
#ifndef BITS_PER_LONG
#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
#endif
#ifndef BITS_PER_LONG
#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
#endif
unsigned int result;
__asm__ __volatile__(
unsigned int result;
__asm__ __volatile__(
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"stwcx. %2,0,%1\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"stwcx. %2,0,%1\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
unsigned long result;
__asm__ __volatile__(
unsigned long result;
__asm__ __volatile__(
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"stdcx. %2,0,%1\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"stdcx. %2,0,%1\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
unsigned int old_val;
__asm__ __volatile__(
unsigned int old_val;
__asm__ __volatile__(
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"cmpd %0,%3\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"cmpd %0,%3\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
unsigned long old_val;
__asm__ __volatile__(
unsigned long old_val;
__asm__ __volatile__(
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"cmpd %0,%3\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"cmpd %0,%3\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
unsigned int result;
__asm__ __volatile__(
unsigned int result;
__asm__ __volatile__(
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"add %0,%2,%0\n" /* add val to value loaded */
"stwcx. %0,0,%1\n" /* store conditional */
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"add %0,%2,%0\n" /* add val to value loaded */
"stwcx. %0,0,%1\n" /* store conditional */
unsigned long result;
__asm__ __volatile__(
unsigned long result;
__asm__ __volatile__(
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"add %0,%2,%0\n" /* add val to value loaded */
"stdcx. %0,0,%1\n" /* store conditional */
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"add %0,%2,%0\n" /* add val to value loaded */
"stdcx. %0,0,%1\n" /* store conditional */