/*
 * uatomic_read - one-shot read of *addr.
 * Delegates to LOAD_SHARED (defined elsewhere in the project; presumably a
 * volatile-qualified load that prevents the compiler from refetching or
 * tearing the access — NOTE(review): confirm against its definition).
 */
#define uatomic_read(addr) LOAD_SHARED(*(addr))
/* xchg */

/*
 * _uatomic_exchange - atomically store @val at @addr, return the old value.
 * @addr: target word (4- or 8-byte aligned to match @len)
 * @len:  operand width in bytes: 4, or 8 when BITS_PER_LONG == 64
 *
 * NOTE(review): this chunk arrived with diff markers and with the asm
 * instruction templates elided; the bodies below restore the standard
 * s390 compare-and-swap retry loop and close the switch/function.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(volatile void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int old_val;

		/*
		 * CS compares %0 with the word at %1: on mismatch (cc 1) it
		 * loads the current memory value into %0, on match it stores
		 * %2.  Looping while cc == 1 (brc 4) therefore implements an
		 * unconditional exchange; old_val needs no initial load.
		 */
		__asm__ __volatile__(
			"0:	cs %0,%2,%1\n"
			"	brc 4,0b\n"
			: "=&r" (old_val), "=m" (*(unsigned int *) addr)
			: "r" (val), "m" (*(unsigned int *) addr)
			: "memory", "cc");
		return old_val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long old_val;

		/* Same retry loop, CSG for the 64-bit operand. */
		__asm__ __volatile__(
			"0:	csg %0,%2,%1\n"
			"	brc 4,0b\n"
			: "=&r" (old_val), "=m" (*(unsigned long *) addr)
			: "r" (val), "m" (*(unsigned long *) addr)
			: "memory", "cc");
		return old_val;
	}
#endif
	default:
		/* Unsupported operand size: trap with an illegal opcode. */
		__asm__ __volatile__(".long	0xd00d00");
	}

	return 0;
}
/*
 * _uatomic_cmpxchg - atomic compare-and-swap.
 * @addr: target word (4- or 8-byte aligned to match @len)
 * @old:  expected current value
 * @_new: value to store if *addr == @old (named `_new`, not `new`, so the
 *        header also compiles when included from C++)
 * @len:  operand width in bytes: 4, or 8 when BITS_PER_LONG == 64
 *
 * Returns the value found at @addr: equal to @old iff the swap happened.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		/* CS operates on the low 32 bits only. */
		unsigned int old_val = (unsigned int)old;

		/* Single CS: cc tells success/failure, but callers only
		 * compare the returned value against @old — no retry loop. */
		__asm__ __volatile__(
			"	cs %0,%2,%1\n"
			: "+r" (old_val), "+m" (*(unsigned int *) addr)
			: "r" (_new)
			: "memory", "cc");
		return old_val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__(
			"	csg %0,%2,%1\n"
			: "+r" (old), "+m" (*(unsigned long *) addr)
			: "r" (_new)
			: "memory", "cc");
		return old;
	}
#endif
	default:
		/* Unsupported operand size: trap with an illegal opcode. */
		__asm__ __volatile__(".long	0xd00d00");
	}

	return 0;
}
/*
 * uatomic_cmpxchg - type-generic compare-and-swap wrapper.
 * Widens @old/@_new to unsigned long, dispatches on sizeof(*(addr)), and
 * casts the result back to the pointee type.  The parameter is spelled
 * `_new` (not `new`) so the macro is usable from C++ translation units.
 * Expansion is fully parenthesized so it composes safely in expressions.
 */
#define uatomic_cmpxchg(addr, old, _new)				\
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr),			\
						(unsigned long)(old),	\
						(unsigned long)(_new),	\
						sizeof(*(addr))))
/* uatomic_add_return */