#endif
#ifndef uatomic_set
-#define uatomic_set(addr, v) CMM_STORE_SHARED(*(addr), (v))
+#define uatomic_set(addr, v) ((void) CMM_STORE_SHARED(*(addr), (v)))
#endif
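/*
 * Usage sketch, not part of this patch (assumes <urcu/uatomic.h>):
 * CMM_STORE_SHARED() evaluates to the stored value, so the old
 * uatomic_set() could silently be used as an expression; the added
 * (void) cast makes that a compile error.
 */
static int ready;

static void example_set(void)
{
	uatomic_set(&ready, 1);	/* fine: used as a statement */
	/* int r = uatomic_set(&ready, 1); <-- no longer compiles */
}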
#ifndef uatomic_read
#if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR
static inline __attribute__((always_inline))
-void _uatomic_link_error()
+void _uatomic_link_error(void)
{
#ifdef ILLEGAL_INSTR
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction; we cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__(ILLEGAL_INSTR);
#else
- __builtin_trap ();
+ __builtin_trap();
#endif
}
#else /* #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
-extern void _uatomic_link_error ();
+extern void _uatomic_link_error(void);
#endif /* #else #if !defined __OPTIMIZE__ || defined UATOMIC_NO_LINK_ERROR */
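/*
 * Illustrative sketch, not part of this patch: with optimizations on,
 * the size dispatch in the switches below is resolved at compile time,
 * so the call to _uatomic_link_error() is eliminated for supported
 * sizes; a call that survives (unsupported operand size) has no
 * definition in any object and fails at link time. Without
 * optimizations the call cannot be proven dead, so the always_inline
 * definition above traps at run time instead.
 */
static char byte_counter;	/* hypothetical 1-byte counter */

static void example_bad_size(void)
{
	/*
	 * On a target without UATOMIC_HAS_ATOMIC_BYTE this reaches the
	 * fallthrough path: undefined reference under -O, trap under -O0.
	 */
	uatomic_add(&byte_counter, 1);
}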
/* cmpxchg */
}
-#define uatomic_cmpxchg(addr, old, _new) \
- ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
- (unsigned long)(_new), \
+#define uatomic_cmpxchg(addr, old, _new) \
+ ((__typeof__(*(addr))) _uatomic_cmpxchg((addr), \
+ caa_cast_long_keep_sign(old), \
+ caa_cast_long_keep_sign(_new), \
sizeof(*(addr))))
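/*
 * Usage sketch, not part of this patch: uatomic_cmpxchg() returns the
 * value previously stored at addr. caa_cast_long_keep_sign() widens
 * the operands to register size while preserving their signedness.
 */
static int state;

static int try_transition(int from, int to)
{
	/* Nonzero when we won the race and installed `to`. */
	return uatomic_cmpxchg(&state, from, to) == from;
}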
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
__sync_and_and_fetch_1(addr, val);
+ return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
__sync_and_and_fetch_2(addr, val);
+ return;
#endif
case 4:
__sync_and_and_fetch_4(addr, val);
+ return;
#if (CAA_BITS_PER_LONG == 64)
case 8:
__sync_and_and_fetch_8(addr, val);
+ return;
#endif
}
_uatomic_link_error();
- return 0;
}
#define uatomic_and(addr, v) \
(_uatomic_and((addr), \
- (unsigned long)(v), \
- sizeof(*(addr))))
+ caa_cast_long_keep_sign(v), \
+ sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_and() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and() cmm_barrier()
+
#endif
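/*
 * Usage sketch, not part of this patch. The __sync builtins used here
 * already imply a full barrier, so the new
 * cmm_smp_mb__before/after_uatomic_and() pairs reduce to cmm_barrier(),
 * a compiler-only barrier; backends with weaker uatomic_and()
 * implementations can map them to real fences.
 */
static unsigned long flag_word;

static void clear_flags(unsigned long mask)
{
	cmm_smp_mb__before_uatomic_and();
	uatomic_and(&flag_word, ~mask);
	cmm_smp_mb__after_uatomic_and();
}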
/* uatomic_or */
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
__sync_or_and_fetch_1(addr, val);
+ return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
__sync_or_and_fetch_2(addr, val);
+ return;
#endif
case 4:
__sync_or_and_fetch_4(addr, val);
+ return;
#if (CAA_BITS_PER_LONG == 64)
case 8:
__sync_or_and_fetch_8(addr, val);
+ return;
#endif
}
_uatomic_link_error();
- return 0;
}
#define uatomic_or(addr, v) \
(_uatomic_or((addr), \
- (unsigned long)(v), \
- sizeof(*(addr))))
+ caa_cast_long_keep_sign(v), \
+ sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_or() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or() cmm_barrier()
+
#endif
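/*
 * Usage sketch, not part of this patch: a publish pattern ordering a
 * payload store before setting a ready bit. READY, msg_state and
 * msg_payload are hypothetical names.
 */
#define READY	0x1UL

static unsigned long msg_state;
static int msg_payload;

static void publish(int payload)
{
	msg_payload = payload;
	/* Order the payload store before the flag update. */
	cmm_smp_mb__before_uatomic_or();
	uatomic_or(&msg_state, READY);
}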
+
/* uatomic_add_return */
#ifndef uatomic_add_return
}
-#define uatomic_add_return(addr, v) \
- ((__typeof__(*(addr))) _uatomic_add_return((addr), \
- (unsigned long)(v), \
- sizeof(*(addr))))
+#define uatomic_add_return(addr, v) \
+ ((__typeof__(*(addr))) _uatomic_add_return((addr), \
+ caa_cast_long_keep_sign(v), \
+ sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
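/*
 * Usage sketch, not part of this patch: uatomic_add_return() returns
 * the new value, which makes reference counting direct.
 */
static long refcount = 1;

static void ref_get(void)
{
	(void) uatomic_add_return(&refcount, 1);
}

static int ref_put(void)
{
	/* Nonzero when the last reference was just dropped. */
	return uatomic_add_return(&refcount, -1) == 0;
}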
#ifndef uatomic_xchg
}
#define uatomic_xchg(addr, v) \
- ((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
+ ((__typeof__(*(addr))) _uatomic_exchange((addr), \
+ caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
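/*
 * Usage sketch, not part of this patch: uatomic_xchg() returns the
 * previous value, so a shared list can be detached wholesale.
 */
struct ex_node {
	struct ex_node *next;
};

static struct ex_node *ex_head;

static struct ex_node *steal_all(void)
{
	/* Atomically take the whole list, leaving it empty. */
	return uatomic_xchg(&ex_head, NULL);
}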
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old & val, 1);
} while (oldt != old);
+
+ return;
}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old & val, 4);
} while (oldt != old);
+
+ return;
}
#if (CAA_BITS_PER_LONG == 64)
case 8:
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old & val, 8);
} while (oldt != old);
+
+ return;
}
#endif
}
_uatomic_link_error();
- return 0;
}
-#define uatomic_and(addr, v) \
- (uatomic_and((addr), \
- (unsigned long)(v), \
- sizeof(*(addr))))
+#define uatomic_and(addr, v) \
+ (_uatomic_and((addr), \
+ caa_cast_long_keep_sign(v), \
+ sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_and() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and() cmm_barrier()
+
#endif /* #ifndef uatomic_and */
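/*
 * Illustrative sketch, not part of this patch: without a native
 * fetch-and-and of the right size, the fallback above emulates it with
 * a cmpxchg retry loop. A standalone equivalent:
 */
static void emulated_and(unsigned long *addr, unsigned long mask)
{
	unsigned long old, oldt;

	oldt = uatomic_read(addr);
	do {
		old = oldt;
		/* Retry until no concurrent update raced with ours. */
		oldt = uatomic_cmpxchg(addr, old, old & mask);
	} while (oldt != old);
}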
#ifndef uatomic_or
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old | val, 1);
} while (oldt != old);
+
+ return;
}
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old | val, 2);
} while (oldt != old);
+
+ return;
}
#endif
case 4:
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old | val, 4);
} while (oldt != old);
+
+ return;
}
#if (CAA_BITS_PER_LONG == 64)
case 8:
old = oldt;
oldt = _uatomic_cmpxchg(addr, old, old | val, 8);
} while (oldt != old);
+
+ return;
}
#endif
}
_uatomic_link_error();
- return 0;
}
-#define uatomic_or(addr, v) \
- (uatomic_or((addr), \
- (unsigned long)(v), \
- sizeof(*(addr))))
+#define uatomic_or(addr, v) \
+ (_uatomic_or((addr), \
+ caa_cast_long_keep_sign(v), \
+ sizeof(*(addr))))
+#define cmm_smp_mb__before_uatomic_or() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or() cmm_barrier()
+
#endif /* #ifndef uatomic_or */
#ifndef uatomic_add_return
return 0;
}
-#define uatomic_add_return(addr, v) \
- ((__typeof__(*(addr))) _uatomic_add_return((addr), \
- (unsigned long)(v), \
- sizeof(*(addr))))
+#define uatomic_add_return(addr, v) \
+ ((__typeof__(*(addr))) _uatomic_add_return((addr), \
+ caa_cast_long_keep_sign(v), \
+ sizeof(*(addr))))
#endif /* #ifndef uatomic_add_return */
#ifndef uatomic_xchg
}
#define uatomic_xchg(addr, v) \
- ((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
+ ((__typeof__(*(addr))) _uatomic_exchange((addr), \
+ caa_cast_long_keep_sign(v), \
sizeof(*(addr))))
#endif /* #ifndef uatomic_xchg */
#ifndef uatomic_add
#define uatomic_add(addr, v) (void)uatomic_add_return((addr), (v))
+#define cmm_smp_mb__before_uatomic_add() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add() cmm_barrier()
#endif
-#define uatomic_sub_return(addr, v) uatomic_add_return((addr), -(v))
-#define uatomic_sub(addr, v) uatomic_add((addr), -(v))
+#define uatomic_sub_return(addr, v) \
+ uatomic_add_return((addr), -(caa_cast_long_keep_sign(v)))
+#define uatomic_sub(addr, v) \
+ uatomic_add((addr), -(caa_cast_long_keep_sign(v)))
+#define cmm_smp_mb__before_uatomic_sub() cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_sub() cmm_smp_mb__after_uatomic_add()
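/*
 * Illustrative note, not part of this patch: negating before widening
 * was the bug here. uatomic_sub(&counter, 1U) on a 64-bit counter used
 * to compute -(1U) == 0xffffffffU, zero-extend it to unsigned long,
 * and add ~4 billion. Widening first with caa_cast_long_keep_sign()
 * makes the negation happen in the long domain and yields -1 as
 * intended.
 */
static unsigned long counter64;

static void sub_one(void)
{
	unsigned int one = 1;

	uatomic_sub(&counter64, one);	/* subtracts exactly 1 now */
}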
#ifndef uatomic_inc
#define uatomic_inc(addr) uatomic_add((addr), 1)
+#define cmm_smp_mb__before_uatomic_inc() cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_inc() cmm_smp_mb__after_uatomic_add()
#endif
#ifndef uatomic_dec
#define uatomic_dec(addr) uatomic_add((addr), -1)
+#define cmm_smp_mb__before_uatomic_dec() cmm_smp_mb__before_uatomic_add()
+#define cmm_smp_mb__after_uatomic_dec() cmm_smp_mb__after_uatomic_add()
#endif
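/*
 * Usage sketch, not part of this patch: pairing the new barrier macros
 * with inc/dec costs nothing extra on backends where the operation is
 * already fully ordered.
 */
static unsigned long nr_pending;
static int work_item;

static void post_work(int w)
{
	work_item = w;
	/* Order the work store before advertising it via the count. */
	cmm_smp_mb__before_uatomic_inc();
	uatomic_inc(&nr_pending);
}

static void ack_work(void)
{
	uatomic_dec(&nr_pending);
	/* Order the decrement before subsequent cleanup stores. */
	cmm_smp_mb__after_uatomic_dec();
}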
#ifdef __cplusplus