};
#define __hp(x) ((struct __uatomic_dummy *)(x))
-#define _uatomic_set(addr, v) CMM_STORE_SHARED(*(addr), (v))
+#define _uatomic_set(addr, v) ((void) CMM_STORE_SHARED(*(addr), (v)))
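Side note on the hunk above (my annotation, not part of the patch): casting the store to void turns any attempt to consume a "return value" from uatomic_set() into a compile-time error. A minimal sketch, assuming the public uatomic_set() maps onto _uatomic_set() and using a hypothetical global_count:

#include <urcu/uatomic.h>

static unsigned long global_count;	/* hypothetical counter */

static void reset_count(void)
{
	uatomic_set(&global_count, 0);	/* statement context: fine */

	/*
	 * With the (void) cast this no longer compiles, which is the
	 * point of the change:
	 *
	 * unsigned long v = uatomic_set(&global_count, 0);
	 */
}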
/* cmpxchg */
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return 0;
}
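The recurring hunks above and below only reflow this comment, but the pattern they document is worth a sketch (mine, an assumed shape, not the library's exact code). Each sized helper switches on the operand length and deliberately executes ud2 for unsupported sizes. The alternative safety net, referencing an undefined symbol in the dead branch so the link fails, cannot be used here: at -O0 the dead default branch is never eliminated, so every build would fail to link, whereas the runtime trap works at any optimization level.

/* Sketch of the ud2 fallback pattern, assuming an x86 target. */
static inline unsigned long sketch_uatomic_load(void *addr, int len)
{
	switch (len) {
	case 1:
		return *(unsigned char *)addr;
	case 2:
		return *(unsigned short *)addr;
	case 4:
		return *(unsigned int *)addr;
	}
	/*
	 * Unsupported size: trap with SIGILL at runtime rather than
	 * misbehave silently. Reached only if a caller passes a size
	 * the architecture cannot handle.
	 */
	__asm__ __volatile__("ud2");
	return 0;
}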
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * Generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
: (compat_uatomic_##insn)) \
: (compat_uatomic_##insn))))
+/*
+ * We keep the return value so we don't break the ABI, but remove it
+ * from the macro API.
+ */
extern unsigned long _compat_uatomic_set(void *addr,
unsigned long _new, int len);
#define compat_uatomic_set(addr, _new) \
- ((__typeof__(*(addr))) _compat_uatomic_set((addr), \
- caa_cast_long_keep_sign(_new), \
- sizeof(*(addr))))
+ ((void) _compat_uatomic_set((addr), \
+ caa_cast_long_keep_sign(_new), \
+ sizeof(*(addr))))
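To illustrate the ABI/API split the comment describes (an assumed shape, not the library's actual definition): the exported symbol keeps its unsigned long return so already-built binaries keep linking against an unchanged signature, while the macro discards the value so newly compiled callers cannot depend on it.

/* Illustrative definition only; the real one lives in the compat code. */
unsigned long _compat_uatomic_set(void *addr, unsigned long _new, int len)
{
	switch (len) {
	case 1:
		*(unsigned char *)addr = (unsigned char)_new;
		break;
	case 2:
		*(unsigned short *)addr = (unsigned short)_new;
		break;
	case 4:
		*(unsigned int *)addr = (unsigned int)_new;
		break;
	}
	return _new;	/* kept for the ABI, hidden by the macro (API) */
}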
extern unsigned long _compat_uatomic_xchg(void *addr,
		unsigned long _new, int len);
#define uatomic_and(addr, v) \
UATOMIC_COMPAT(and(addr, v))
-#define cmm_smp_mb__before_and() cmm_barrier()
-#define cmm_smp_mb__after_and() cmm_barrier()
+#define cmm_smp_mb__before_uatomic_and() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_and() cmm_barrier()
#define uatomic_or(addr, v) \
UATOMIC_COMPAT(or(addr, v))
-#define cmm_smp_mb__before_or() cmm_barrier()
-#define cmm_smp_mb__after_or() cmm_barrier()
+#define cmm_smp_mb__before_uatomic_or() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_or() cmm_barrier()
#define uatomic_add_return(addr, v) \
UATOMIC_COMPAT(add_return(addr, v))
#define uatomic_add(addr, v) UATOMIC_COMPAT(add(addr, v))
-#define cmm_smp_mb__before_add() cmm_barrier()
-#define cmm_smp_mb__after_add() cmm_barrier()
+#define cmm_smp_mb__before_uatomic_add() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_add() cmm_barrier()
#define uatomic_inc(addr) UATOMIC_COMPAT(inc(addr))
-#define cmm_smp_mb__before_inc() cmm_barrier()
-#define cmm_smp_mb__after_inc() cmm_barrier()
+#define cmm_smp_mb__before_uatomic_inc() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_inc() cmm_barrier()
#define uatomic_dec(addr) UATOMIC_COMPAT(dec(addr))
-#define cmm_smp_mb__before_dec() cmm_barrier()
-#define cmm_smp_mb__after_dec() cmm_barrier()
+#define cmm_smp_mb__before_uatomic_dec() cmm_barrier()
+#define cmm_smp_mb__after_uatomic_dec() cmm_barrier()
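Usage-wise, each renamed barrier pairs with the atomic op it names. A minimal caller sketch (hits is a hypothetical counter): on x86 both macros expand to cmm_barrier(), a pure compiler barrier, because the lock-prefixed instruction already acts as a full memory fence.

#include <urcu/uatomic.h>

static unsigned long hits;	/* hypothetical counter */

static void record_hit(void)
{
	/*
	 * Promote the increment to a full barrier where the atomic op
	 * alone does not provide one; a no-op beyond a compiler
	 * barrier on x86.
	 */
	cmm_smp_mb__before_uatomic_inc();
	uatomic_inc(&hits);
	cmm_smp_mb__after_uatomic_inc();
}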
#ifdef __cplusplus
}