-#define uatomic_load_store_return_op(op, addr, v, mo) \
- __extension__ \
- ({ \
- \
- switch (mo) { \
- case CMM_ACQUIRE: \
- case CMM_CONSUME: \
- case CMM_RELAXED: \
- break; \
- case CMM_RELEASE: \
- case CMM_ACQ_REL: \
- case CMM_SEQ_CST: \
- case CMM_SEQ_CST_FENCE: \
- cmm_smp_mb(); \
- break; \
- default: \
- abort(); \
- } \
- \
- __typeof__((*addr)) _value = op(addr, v); \
- \
- switch (mo) { \
- case CMM_CONSUME: \
- cmm_smp_read_barrier_depends(); \
- break; \
- case CMM_ACQUIRE: \
- case CMM_ACQ_REL: \
- case CMM_SEQ_CST: \
- case CMM_SEQ_CST_FENCE: \
- cmm_smp_mb(); \
- break; \
- case CMM_RELAXED: \
- case CMM_RELEASE: \
- break; \
- default: \
- abort(); \
- } \
- _value; \
+/*
+ * Can be defined by the architecture to override the full-barrier default.
+ *
+ * Barrier to emit _before_ the `operation' with memory ordering `mo'.
+ */
+#ifndef _cmm_compat_c11_smp_mb__before_mo
+# define _cmm_compat_c11_smp_mb__before_mo(operation, mo) cmm_smp_mb()
+#endif
+
+/*
+ * Can be defined by the architecture to override the full-barrier default.
+ *
+ * Barrier to emit _after_ the `operation' with memory ordering `mo'.
+ */
+#ifndef _cmm_compat_c11_smp_mb__after_mo
+# define _cmm_compat_c11_smp_mb__after_mo(operation, mo) cmm_smp_mb()
+#endif
+
+#define uatomic_load_store_return_op(op, addr, v, mo) \
+ __extension__ \
+ ({ \
+ _cmm_compat_c11_smp_mb__before_mo(op, mo); \
+ __typeof__((*addr)) _value = op(addr, v); \
+ _cmm_compat_c11_smp_mb__after_mo(op, mo); \
+ \
+ _value; \