#define rmb() asm volatile("lfence" ::: "memory")
#define wmb() asm volatile("sfence" ::: "memory")
+/* Assume SMP machine, given we don't have this information */
+#define CONFIG_SMP 1
+
+#ifdef CONFIG_SMP
+#define smp_mb() mb()
+#define smp_rmb() rmb()
+#define smp_wmb() wmb()
+#else
+#define smp_mb() barrier()
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
+#endif
+
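/*
 * Illustration, not part of the patch: mb() and barrier(), referenced by the
 * smp_*() macros above, are assumed to be defined elsewhere in this header,
 * typically along these lines on x86:
 *
 *   #define mb()      asm volatile("mfence" ::: "memory")  // full hardware barrier
 *   #define barrier() asm volatile("" ::: "memory")        // compiler-only barrier
 *
 * On a uniprocessor build the smp_*() variants therefore only prevent the
 * compiler from reordering accesses; no fence instruction is emitted.
 */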
static inline void atomic_inc(int *v)
{
asm volatile("lock; incl %0"
#ifdef DEBUG_FULL_MB
static inline void read_barrier()
{
- mb();
+ smp_mb();
}
#else
static inline void read_barrier()
if (value == NULL)
return 0;
debug_yield_write();
+ /*
+ * Make sure both tests below are done on the same version of *value
+ * to ensure consistency.
+ */
v = ACCESS_ONCE(*value);
debug_yield_write();
return (v & RCU_GP_CTR_NEST_MASK) &&
- ((v ^ ACCESS_ONCE(urcu_gp_ctr)) & RCU_GP_CTR_BIT);
+ ((v ^ urcu_gp_ctr) & RCU_GP_CTR_BIT);
}
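/*
 * Illustration, not part of the patch: ACCESS_ONCE() is assumed to carry the
 * usual Linux-style definition,
 *
 *   #define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
 *
 * i.e. a volatile read that forces a single load of *value. Without it the
 * compiler could replace the local v with two separate reads of *value, and
 * the nesting-mask test and the phase-bit test above could then see different
 * snapshots of the reader's counter.
 */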
static inline void rcu_read_lock(void)