#define RBTREE_RCU_SUPPORT_ROTATE_LEFT
#define RBTREE_RCU_SUPPORT_ROTATE_RIGHT
#define RBTREE_RCU_SUPPORT_TRANSPLANT
+#define RBTREE_RCU_SUPPORT
+
+#ifdef RBTREE_RCU_SUPPORT
+/*
+ * RCU-enabled build: the c_* wrappers map directly to the liburcu
+ * primitives, so concurrent readers get dependency-ordered loads and
+ * updaters publish with the required write barriers.
+ */
+#define c_rcu_dereference(x) rcu_dereference(x)
+#define c_cmm_smp_wmb() cmm_smp_wmb()
+#define c_cmm_smp_wmc() cmm_smp_wmc()
+#define C_CMM_STORE_SHARED(x, v) CMM_STORE_SHARED(x, v)
+#define C__CMM_STORE_SHARED(x, v) _CMM_STORE_SHARED(x, v)
+#else
+/*
+ * Non-RCU build: dereferences become plain reads, the barriers compile
+ * to nothing, and shared stores are plain assignments. Call sites stay
+ * identical in both configurations.
+ */
+#define c_rcu_dereference(x) (x)
+#define c_cmm_smp_wmb()
+#define c_cmm_smp_wmc()
+#define C_CMM_STORE_SHARED(x, v) ((x) = (v))
+#define C__CMM_STORE_SHARED(x, v) ((x) = (v))
+#endif
+
+/*
+ * Add internal mutex locking within the RBTree, for debugging. Enable this
+ * define and add mutexes to RCU readers to debug races with rotation or
+ * transplant.
+ */
+/* #define RBTREE_INTERNAL_LOCKING */
-#ifdef EXTRA_DEBUG
+#ifdef RBTREE_INTERNAL_LOCKING
static pthread_mutex_t test_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t outer_mutex = PTHREAD_MUTEX_INITIALIZER;
{
pthread_mutex_unlock(&test_mutex);
}
+#else
+/*
+ * RBTREE_INTERNAL_LOCKING disabled: provide empty stubs so the
+ * lock/unlock call sites in the tree code need no conditional
+ * compilation.
+ */
+static
+void lock_outer_mutex(void)
+{
+}
+
+static
+void unlock_outer_mutex(void)
+{
+}
+
+static
+void lock_test_mutex(void)
+{
+}
+
+static
+void unlock_test_mutex(void)
+{
+}
#endif
static
struct rcu_rbtree_node *parent,
unsigned int pos)
{
- _CMM_STORE_SHARED(node->parent, ((unsigned long) parent) | pos);
+ C__CMM_STORE_SHARED(node->parent, ((unsigned long) parent) | pos);
}
static
struct rcu_rbtree_node *get_parent_and_pos(struct rcu_rbtree_node *node,
unsigned int *pos)
{
- unsigned long parent_pos = rcu_dereference(node->parent);
+ unsigned long parent_pos = c_rcu_dereference(node->parent);
*pos = (unsigned int) (parent_pos & 1UL);
return (struct rcu_rbtree_node *) (parent_pos & ~1UL);
node->rbtree->rbfree(node);
}
+#ifdef RBTREE_RCU_SUPPORT
+
static
struct rcu_rbtree_node *dup_decay_node(struct rcu_rbtree *rbtree,
struct rcu_rbtree_node *x)
return xc;
}
+#else /* RBTREE_RCU_SUPPORT */
+
+/*
+ * Non-RCU build: nodes are updated in place, so no copy ("decay") of
+ * the node is needed; return the node unchanged.
+ */
+static
+struct rcu_rbtree_node *dup_decay_node(struct rcu_rbtree *rbtree,
+				       struct rcu_rbtree_node *x)
+{
+	return x;
+}
+
+#endif
+
/*
* Info for range lookups:
* Range lookup information is only valid when used when searching for
struct rcu_rbtree_node *xl;
dbg_printf("searching point 0x%lx\n", (unsigned long) point);
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
while (!rcu_rbtree_is_nil(rbtree, x)) {
dbg_usleep(10);
- xl = rcu_dereference(x->_left);
+ xl = c_rcu_dereference(x->_left);
dbg_printf("search x %lx x_end %lx x_max_end %lx\n", (unsigned long) x->begin,
(unsigned long) x->end, (unsigned long) x->max_end);
dbg_printf("search xl %lx xl_end %lx xl_max_end %lx\n", (unsigned long) xl->begin,
break;
} else if (rbtree->comp(point, x->begin) > 0) {
dbg_printf("go right\n");
- x = rcu_dereference(x->_right);
+ x = c_rcu_dereference(x->_right);
} else {
dbg_printf("not found!\n");
x = make_nil(rbtree);
struct rcu_rbtree_node *x,
void *k)
{
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
int comp;
while (!rcu_rbtree_is_nil(rbtree, x) && (comp = rbtree->comp(k, x->begin)) != 0) {
dbg_usleep(10);
if (comp < 0)
- x = rcu_dereference(x->_left);
+ x = c_rcu_dereference(x->_left);
else
- x = rcu_dereference(x->_right);
+ x = c_rcu_dereference(x->_right);
}
return x;
}
{
struct rcu_rbtree_node *xl;
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
if (rcu_rbtree_is_nil(rbtree, x)) {
*zr = x;
} else
*zr = x = dup_decay_node(rbtree, x);
- while (!rcu_rbtree_is_nil(rbtree, xl = rcu_dereference(x->_left))) {
+ while (!rcu_rbtree_is_nil(rbtree, xl = c_rcu_dereference(x->_left))) {
x = dup_decay_node(rbtree, xl);
set_parent(x, get_decay(get_parent(x)), get_pos(x));
get_parent(x)->_left = get_decay(get_parent(x)->_left);
{
struct rcu_rbtree_node *xl;
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
if (rcu_rbtree_is_nil(rbtree, x))
return x;
get_pos(x->_left));
}
- while (!rcu_rbtree_is_nil(rbtree, xl = rcu_dereference(x->_left))) {
+ while (!rcu_rbtree_is_nil(rbtree, xl = c_rcu_dereference(x->_left))) {
x = xl;
set_parent(x->_right, get_decay(get_parent(x->_right)),
get_pos(x->_right));
{
struct rcu_rbtree_node *xl;
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
if (rcu_rbtree_is_nil(rbtree, x))
return x;
- while (!rcu_rbtree_is_nil(rbtree, xl = rcu_dereference(x->_left)))
+ while (!rcu_rbtree_is_nil(rbtree, xl = c_rcu_dereference(x->_left)))
x = xl;
return x;
}
{
struct rcu_rbtree_node *xr;
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
if (rcu_rbtree_is_nil(rbtree, x))
return x;
- while (!rcu_rbtree_is_nil(rbtree, xr = rcu_dereference(x->_right)))
+ while (!rcu_rbtree_is_nil(rbtree, xr = c_rcu_dereference(x->_right)))
x = xr;
return x;
}
struct rcu_rbtree_node *xr, *y;
unsigned int x_pos;
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
- if (!rcu_rbtree_is_nil(rbtree, xr = rcu_dereference(x->_right)))
+ if (!rcu_rbtree_is_nil(rbtree, xr = c_rcu_dereference(x->_right)))
return rcu_rbtree_min(rbtree, xr);
y = get_parent_and_pos(x, &x_pos);
while (!rcu_rbtree_is_nil(rbtree, y) && x_pos == IS_RIGHT) {
struct rcu_rbtree_node *xl, *y;
unsigned int x_pos;
- x = rcu_dereference(x);
+ x = c_rcu_dereference(x);
- if (!rcu_rbtree_is_nil(rbtree, xl = rcu_dereference(x->_left)))
+ if (!rcu_rbtree_is_nil(rbtree, xl = c_rcu_dereference(x->_left)))
return rcu_rbtree_max(rbtree, xl);
y = get_parent_and_pos(x, &x_pos);
while (!rcu_rbtree_is_nil(rbtree, y) && x_pos == IS_LEFT) {
node->max_end = max_end;
} else {
top = get_parent(node);
- cmm_smp_wmb(); /* write into node before publish */
+ c_cmm_smp_wmb(); /* write into node before publish */
/* make new branch visible to readers */
if (rcu_rbtree_is_nil(rbtree, top))
- _CMM_STORE_SHARED(rbtree->root, node);
+ C__CMM_STORE_SHARED(rbtree->root, node);
if (get_pos(node) == IS_LEFT)
- _CMM_STORE_SHARED(top->_left, node);
+ C__CMM_STORE_SHARED(top->_left, node);
else
- _CMM_STORE_SHARED(top->_right, node);
+ C__CMM_STORE_SHARED(top->_right, node);
goto end;
}
} while (!rcu_rbtree_is_nil(rbtree, node));
top = node; /* nil */
- cmm_smp_wmb(); /* write into node before publish */
+ c_cmm_smp_wmb(); /* write into node before publish */
/* make new branch visible to readers */
- _CMM_STORE_SHARED(rbtree->root, prev);
+ C__CMM_STORE_SHARED(rbtree->root, prev);
end:
if (!copy_parents)
x->max_end = calculate_node_max_end(rbtree, x);
y->max_end = calculate_node_max_end(rbtree, y);
- cmm_smp_wmb(); /* write into node before publish */
+ c_cmm_smp_wmb(); /* write into node before publish */
/* External references update (visible by readers) */
if (rcu_rbtree_is_nil(rbtree, get_parent(y)))
- _CMM_STORE_SHARED(rbtree->root, y);
+ C__CMM_STORE_SHARED(rbtree->root, y);
else if (get_pos(y) == IS_LEFT)
- _CMM_STORE_SHARED(get_parent(y)->_left, y);
+ C__CMM_STORE_SHARED(get_parent(y)->_left, y);
else
- _CMM_STORE_SHARED(get_parent(y)->_right, y);
+ C__CMM_STORE_SHARED(get_parent(y)->_right, y);
/* Point children to new copy (parent only used by updates/next/prev) */
set_parent(x->_left, get_decay(get_parent(x->_left)),
}
y->_left = x;
set_parent(x, y, IS_LEFT);
+
+ /*
+ * We only changed the relative position of x and y wrt their
+ * children, and reparented y (but are keeping the same nodes in
+ * place, so its parent does not need to have end value
+ * recalculated).
+ */
+ x->max_end = calculate_node_max_end(rbtree, x);
+ y->max_end = calculate_node_max_end(rbtree, y);
+
unlock_test_mutex();
}
x->max_end = calculate_node_max_end(rbtree, x);
y->max_end = calculate_node_max_end(rbtree, y);
- cmm_smp_wmb(); /* write into node before publish */
+ c_cmm_smp_wmb(); /* write into node before publish */
/* External references update (visible by readers) */
if (rcu_rbtree_is_nil(rbtree, get_parent(y)))
- _CMM_STORE_SHARED(rbtree->root, y);
+ C__CMM_STORE_SHARED(rbtree->root, y);
else if (get_pos(y) == IS_RIGHT)
- _CMM_STORE_SHARED(get_parent(y)->_right, y);
+ C__CMM_STORE_SHARED(get_parent(y)->_right, y);
else
- _CMM_STORE_SHARED(get_parent(y)->_left, y);
+ C__CMM_STORE_SHARED(get_parent(y)->_left, y);
/* Point children to new copy (parent only used by updates/next/prev) */
set_parent(x->_right, get_decay(get_parent(x->_right)),
}
y->_right = x;
set_parent(x, y, IS_RIGHT);
+
+ /*
+ * We only changed the relative position of x and y wrt their
+ * children, and reparented y (but are keeping the same nodes in
+ * place, so its parent does not need to have end value
+ * recalculated).
+ */
+ x->max_end = calculate_node_max_end(rbtree, x);
+ y->max_end = calculate_node_max_end(rbtree, y);
+
unlock_test_mutex();
}
* Order stores to z (children/parents) before stores
* that will make it visible to the rest of the tree.
*/
- cmm_smp_wmb();
- _CMM_STORE_SHARED(rbtree->root, z);
+ c_cmm_smp_wmb();
+ C__CMM_STORE_SHARED(rbtree->root, z);
} else if (rbtree->comp(z->begin, y->begin) < 0) {
y = dup_decay_node(rbtree, y);
set_parent(z, y, IS_LEFT);
if (get_pos(z) == IS_LEFT)
- _CMM_STORE_SHARED(y->_left, z);
+ C__CMM_STORE_SHARED(y->_left, z);
else
- _CMM_STORE_SHARED(y->_right, z);
+ C__CMM_STORE_SHARED(y->_right, z);
populate_node_end(rbtree, y, 1, NULL);
} else {
y = dup_decay_node(rbtree, y);
set_parent(z, y, IS_RIGHT);
if (get_pos(z) == IS_LEFT)
- _CMM_STORE_SHARED(y->_left, z);
+ C__CMM_STORE_SHARED(y->_left, z);
else
- _CMM_STORE_SHARED(y->_right, z);
+ C__CMM_STORE_SHARED(y->_right, z);
populate_node_end(rbtree, y, 1, NULL);
}
rcu_rbtree_insert_fixup(rbtree, z);
/*
- * Make sure to commit all _CMM_STORE_SHARED() for non-coherent caches.
+ * Make sure to commit all C__CMM_STORE_SHARED() for non-coherent caches.
*/
- cmm_smp_wmc();
+ c_cmm_smp_wmc();
show_tree(rbtree);
check_max_end(rbtree, z);
check_max_end(rbtree, y);
if (rcu_rbtree_is_nil(rbtree, get_parent(u))) {
/* pos is arbitrary for root node */
set_parent(v, get_parent(u), IS_RIGHT);
- cmm_smp_wmb(); /* write into node before publish */
- _CMM_STORE_SHARED(rbtree->root, v);
+ c_cmm_smp_wmb(); /* write into node before publish */
+ C__CMM_STORE_SHARED(rbtree->root, v);
} else {
struct rcu_rbtree_node *vp;
vp = dup_decay_node(rbtree, vp);
set_parent(v, vp, get_pos(u));
if (get_pos(v) == IS_LEFT)
- _CMM_STORE_SHARED(vp->_left, v);
+ C__CMM_STORE_SHARED(vp->_left, v);
else
- _CMM_STORE_SHARED(vp->_right, v);
+ C__CMM_STORE_SHARED(vp->_right, v);
populate_node_end(rbtree, vp, copy_parents, stop);
check_max_end(rbtree, vp);
}
dbg_printf("transplant %p\n", v->begin);
lock_test_mutex();
- if (rcu_rbtree_is_nil(rbtree, get_parent(u)))
+ if (rcu_rbtree_is_nil(rbtree, get_parent(u))) {
rbtree->root = v;
- else if (u == get_parent(u)->_left)
- get_parent(u)->_left = v;
- else
- get_parent(u)->_right = v;
+ } else {
+ if (u == get_parent(u)->_left)
+ get_parent(u)->_left = v;
+ else
+ get_parent(u)->_right = v;
+ populate_node_end(rbtree, get_parent(u), copy_parents, stop);
+ }
set_parent(v, get_parent(u), get_pos(u));
unlock_test_mutex();
}
check_max_end(rbtree, x);
check_max_end(rbtree, get_decay(y));
/*
- * Commit all _CMM_STORE_SHARED().
+ * Commit all C__CMM_STORE_SHARED().
*/
- cmm_smp_wmc();
+ c_cmm_smp_wmc();
+#ifdef RBTREE_RCU_SUPPORT
rbtree->call_rcu(&z->head, _rcu_rbtree_free_node);
+#else
+ _rcu_rbtree_free_node(&z->head);
+#endif
return 0;
}