#define dbg_printf(args...)
#endif
/*
 * Resolved merge residue: post-image replaces the single bucket-size
 * threshold with a target/threshold pair.
 * CHAIN_LEN_TARGET is the chain length the resize aims back toward;
 * CHAIN_LEN_RESIZE_THRESHOLD is the length at which check_resize()
 * triggers a lazy table resize.
 */
#define CHAIN_LEN_TARGET		4
#define CHAIN_LEN_RESIZE_THRESHOLD	16
#ifndef max
#define max(a, b) ((a) > (b) ? (a) : (b))
struct rcu_ht {
struct rcu_table *t; /* shared */
ht_hash_fct hash_fct;
- void *hashseed;
+ ht_compare_fct compare_fct;
+ unsigned long hash_seed;
pthread_mutex_t resize_mutex; /* resize mutex: add/del mutex */
void (*ht_call_rcu)(struct rcu_head *head,
void (*func)(struct rcu_head *head));
void check_resize(struct rcu_ht *ht, struct rcu_table *t,
uint32_t chain_len)
{
- if (chain_len >= BUCKET_SIZE_RESIZE_THRESHOLD)
- ht_resize_lazy(ht, t, log2_u32(chain_len));
+ if (chain_len >= CHAIN_LEN_RESIZE_THRESHOLD)
+ ht_resize_lazy(ht, t,
+ log2_u32(chain_len - CHAIN_LEN_TARGET));
}
static
iter = clear_flag(rcu_dereference(iter_prev->next));
if (unlikely(!iter))
break;
- if (iter->reverse_hash < node->reverse_hash)
+ if (iter->reverse_hash > node->reverse_hash)
break;
iter_prev = iter;
check_resize(ht, t, ++chain_len);
iter = clear_flag(rcu_dereference(iter_prev->next));
if (unlikely(!iter))
break;
- if (iter->reverse_hash < node->reverse_hash)
+ if (unlikely(iter->reverse_hash > node->reverse_hash))
break;
if (iter == node) {
found = 1;
}
struct rcu_ht *ht_new(ht_hash_fct hash_fct,
- void *hashseed,
+ ht_compare_fct compare_fct,
+ unsigned long hash_seed,
unsigned long init_size,
void (*ht_call_rcu)(struct rcu_head *head,
void (*func)(struct rcu_head *head)))
ht = calloc(1, sizeof(struct rcu_ht));
ht->hash_fct = hash_fct;
- ht->hashseed = hashseed;
+ ht->compare_fct = compare_fct;
+ ht->hash_seed = hash_seed;
ht->ht_call_rcu = ht_call_rcu;
/* this mutex should not nest in read-side C.S. */
pthread_mutex_init(&ht->resize_mutex, NULL);
return ht;
}
-struct rcu_ht_node *ht_lookup(struct rcu_ht *ht, void *key)
+struct rcu_ht_node *ht_lookup(struct rcu_ht *ht, void *key, size_t key_len)
{
struct rcu_table *t;
struct rcu_ht_node *node;
unsigned long hash, reverse_hash;
- hash = ht->hash_fct(ht->hashseed, key);
+ hash = ht->hash_fct(key, key_len, ht->hash_seed);
reverse_hash = bit_reverse_ulong(hash);
t = rcu_dereference(ht->t);
for (;;) {
if (unlikely(!node))
break;
- if (node->reverse_hash > reverse_hash) {
+ if (unlikely(node->reverse_hash > reverse_hash)) {
node = NULL;
break;
}
- if (node->key == key) {
- if (is_removed(rcu_dereference(node->next)))
+ if (!ht->compare_fct(node->key, node->key_len, key, key_len)) {
+ if (unlikely(is_removed(rcu_dereference(node->next))))
node = NULL;
break;
}
{
struct rcu_table *t;
- node->hash = ht->hash_fct(ht->hashseed, node->key);
+ node->hash = ht->hash_fct(node->key, node->key_len, ht->hash_seed);
node->reverse_hash = bit_reverse_ulong((unsigned long) node->hash);
t = rcu_dereference(ht->t);
memcpy(&new_t->tbl, &old_t->tbl,
old_size * sizeof(struct rcu_ht_node *));
init_table(ht, new_t, old_size, new_size - old_size);
- new_t->size = new_size;
/* Changing table and size atomically wrt lookups */
rcu_assign_pointer(ht->t, new_t);
ht->ht_call_rcu(&old_t->head, ht_free_table_cb);