24 #include <my_global.h>
/* Per-element space overhead the hash adds on top of the user's data:
   one LF_SLIST node header precedes every stored element. */
44 const int LF_HASH_OVERHEAD=
sizeof(
LF_SLIST);
/* NOTE(review): fragment of the list-cursor structure. 'prev' points at
   the predecessor's link word (an intptr whose low bit is the delete
   mark); 'volatile' because concurrent threads update it under CAS —
   confirm against the full struct definition, which is not visible here. */
51 intptr
volatile *prev;
/* The low bit of a node's link word is the "logically deleted" mark
   (Harris-style lock-free list):
     PTR(V)     - strip the mark bit, yielding the real successor pointer.
     DELETED(V) - nonzero if the node owning this link word is marked. */
59 #define PTR(V) (LF_SLIST *)((V) & (~(intptr)1))
60 #define DELETED(V) ((V) & 1)
/* NOTE(review): fragment of lfind() — walk the sorted list to position
   the cursor at the first node with hashnr/key >= the search key,
   pinning prev/curr/next so reclamation cannot free them under us.
   Many original lines are elided here; do not restyle this fragment. */
84 cursor->prev= (intptr *)head;
/* Pin the current node, then re-read *prev: if it changed between the
   load and the pin, the node may have been unlinked — retry. */
86 cursor->curr= (
LF_SLIST *)(*cursor->prev);
87 _lf_pin(pins, 1, cursor->curr);
88 }
while (*cursor->prev != (intptr)cursor->curr && LF_BACKOFF);
/* Reached end of list: key not present. */
91 if (unlikely(!cursor->curr))
/* Same load-pin-verify dance for the successor node. */
95 link= cursor->curr->link;
96 cursor->next= PTR(link);
97 _lf_pin(pins, 0, cursor->next);
98 }
while (link != cursor->curr->link && LF_BACKOFF);
99 cur_hashnr= cursor->curr->hashnr;
100 cur_key= cursor->curr->key;
101 cur_keylen= cursor->curr->keylen;
/* Current node was unlinked behind our back — restart the search. */
102 if (*cursor->prev != (intptr)cursor->curr)
/* Nodes are ordered by hashnr first, then by collation comparison of
   the key; stop once we are at or past the search position. */
109 if (cur_hashnr >= hashnr)
112 if (cur_hashnr > hashnr ||
113 (r= my_strnncoll(cs, (uchar*) cur_key, cur_keylen, (uchar*) key,
/* Advance: prev becomes the address of curr's own link word. */
117 cursor->prev= &(cursor->curr->link);
118 _lf_pin(pins, 2, cursor->curr);
/* curr is marked deleted: help unlink it with a CAS on the
   predecessor's link word; the winning thread frees the node. */
126 if (my_atomic_casptr((
void **)cursor->prev,
127 (
void **)&cursor->curr, cursor->next))
128 _lf_alloc_free(pins, cursor->curr);
135 cursor->curr= cursor->next;
136 _lf_pin(pins, 1, cursor->curr);
/* NOTE(review): fragment of linsert() — insert 'node' at its sorted
   position; with LF_HASH_UNIQUE set, an existing equal key blocks the
   insert and that duplicate is returned instead. */
161 if (lfind(head, cs, node->hashnr, node->key, node->keylen,
163 (flags & LF_HASH_UNIQUE))
/* Point the new node at cursor.curr, then publish it with a single
   CAS on the predecessor's link word. */
170 node->link= (intptr)cursor.curr;
171 DBUG_ASSERT(node->link != (intptr)node);
172 DBUG_ASSERT(cursor.prev != &node->link);
173 if (my_atomic_casptr((
void **)cursor.prev, (
void **)&cursor.curr, node))
/* res nonzero => inserted OK (return 0); otherwise return the
   already-present node that prevented the insert. */
189 return res ? 0 : cursor.curr;
/* NOTE(review): fragment of ldelete() — classic two-phase lock-free
   delete: (1) mark the victim's link word (logical delete), then
   (2) physically unlink it, or let a rescan via lfind() do so. */
205 const uchar *key, uint keylen,
LF_PINS *pins)
212 if (!lfind(head, cs, hashnr, key, keylen, &cursor, pins))
/* Phase 1: set the low "deleted" bit in curr's own link word. */
220 if (my_atomic_casptr((
void **)&(cursor.curr->link),
221 (
void **)&cursor.next,
222 (
void *)(((intptr)cursor.next) | 1)))
/* Phase 2: swing prev past the marked node; on success this thread
   owns the node and hands it to the pins' deferred-free list. */
225 if (my_atomic_casptr((
void **)cursor.prev,
226 (
void **)&cursor.curr, cursor.next))
227 _lf_alloc_free(pins, cursor.curr);
/* Unlink CAS lost the race: rescan so that somebody (possibly another
   thread's lfind) completes the physical unlink. */
236 lfind(head, cs, hashnr, key, keylen, &cursor, pins);
/* NOTE(review): fragment of lsearch() — locate a node by hashnr/key and
   leave it pinned for the caller; returns the node or 0. */
263 uint32 hashnr,
const uchar *key, uint keylen,
267 int res= lfind(head, cs, hashnr, key, keylen, &cursor, pins);
/* Re-pin the found node on pin 2 so it cannot be reclaimed while the
   caller is still using it. */
269 _lf_pin(pins, 2, cursor.curr);
272 return res ? cursor.curr : 0;
/* Return a pointer to the key inside 'record' and store its length in
   *length: either via the user-supplied get_key callback, or from the
   fixed key_offset/key_length configured at lf_hash_init() time. */
275 static inline const uchar* hash_key(
const LF_HASH *hash,
276 const uchar *
record,
size_t *length)
279 return (*hash->get_key)(record, length, 0);
280 *length= hash->key_length;
281 return record + hash->key_offset;
/* Hash a key using the hash's charset-aware hash_sort function so that
   keys equal under the collation hash equally; the result is masked to
   a non-negative 32-bit value. */
290 static inline uint calc_hash(
LF_HASH *hash,
const uchar *key, uint keylen)
292 ulong nr1= 1, nr2= 4;
293 hash->charset->coll->hash_sort(hash->charset, (uchar*) key, keylen,
295 return nr1 & INT_MAX32;
/* Initialize the lock-free hash: set up the node allocator (LF_SLIST
   header plus user element), the dynamic bucket-pointer array, and the
   key-extraction configuration. */
315 void lf_hash_init(
LF_HASH *hash, uint element_size, uint flags,
316 uint key_offset, uint key_length, my_hash_get_key get_key,
319 lf_alloc_init(&hash->alloc,
sizeof(
LF_SLIST)+element_size,
321 lf_dynarray_init(&hash->array,
sizeof(
LF_SLIST *));
324 hash->element_size= element_size;
/* Default to binary collation when no charset is supplied. */
326 hash->charset= charset ? charset : &my_charset_bin;
327 hash->key_offset= key_offset;
328 hash->key_length= key_length;
329 hash->get_key= get_key;
/* Either a callback extracts the key, or a fixed offset/length pair
   does — the two configurations are mutually exclusive. */
330 DBUG_ASSERT(get_key ? !key_offset && !key_length : key_length);
/* NOTE(review): fragment of lf_hash_destroy() — free every remaining
   list node, then tear down the allocator and the bucket array.
   Presumably must not run concurrently with any other hash operation;
   confirm against the full function. */
333 void lf_hash_destroy(
LF_HASH *hash)
/* Save the successor before freeing the current node. */
343 intptr next= el->link;
345 lf_alloc_direct_free(&hash->alloc, el);
350 lf_alloc_destroy(&hash->alloc);
351 lf_dynarray_destroy(&hash->array);
/* NOTE(review): fragment of lf_hash_insert() — copy 'data' into a fresh
   node, insert it into the bucket's sorted list, and double the table
   size when the load factor exceeds MAX_LOAD. */
367 int lf_hash_insert(
LF_HASH *hash,
LF_PINS *pins,
const void *data)
369 int csize,
bucket, hashnr;
372 lf_rwlock_by_pins(pins);
373 node= (
LF_SLIST *)_lf_alloc_new(pins);
/* The user element lives immediately after the LF_SLIST header. */
376 memcpy(node+1, data, hash->element_size);
377 node->key= hash_key(hash, (uchar *)(node+1), &node->keylen);
378 hashnr= calc_hash(hash, node->key, node->keylen);
379 bucket= hashnr % hash->size;
380 el= _lf_dynarray_lvalue(&hash->array, bucket);
/* Lazily create the bucket's dummy head node on first use. */
383 if (*el == NULL && unlikely(initialize_bucket(hash, el, bucket, pins)))
/* Split-ordered list key: bit-reversed hash with low bit 1 marks a
   real (non-dummy) node. */
385 node->hashnr= my_reverse_bits(hashnr) | 1;
/* Nonzero => duplicate under LF_HASH_UNIQUE: discard our node. */
386 if (linsert(el, hash->charset, node, pins, hash->flags))
388 _lf_alloc_free(pins, node);
389 lf_rwunlock_by_pins(pins);
/* Double the bucket count once average chain length passes MAX_LOAD;
   a failed CAS only means another thread resized first — fine. */
393 if ((my_atomic_add32(&hash->count, 1)+1.0) / csize > MAX_LOAD)
394 my_atomic_cas32(&hash->size, &csize, csize*2);
395 lf_rwunlock_by_pins(pins);
/* NOTE(review): fragment of lf_hash_delete() — remove the element with
   the given key from its bucket's list and decrement the count. */
411 int lf_hash_delete(
LF_HASH *hash,
LF_PINS *pins,
const void *key, uint keylen)
414 uint bucket, hashnr= calc_hash(hash, (uchar *)key, keylen);
416 bucket= hashnr % hash->size;
417 lf_rwlock_by_pins(pins);
418 el= _lf_dynarray_lvalue(&hash->array, bucket);
/* Even a delete may need to materialize the bucket's dummy node: the
   victim may currently be reachable only through this bucket. */
427 if (*el == NULL && unlikely(initialize_bucket(hash, el, bucket, pins)))
429 if (ldelete(el, hash->charset, my_reverse_bits(hashnr) | 1,
430 (uchar *)key, keylen, pins))
432 lf_rwunlock_by_pins(pins);
435 my_atomic_add32(&hash->count, -1);
436 lf_rwunlock_by_pins(pins);
/* NOTE(review): fragment of lf_hash_search() — look up an element by
   key; on success the record stays pinned (pin 2 inside lsearch) so
   the caller can safely read it. */
450 void *lf_hash_search(
LF_HASH *hash,
LF_PINS *pins,
const void *key, uint keylen)
453 uint bucket, hashnr= calc_hash(hash, (uchar *)key, keylen);
455 bucket= hashnr % hash->size;
456 lf_rwlock_by_pins(pins);
457 el= _lf_dynarray_lvalue(&hash->array, bucket);
460 if (*el == NULL && unlikely(initialize_bucket(hash, el, bucket, pins)))
462 found= lsearch(el, hash->charset, my_reverse_bits(hashnr) | 1,
463 (uchar *)key, keylen, pins);
464 lf_rwunlock_by_pins(pins);
/* Skip the LF_SLIST header: the user data starts at found+1. */
465 return found ? found+1 : 0;
/* Shared zero-length key used by all dummy (bucket head) nodes. */
468 static const uchar *dummy_key= (uchar*)
"";
/* NOTE(review): fragment of initialize_bucket() — lazily create the
   dummy head node for 'bucket', first recursively ensuring the parent
   bucket (bucket with its highest set bit cleared) is initialized. */
478 uint parent= my_clear_highest_bit(bucket);
481 LF_SLIST *
volatile *el= _lf_dynarray_lvalue(&hash->array, parent);
482 if (unlikely(!el || !dummy))
484 if (*el == NULL && bucket &&
485 unlikely(initialize_bucket(hash, el, parent, pins)))
/* Split-ordered list key: bit-reversed bucket number with low bit 0
   marks a dummy node (sorts before all real nodes of the bucket). */
487 dummy->hashnr= my_reverse_bits(bucket) | 0;
488 dummy->key= dummy_key;
/* Another thread may have inserted an equivalent dummy concurrently:
   linsert returns the winner and our copy is discarded. */
490 if ((cur= linsert(el, hash->charset, dummy, pins, LF_HASH_UNIQUE)))
/* Publish the dummy in the bucket array; a failed CAS means someone
   else already published one, which is equally fine. */
495 my_atomic_casptr((
void **)node, (
void **)&tmp, dummy);