#include <my_global.h>
#include <my_sys.h>
#include <lf.h>

#define LF_PINBOX_MAX_PINS 65536

static void _lf_pinbox_real_free(LF_PINS *pins);

/*
  Initialize a pinbox. Normally called from lf_alloc_init().
*/
void lf_pinbox_init(LF_PINBOX *pinbox, uint free_ptr_offset,
                    lf_pinbox_free_func *free_func, void *free_func_arg)
{
  DBUG_ASSERT(free_ptr_offset % sizeof(void *) == 0);
  compile_time_assert(sizeof(LF_PINS) == 64);
  lf_dynarray_init(&pinbox->pinarray, sizeof(LF_PINS));
  pinbox->pinstack_top_ver= 0;
  pinbox->pins_in_array= 0;
  pinbox->free_ptr_offset= free_ptr_offset;
  pinbox->free_func= free_func;
  pinbox->free_func_arg= free_func_arg;
}

void lf_pinbox_destroy(LF_PINBOX *pinbox)
{
  lf_dynarray_destroy(&pinbox->pinarray);
}

/*
  Get pins from a pinbox: pop an unused LF_PINS element off the versioned
  lock-free stack, or allocate a new one from the dynarray.
*/
LF_PINS *_lf_pinbox_get_pins(LF_PINBOX *pinbox)
{
  struct st_my_thread_var *var;
  uint32 pins, next, top_ver;
  LF_PINS *el;

  top_ver= pinbox->pinstack_top_ver;
  do
  {
    if (!(pins= top_ver % LF_PINBOX_MAX_PINS))
    {
      /* the stack of free elements is empty: allocate a new slot */
      pins= my_atomic_add32((int32 volatile*) &pinbox->pins_in_array, 1)+1;
      if (unlikely(pins >= LF_PINBOX_MAX_PINS))
        return 0;
      /* the first allocated element has index 1; index 0 means "NULL" */
      el= (LF_PINS *)_lf_dynarray_lvalue(&pinbox->pinarray, pins);
      if (unlikely(!el))
        return 0;
      break;
    }
    el= (LF_PINS *)_lf_dynarray_value(&pinbox->pinarray, pins);
    next= el->link;
  } while (!my_atomic_cas32((int32 volatile*) &pinbox->pinstack_top_ver,
                            (int32*) &top_ver,
                            top_ver-pins+next+LF_PINBOX_MAX_PINS));
  el->link= pins;              /* remember our own index in the dynarray */
  el->purgatory_count= 0;
  el->pinbox= pinbox;
  var= my_thread_var;
  el->stack_ends_here= (var ? &var->stack_ends_here : NULL);
  return el;
}

void _lf_pinbox_put_pins(LF_PINS *pins)
{
  LF_PINBOX *pinbox= pins->pinbox;
  uint32 top_ver, nr;
  nr= pins->link;
#ifdef MY_LF_EXTRA_DEBUG
  {
    int i;
    for (i= 0; i < LF_PINBOX_PINS; i++)
      DBUG_ASSERT(pins->pin[i] == 0);
  }
#endif
  /* drain the purgatory first; the loop spins while objects stay pinned */
  while (pins->purgatory_count)
  {
    _lf_pinbox_real_free(pins);
    if (pins->purgatory_count)
    {
      my_atomic_rwlock_wrunlock(&pins->pinbox->pinarray.lock);
      pthread_yield();
      my_atomic_rwlock_wrlock(&pins->pinbox->pinarray.lock);
    }
  }
  /* push the LF_PINS element back onto the versioned free stack */
  top_ver= pinbox->pinstack_top_ver;
  do
  {
    pins->link= top_ver % LF_PINBOX_MAX_PINS;
  } while (!my_atomic_cas32((int32 volatile*) &pinbox->pinstack_top_ver,
                            (int32*) &top_ver,
                            top_ver-pins->link+nr+LF_PINBOX_MAX_PINS));
}

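/*
  A minimal usage sketch of the pin life cycle, assuming the usual lf.h
  wrappers lf_alloc_get_pins()/lf_alloc_put_pins(); 'shared_top' and 'node'
  are hypothetical names. The pin/re-check/use/unpin pattern mirrors what
  _lf_alloc_new() below does with pin 0.

    LF_PINS *pins= lf_alloc_get_pins(&allocator);    // once per thread
    do
    {
      node= shared_top;            // read the shared pointer
      _lf_pin(pins, 0, node);      // pin the object it points to
    } while (node != shared_top && LF_BACKOFF);      // re-check after pinning
    ... use node: it cannot be reused while it stays pinned ...
    _lf_unpin(pins, 0);
    lf_alloc_put_pins(pins);                         // once, at thread exit
*/
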
static int ptr_cmp(void **a, void **b)
{
  return *a < *b ? -1 : *a == *b ? 0 : 1;
}

#define add_to_purgatory(PINS, ADDR)                                    \
  do                                                                    \
  {                                                                     \
    *(void **)((char *)(ADDR)+(PINS)->pinbox->free_ptr_offset)=         \
      (PINS)->purgatory;                                                \
    (PINS)->purgatory= (ADDR);                                          \
    (PINS)->purgatory_count++;                                          \
  } while (0)

/*
  Free an object allocated via the pinbox allocator: add it to the
  purgatory, and try to really free a batch once the purgatory has grown
  by another LF_PURGATORY_SIZE elements.
*/
void _lf_pinbox_free(LF_PINS *pins, void *addr)
{
  add_to_purgatory(pins, addr);
  if (pins->purgatory_count % LF_PURGATORY_SIZE == 0)
    _lf_pinbox_real_free(pins);
}

struct st_harvester { void **granary; int npins; };

/* _lf_dynarray_iterate() callback: collect all pinned addresses */
static int harvest_pins(LF_PINS *el, struct st_harvester *hv)
{
  int i;
  LF_PINS *el_end= el + MY_MIN(hv->npins, LF_DYNARRAY_LEVEL_LENGTH);
  for (; el < el_end; el++)
    for (i= 0; i < LF_PINBOX_PINS; i++)
    {
      void *p= el->pin[i];     /* read the volatile pin only once */
      if (p)
        *hv->granary++= p;
    }
  hv->npins-= LF_DYNARRAY_LEVEL_LENGTH;
  return 0;
}

/* _lf_dynarray_iterate() callback: return 1 if any thread has addr pinned */
static int match_pins(LF_PINS *el, void *addr)
{
  int i;
  LF_PINS *el_end= el+LF_DYNARRAY_LEVEL_LENGTH;
  for (; el < el_end; el++)
    for (i= 0; i < LF_PINBOX_PINS; i++)
      if (el->pin[i] == addr)
        return 1;
  return 0;
}

#if STACK_DIRECTION < 0
#define available_stack_size(CUR,END) (long) ((char*)(CUR) - (char*)(END))
#else
#define available_stack_size(CUR,END) (long) ((char*)(END) - (char*)(CUR))
#endif

#define next_node(P, X) (*((uchar * volatile *)(((uchar *)(X)) + (P)->free_ptr_offset)))
#define anext_node(X) next_node(&allocator->pinbox, (X))

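/*
  A minimal sketch of how free_ptr_offset is meant to be chosen by a user
  of this allocator ('struct my_node' and its fields are hypothetical).
  The object must contain a pointer-sized field that is no longer needed
  once the object has been handed to _lf_pinbox_free(); next_node() above
  reuses exactly that field to link objects in the purgatory and in the
  allocator free list.

    struct my_node
    {
      void *key;               // meaningful only while the node is live
      struct my_node *next;    // unused after free: reused as the link
      int data;
    };
    // offsetof(struct my_node, next) would be passed as free_ptr_offset
*/
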
/* free everything in the purgatory that is not pinned by any thread */
static void _lf_pinbox_real_free(LF_PINS *pins)
{
  int npins;
  void *list;
  void **addr= NULL;
  void *first= NULL, *last= NULL;
  LF_PINBOX *pinbox= pins->pinbox;

  npins= pinbox->pins_in_array+1;
#ifdef HAVE_ALLOCA
  if (pins->stack_ends_here != NULL)
  {
    int alloca_size= sizeof(void *)*LF_PINBOX_PINS*npins;
    /* create a sorted array of pinned addresses to speed up searches */
    if (available_stack_size(&pinbox, *pins->stack_ends_here) > alloca_size)
    {
      struct st_harvester hv;
      addr= (void **) alloca(alloca_size);
      hv.granary= addr;
      hv.npins= npins;
      _lf_dynarray_iterate(&pinbox->pinarray,
                           (lf_dynarray_func)harvest_pins, &hv);
      npins= hv.granary-addr;
      if (npins)
        qsort(addr, npins, sizeof(void *), (qsort_cmp)ptr_cmp);
    }
  }
#endif
  list= pins->purgatory;
  pins->purgatory= 0;
  pins->purgatory_count= 0;
  while (list)
  {
    void *cur= list;
    list= *(void **)((char *)cur+pinbox->free_ptr_offset);
    if (npins)
    {
      if (addr) /* binary search in the sorted array of pinned addresses */
      {
        void **a, **b, **c;
        for (a= addr, b= addr+npins-1, c= a+(b-a)/2; (b-a) > 1; c= a+(b-a)/2)
          if (cur == *c)
            a= b= c;
          else if (cur > *c)
            a= c;
          else
            b= c;
        if (cur == *a || cur == *b)
          goto found;
      }
      else /* no alloca() array - linear search over all pins */
      {
        if (_lf_dynarray_iterate(&pinbox->pinarray,
                                 (lf_dynarray_func)match_pins, cur))
          goto found;
      }
    }
    /* not pinned - append to the list of objects to free */
    if (last)
      last= next_node(pinbox, last)= (uchar *)cur;
    else
      first= last= (uchar *)cur;
    continue;
found:
    /* pinned - keep it in the purgatory */
    add_to_purgatory(pins, cur);
  }
  if (last)
    pinbox->free_func(first, last, pinbox->free_func_arg);
}

/*
  Callback for _lf_pinbox_real_free(): push a list of unpinned objects
  (first -> ... -> last) back onto the allocator stack.
*/
static void alloc_free(uchar *first,
                       uchar volatile *last,
                       LF_ALLOCATOR *allocator)
{
  /*
    a union is used to access the type-punned pointer reliably, otherwise
    gcc -fstrict-aliasing may not see 'tmp' changed inside the loop
  */
  union { uchar * node; void *ptr; } tmp;
  tmp.node= allocator->top;
  do
  {
    anext_node(last)= tmp.node;
  } while (!my_atomic_casptr((void **)(char *)&allocator->top,
                             (void **)&tmp.ptr, first) && LF_BACKOFF);
}

/*
  Initialize the lock-free allocator. free_ptr_offset is the offset, inside
  the object, of a sizeof(void *) field that is unused once the object has
  been freed; it is reused to link objects in the free list and purgatory.
*/
void lf_alloc_init(LF_ALLOCATOR *allocator, uint size, uint free_ptr_offset)
{
  lf_pinbox_init(&allocator->pinbox, free_ptr_offset,
                 (lf_pinbox_free_func *)alloc_free, allocator);
  allocator->top= 0;
  allocator->mallocs= 0;
  allocator->element_size= size;
  allocator->constructor= 0;
  allocator->destructor= 0;
  DBUG_ASSERT(size >= sizeof(void*) + free_ptr_offset);
}

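/*
  A minimal life-cycle sketch, assuming the usual lf.h wrappers
  (lf_alloc_get_pins(), lf_alloc_new(), lf_alloc_free(),
  lf_alloc_put_pins()); 'struct my_node' is a hypothetical element type:

    LF_ALLOCATOR allocator;
    lf_alloc_init(&allocator, sizeof(struct my_node),
                  offsetof(struct my_node, next));

    LF_PINS *pins= lf_alloc_get_pins(&allocator);     // per thread
    struct my_node *node= lf_alloc_new(pins);         // pop or malloc()
    ... use node ...
    lf_alloc_free(pins, node);    // goes to the purgatory, physically
                                  // freed only when no thread pins it
    lf_alloc_put_pins(pins);      // per thread, when it is done

    lf_alloc_destroy(&allocator); // single-threaded shutdown
*/
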
/* destroy the allocator, freeing everything still in it (not thread-safe) */
void lf_alloc_destroy(LF_ALLOCATOR *allocator)
{
  uchar *node= allocator->top;
  while (node)
  {
    uchar *tmp= anext_node(node);
    if (allocator->destructor)
      allocator->destructor(node);
    my_free(node);
    node= tmp;
  }
  lf_pinbox_destroy(&allocator->pinbox);
  allocator->top= 0;
}

/*
  Allocate and return a new object: pop an unused object off the allocator
  stack, or malloc() one if the stack is empty. Uses pin 0; it is unpinned
  before returning.
*/
void *_lf_alloc_new(LF_PINS *pins)
{
  LF_ALLOCATOR *allocator= (LF_ALLOCATOR *)(pins->pinbox->free_func_arg);
  uchar *node;
  for (;;)
  {
    do
    {
      node= allocator->top;
      _lf_pin(pins, 0, node);
    } while (node != allocator->top && LF_BACKOFF);
    if (!node)
    {
      node= (void *)my_malloc(allocator->element_size, MYF(MY_WME));
      if (allocator->constructor)
        allocator->constructor(node);
#ifdef MY_LF_EXTRA_DEBUG
      if (likely(node != 0))
        my_atomic_add32(&allocator->mallocs, 1);
#endif
      break;
    }
    if (my_atomic_casptr((void **)(char *)&allocator->top,
                         (void *)&node, anext_node(node)))
      break;
  }
  _lf_unpin(pins, 0);
  return node;
}

/* count the objects sitting in the allocator stack; NOT thread-safe */
uint lf_alloc_pool_count(LF_ALLOCATOR *allocator)
{
  uint i;
  uchar *node;
  for (node= allocator->top, i= 0; node; node= anext_node(node), i++)
    /* no op */;
  return i;
}