#ifndef LIBUALLOC_SLABAP_API_H
#define LIBUALLOC_SLABAP_API_H
#include <stddef.h>
#include <libualloc/libualloc.h>
/*
Allocator: slabap - fixed size slabs over 2^n aligned pages
Serve fixed size slabs from a list of aligned pages allocated in batch.
Similar to slab, but is a bit cheaper while requiring 2^n aligned page
allocation.
Allocation size: fixed
Standard calls: alloc, free, clean
Per allocation cost: 1 pointer + alignment
Per page cost: 2 pointers + 1 long + at most one slab_size
*/
/* Opaque handle for one free slab; threaded through the free_slabs list. */
typedef struct uall_slabap_s uall_slabap_t;
/* Opaque handle for one batch-allocated, 2^n aligned page of slabs. */
typedef struct uall_slabap_page_s uall_slabap_page_t;
/* Allocator context: fill in the configuration fields and zero every
   internal-state byte before the first call to uall_slabap_alloc(). */
typedef struct {
/* configuration */
uall_sysalloc_t *sys; /* underlying system/page allocator used for batch page allocation */
long slab_size; /* fixed size in bytes of every allocation served by this context */
void *user_data; /* opaque pointer; presumably forwarded to the sys allocator callbacks -- TODO confirm against libualloc.h */
/* internal states - init all bytes to 0 */
size_t page_mask; /* 0 for non-2^n page size; the mask calculated from page_size for allocation aligned to page size (see UALL_PAGE_MASK()) */
uall_slabap_page_t *pages; /* singly linked list of slabap pages */
uall_slabap_t *free_slabs; /* singly linked list cache */
} uall_slabaps_t;
/* Return a new allocation of ctx->slab_size bytes, served from the current
   page or from a freshly batch-allocated aligned page.
   NOTE(review): failure convention (NULL return?) is not visible here --
   confirm against the implementation. */
UALL_INLINE void *uall_slabap_alloc(uall_slabaps_t *ctx);
/* Free a slab previously returned by uall_slabap_alloc() on the same ctx
   (may trigger a page free back to ctx->sys once a page empties). */
UALL_INLINE void uall_slabap_free(uall_slabaps_t *ctx, void *ptr);
/* Free all data and empty ctx, which will be ready to accept new allocations;
   cheaper than calling uall_slabap_free() multiple times.
   All pointers previously handed out by this ctx become invalid. */
UALL_INLINE void uall_slabap_clean(uall_slabaps_t *ctx);
#endif /* LIBUALLOC_SLABAP_API_H */