/*
* malloc.c
*
* Very simple linked-list based malloc()/free().
*/
#include <stdlib.h>
#include <errno.h>
#include <string.h>
#include <dprintf.h>
#include "malloc.h"
static void *__malloc_from_block(struct free_arena_header *fp,
                                 size_t size, malloc_tag_t tag)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;
    unsigned int heap = ARENA_HEAP_GET(fp->a.attrs);

    fsize = ARENA_SIZE_GET(fp->a.attrs);

    /* We need the 2* to account for the larger requirements of a free block */
    if (fsize >= size + 2 * sizeof(struct arena_header)) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;

        ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
        ARENA_HEAP_SET(nfp->a.attrs, heap);
        ARENA_SIZE_SET(nfp->a.attrs, fsize - size);
        nfp->a.tag = MALLOC_FREE;
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        ARENA_SIZE_SET(fp->a.attrs, size);
        fp->a.tag = tag;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;

        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        fp->a.tag = tag;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }

    return (void *)(&fp->a + 1);
}
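
/*
 * Allocator core shared by the public entry points: walk the circular
 * free list of the requested heap (a sentinel head node terminates the
 * scan) and take the first block large enough for the padded request.
 * Returns NULL for zero-sized requests or when no free block fits.
 */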
static void *_malloc(size_t size, enum heap heap, malloc_tag_t tag)
{
    struct free_arena_header *fp;
    struct free_arena_header *head = &__malloc_head[heap];
    void *p = NULL;

    dprintf("_malloc(%zu, %u, %u) @ %p = ",
            size, heap, tag, __builtin_return_address(0));

    if (size) {
        /* Add the obligatory arena header, and round up */
        size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;
        for (fp = head->next_free; fp != head; fp = fp->next_free) {
            if (ARENA_SIZE_GET(fp->a.attrs) >= size) {
                /* Found fit -- allocate out of this block */
                p = __malloc_from_block(fp, size, tag);
                break;
            }
        }
    }

    dprintf("%p\n", p);

    return p;
}
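
/* Standard malloc(): allocate from the main heap, tagged MALLOC_CORE. */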
void *malloc(size_t size)
{
    return _malloc(size, HEAP_MAIN, MALLOC_CORE);
}
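
/* Allocate from the low-memory heap, tagged MALLOC_CORE. */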
void *lmalloc(size_t size)
{
    return _malloc(size, HEAP_LOWMEM, MALLOC_CORE);
}
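
/* Low-memory allocation made on behalf of a module (tagged MALLOC_MODULE). */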
void *pmapi_lmalloc(size_t size)
{
    return _malloc(size, HEAP_LOWMEM, MALLOC_MODULE);
}