Updated memory allocator to improve support for unaligned or partially
aligned blocks.  Moved header to include/malloc.h, since we now also
provide the POSIX-like malloc()/free() pair.  Not yet tested.
parent 2f0d412210
commit b601a7d355
@@ -16,189 +16,228 @@
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <strings.h>
#include <io.h>
#include <gpxe/list.h>
#include <gpxe/malloc.h>
#include <malloc.h>

/** @file
 *
 * Memory allocation
 * Dynamic memory allocation
 *
 */

/** A free block of memory */
struct free_block {
struct memory_block {
        /** List of free blocks */
        struct list_head list;
        /** Size of this block */
        size_t size;
};

#define MIN_MEMBLOCK_SIZE \
        ( ( size_t ) ( 1 << ( fls ( sizeof ( struct memory_block ) - 1 ) ) ) )

/** A block of allocated memory complete with size information */
struct autosized_block {
        /** Size of this block */
        size_t size;
        /** Remaining data */
        char data[0];
};

/** List of free memory blocks */
static LIST_HEAD ( free_blocks );
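
As an aside from the diff: to get a feel for the granularity that MIN_MEMBLOCK_SIZE implies, here is a small standalone sketch. fls_sketch() and struct memory_block_sketch are stand-ins invented for illustration (assuming struct list_head is two pointers); the real values depend on the build.

#include <stdio.h>
#include <stddef.h>

/* Minimal stand-in for fls(): 1-based index of the highest set bit */
static int fls_sketch ( unsigned long x ) {
        int bit = 0;
        while ( x ) {
                bit++;
                x >>= 1;
        }
        return bit;
}

/* Hypothetical stand-in for struct memory_block */
struct memory_block_sketch {
        void *prev;             /* stands in for struct list_head */
        void *next;
        size_t size;
};

int main ( void ) {
        /* Same rounding rule as the MIN_MEMBLOCK_SIZE macro:
         * smallest power of two >= sizeof ( struct memory_block )
         */
        size_t min = ( ( size_t ) 1 <<
                       fls_sketch ( sizeof ( struct memory_block_sketch ) - 1 ) );
        printf ( "allocation granularity: %zu bytes\n", min );
        return 0;
}

Assuming 4-byte pointers this works out to 16 bytes, and with 8-byte pointers to 32 bytes; every size and alignment passed to alloc_memblock() is rounded up to a multiple of this value.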

/**
 * Round size up to a memory allocation block size
 * Allocate a memory block
 *
 * @v requested         Requested size
 * @ret obtained        Obtained size
 * @v size              Requested size
 * @v align             Physical alignment
 * @ret ptr             Memory block, or NULL
 *
 * The requested size is rounded up to the minimum allocation block
 * size (the size of a struct @c free_block) and then rounded up to
 * the nearest power of two.
 * Allocates a memory block @b physically aligned as requested. No
 * guarantees are provided for the alignment of the virtual address.
 *
 * @c align must be a power of two. @c size may not be zero.
 */
static size_t block_size ( size_t requested ) {
        size_t obtained = 1;
void * alloc_memblock ( size_t size, size_t align ) {
        struct memory_block *block;
        size_t pre_size;
        ssize_t post_size;
        struct memory_block *pre;
        struct memory_block *post;

        while ( ( obtained < sizeof ( struct free_block ) ) ||
                ( obtained < requested ) ) {
                obtained <<= 1;
        /* Round up alignment and size to multiples of MIN_MEMBLOCK_SIZE */
        align = ( align + MIN_MEMBLOCK_SIZE - 1 ) & ~( MIN_MEMBLOCK_SIZE - 1 );
        size = ( size + MIN_MEMBLOCK_SIZE - 1 ) & ~( MIN_MEMBLOCK_SIZE - 1 );

        /* Search through blocks for the first one with enough space */
        list_for_each_entry ( block, &free_blocks, list ) {
                pre_size = ( - virt_to_phys ( block ) ) & ( align - 1 );
                post_size = block->size - pre_size - size;
                if ( post_size >= 0 ) {
                        /* Split block into pre-block, block, and
                         * post-block. After this split, the "pre"
                         * block is the one currently linked into the
                         * free list.
                         */
                        pre = block;
                        block = ( ( ( void * ) pre ) + pre_size );
                        post = ( ( ( void * ) block ) + size );
                        /* If there is a "post" block, add it in to
                         * the free list. Leak it if it is too small
                         * (which can happen only at the very end of
                         * the heap).
                         */
                        if ( ( size_t ) post_size > MIN_MEMBLOCK_SIZE ) {
                                post->size = post_size;
                                list_add ( &post->list, &pre->list );
                        }
                        /* Shrink "pre" block, leaving the main block
                         * isolated and no longer part of the free
                         * list.
                         */
                        pre->size = pre_size;
                        /* If there is no "pre" block, remove it from
                         * the list. Also remove it (i.e. leak it) if
                         * it is too small, which can happen only at
                         * the very start of the heap.
                         */
                        if ( pre_size < MIN_MEMBLOCK_SIZE )
                                list_del ( &pre->list );
                        /* Zero allocated memory, for calloc() */
                        memset ( block, 0, size );
                        return block;
                }
        }
        return NULL;
}
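
To make the pre/block/post split concrete, here is a standalone worked example (addresses and sizes are invented) reproducing the same arithmetic as the loop above: the unaligned head of the free block stays on the free list as "pre", the aligned middle is returned to the caller, and the tail goes back on the list as "post".

#include <stdio.h>
#include <stdint.h>
#include <sys/types.h>

int main ( void ) {
        /* Made-up free block: physical address 0x12340, 0x1000 bytes */
        uintptr_t block_phys = 0x12340;
        size_t block_size = 0x1000;
        /* Made-up request: 0x200 bytes, physically aligned to 0x100 */
        size_t size = 0x200;
        size_t align = 0x100;

        /* Same arithmetic as alloc_memblock() above */
        size_t pre_size = ( 0 - block_phys ) & ( align - 1 );
        ssize_t post_size = block_size - pre_size - size;

        printf ( "pre-block %#zx bytes, aligned block at %#lx, "
                 "post-block %#zx bytes\n", pre_size,
                 ( unsigned long ) ( block_phys + pre_size ),
                 ( size_t ) post_size );
        return 0;
}

This prints a pre-block of 0xc0 bytes, an aligned block at physical address 0x12400, and a post-block of 0xd40 bytes.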

/**
 * Free a memory block
 *
 * @v ptr               Memory allocated by alloc_memblock(), or NULL
 * @v size              Size of the memory
 *
 * If @c ptr is NULL, no action is taken.
 */
void free_memblock ( void *ptr, size_t size ) {
        struct memory_block *freeing;
        struct memory_block *block;
        ssize_t gap_before;
        ssize_t gap_after;

        /* Allow for ptr==NULL */
        if ( ! ptr )
                return;

        /* Round up size to match actual size that alloc_memblock()
         * would have used.
         */
        size = ( size + MIN_MEMBLOCK_SIZE - 1 ) & ~( MIN_MEMBLOCK_SIZE - 1 );
        freeing = ptr;
        freeing->size = size;

        /* Insert/merge into free list */
        list_for_each_entry ( block, &free_blocks, list ) {
                /* Calculate gaps before and after the "freeing" block */
                gap_before = ( ( ( void * ) freeing ) -
                               ( ( ( void * ) block ) + block->size ) );
                gap_after = ( ( ( void * ) block ) -
                              ( ( ( void * ) freeing ) + freeing->size ) );
                /* Merge with immediately preceding block, if possible */
                if ( gap_before == 0 ) {
                        block->size += size;
                        list_del ( &block->list );
                        freeing = block;
                }
                /* Insert before the immediately following block. If
                 * possible, merge the following block into the
                 * "freeing" block.
                 */
                if ( gap_after >= 0 ) {
                        list_add_tail ( &freeing->list, &block->list );
                        if ( gap_after == 0 ) {
                                freeing->size += block->size;
                                list_del ( &block->list );
                        }
                        break;
                }
        }
        return obtained;
}
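
A minimal pairing of the two calls above, assuming the pool has already been seeded via mpopulate(); the helper name and sizes are hypothetical. Unlike standard free(), the caller has to hand the original size back:

#include <malloc.h>

/* Hypothetical helper: grab and later release a 512-byte,
 * 512-byte physically aligned scratch area.
 */
static void scratch_demo ( void ) {
        void *scratch;

        scratch = alloc_memblock ( 512, 512 );
        if ( ! scratch )
                return;
        /* ... use scratch ... */
        free_memblock ( scratch, 512 );
}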

/**
 * Allocate memory
 *
 * @v size              Requested size
 * @ret ptr             Allocated memory
 * @ret ptr             Memory, or NULL
 *
 * gmalloc() will always allocate memory in power-of-two sized blocks,
 * aligned to the corresponding power-of-two boundary. For example, a
 * request for 1500 bytes will return a 2048-byte block aligned to a
 * 2048-byte boundary.
 * Allocates memory with no particular alignment requirement. @c ptr
 * will be aligned to at least a multiple of sizeof(void*).
 *
 * The alignment applies to the physical address, not the virtual
 * address. The pointer value returned by gmalloc() therefore has no
 * alignment guarantees, except as provided for by the
 * virtual-to-physical mapping. (In a PXE environment, this mapping
 * is guaranteed to be a multiple of 16 bytes.)
 *
 * Unlike traditional malloc(), the caller must remember the size of
 * the allocated block and pass the size to gfree(). This is done in
 * order to allow efficient allocation of power-of-two sized and
 * aligned blocks.
 * Note that malloc() is very inefficient for allocating blocks where
 * the size is a power of two; if you have many of these
 * (e.g. descriptor rings, data buffers) you should use malloc_dma()
 * instead.
 */
void * gmalloc ( size_t size ) {
        struct free_block *block;
        struct free_block *buddy;
void * malloc ( size_t size ) {
        size_t total_size;
        struct autosized_block *block;

        /* Round up block size to power of two */
        size = block_size ( size );

        /* Find the best available block */
        list_for_each_entry ( block, &free_blocks, list ) {
                if ( block->size == size ) {
                        list_del ( &block->list );
                        memset ( block, 0, size );
                        return block;
                }
                while ( block->size > size ) {
                        block->size >>= 1;
                        buddy = ( ( ( void * ) block ) + block->size );
                        buddy->size = block->size;
                        list_add ( &buddy->list, &block->list );
                }
        }

        /* Nothing available */
        return NULL;
        total_size = size + offsetof ( struct autosized_block, data );
        block = alloc_memblock ( total_size, 1 );
        if ( ! block )
                return NULL;
        block->size = size;
        return &block->data;
}
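
The following standalone sketch (struct and variable names are made up, mirroring struct autosized_block) shows the size header that malloc() hides in front of the returned pointer, and hence why the documentation above steers power-of-two sized buffers towards malloc_dma():

#include <stdio.h>
#include <stddef.h>

/* Local mirror of struct autosized_block; a C99 flexible array member
 * is used here so the sketch compiles standalone.
 */
struct autosized_block_sketch {
        size_t size;
        char data[];
};

int main ( void ) {
        size_t requested = 2048;        /* a power-of-two request */
        size_t header = offsetof ( struct autosized_block_sketch, data );
        size_t underlying = requested + header;

        /* The underlying alloc_memblock() request is larger than
         * 2048 bytes, so it can no longer be satisfied from a single
         * 2048-byte region.
         */
        printf ( "requested %zu, underlying block %zu bytes\n",
                 requested, underlying );
        return 0;
}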

/**
 * Free memory
 *
 * @v ptr               Allocated memory
 * @v size              Originally requested size
 * @v size              Memory allocated by malloc(), or NULL
 *
 * Frees memory originally allocated by gmalloc().
 * Memory allocated with malloc_dma() cannot be freed with free(); it
 * must be freed with free_dma() instead.
 *
 * Calling gfree() with a NULL @c ptr is explicitly allowed, and
 * defined to have no effect. Code such as
 *
 * @code
 *
 *   if ( ! my_ptr )
 *       gfree ( my_ptr, my_size )
 *
 * @endcode
 *
 * is perfectly valid, but should be avoided as unnecessary bloat.
 * If @c ptr is NULL, no action is taken.
 */
void gfree ( void *ptr, size_t size ) {
        struct free_block *freed_block = ptr;
        struct free_block *block;

        /* Cope with gfree(NULL,x) */
        if ( ! ptr )
                return;
void free ( void *ptr ) {
        struct autosized_block *block;

        /* Round up block size to power of two */
        size = block_size ( size );
        freed_block->size = size;

        /* Merge back into free list */
        list_for_each_entry ( block, &free_blocks, list ) {
                if ( ( ( virt_to_phys ( block ) ^
                        virt_to_phys ( freed_block ) ) == size ) &&
                     ( block->size == size ) ) {
                        list_del ( &block->list );
                        size <<= 1;
                        if ( block < freed_block )
                                freed_block = block;
                        freed_block->size = size;
                } else if ( block->size > size ) {
                        break;
                }
        if ( ptr ) {
                block = container_of ( ptr, struct autosized_block, data );
                free_memblock ( block, block->size );
        }
        list_add_tail ( &freed_block->list, &block->list );
}
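
A routine usage sketch of the new POSIX-like pair within the gPXE build (the helper is hypothetical); note that free() needs no size argument, unlike gfree() and free_memblock():

#include <string.h>
#include <malloc.h>

/* Hypothetical helper: duplicate a string using malloc()/free() */
static char * dup_string ( const char *src ) {
        size_t len = strlen ( src ) + 1;
        char *copy = malloc ( len );

        if ( copy )
                memcpy ( copy, src, len );
        return copy;
        /* the caller eventually calls free ( copy ) */
}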

/**
 * Add memory to allocation pool
 *
 * @v start             Start address
 * @v len               Length
 * @v end               End address
 *
 * Adds a block of memory to the allocation pool. This is a one-way
 * operation; there is no way to reclaim this memory.
 * Adds a block of memory [start,end) to the allocation pool. This is
 * a one-way operation; there is no way to reclaim this memory.
 *
 * There are no alignment requirements on either start or len.
 * @c start must be aligned to at least a multiple of sizeof(void*).
 */
void gmpopulate ( void *start, size_t len ) {
        size_t frag_len;

        /* Split region into power-of-two sized and aligned blocks,
         * and feed them to gfree().
         */
        while ( len ) {
                frag_len = 1;
                /* Find maximum allowed alignment for this address */
                while ( ( virt_to_phys ( start ) & frag_len ) == 0 ) {
                        frag_len <<= 1;
                }
                /* Find maximum block size that fits in remaining space */
                while ( frag_len > len ) {
                        frag_len >>= 1;
                }
                /* Skip blocks that are too small */
                if ( frag_len >= sizeof ( struct free_block ) )
                        gfree ( start, frag_len );
                start += frag_len;
                len -= frag_len;
        }
void mpopulate ( void *start, size_t len ) {
        free_memblock ( start, ( len & ~( MIN_MEMBLOCK_SIZE - 1 ) ) );
}
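
A minimal seeding sketch, assuming a static heap array; the array name, size and alignment attribute are illustrative, and real callers pass in whatever memory region the platform code discovers:

#include <malloc.h>

/* Hypothetical 128 kB static heap handed to the allocator at startup */
static char heap[128 * 1024] __attribute__ (( aligned ( 16 ) ));

static void heap_init_sketch ( void ) {
        mpopulate ( heap, sizeof ( heap ) );
}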

#if 0
#if 1
#include <vsprintf.h>
/**
 * Dump free block list
 *
 */
void gdumpfree ( void ) {
        struct free_block *block;
void mdumpfree ( void ) {
        struct memory_block *block;

        printf ( "Free block list:\n" );
        list_for_each_entry ( block, &free_blocks, list ) {
@@ -207,3 +246,4 @@ void gdumpfree ( void ) {
        }
}
#endif

@@ -1,36 +0,0 @@
#ifndef _GPXE_MALLOC_H
#define _GPXE_MALLOC_H

#include <stdint.h>

/** @file
 *
 * Memory allocation
 *
 */

extern void * gmalloc ( size_t size );
extern void gfree ( void *ptr, size_t size );
extern void gmpopulate ( void *start, size_t len );

/**
 * Allocate cleared memory
 *
 * @v size              Requested size
 * @ret ptr             Allocated memory
 *
 * Allocate memory as per gmalloc(), and zero it.
 *
 * Note that gmalloc() and gcalloc() are identical, in the interests
 * of reducing code size. Callers should not, however, rely on
 * gmalloc() clearing memory, since this behaviour may change in
 * future.
 */
static inline void * gcalloc ( size_t size ) {
        return gmalloc ( size );
}

/* Debug function; not compiled in by default */
void gdumpfree ( void );

#endif /* _GPXE_MALLOC_H */
src/include/malloc.h (new file, 66 lines)
@@ -0,0 +1,66 @@
#ifndef _MALLOC_H
#define _MALLOC_H

#include <stdint.h>

/** @file
 *
 * Dynamic memory allocation
 *
 */

extern void * alloc_memblock ( size_t size, size_t align );
extern void free_memblock ( void *ptr, size_t size );
extern void * malloc ( size_t size );
extern void free ( void *ptr );
extern void mpopulate ( void *start, size_t len );
extern void mdumpfree ( void );

/**
 * Allocate memory for DMA
 *
 * @v size              Requested size
 * @v align             Physical alignment
 * @ret ptr             Memory, or NULL
 *
 * Allocates physically-aligned memory for DMA.
 *
 * @c align must be a power of two. @c size may not be zero.
 */
static inline void * malloc_dma ( size_t size, size_t phys_align ) {
        return alloc_memblock ( size, phys_align );
}

/**
 * Free memory allocated with malloc_dma()
 *
 * @v ptr               Memory allocated by malloc_dma(), or NULL
 * @v size              Size of memory, as passed to malloc_dma()
 *
 * Memory allocated with malloc_dma() can only be freed with
 * free_dma(); it cannot be freed with the standard free().
 *
 * If @c ptr is NULL, no action is taken.
 */
static inline void free_dma ( void *ptr, size_t size ) {
        free_memblock ( ptr, size );
}
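
For example, a descriptor-ring style allocation of the kind the malloc() documentation points towards malloc_dma(); the ring size, alignment and helper names are made up:

#include <malloc.h>

#define RING_SIZE 1024          /* hypothetical descriptor ring size */

/* Hypothetical: allocate a ring whose physical address is aligned to
 * its own size, as a NIC driver might require.
 */
static void * ring_alloc ( void ) {
        return malloc_dma ( RING_SIZE, RING_SIZE );
}

static void ring_free ( void *ring ) {
        /* the caller must remember the size it asked for */
        free_dma ( ring, RING_SIZE );
}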

/**
 * Allocate cleared memory
 *
 * @v nmemb             Number of members
 * @v size              Size of each member
 * @ret ptr             Allocated memory
 *
 * Allocate memory as per malloc(), and zero it.
 *
 * Note that malloc() and calloc() are identical, in the interests of
 * reducing code size. Callers should not, however, rely on malloc()
 * clearing memory, since this behaviour may change in future.
 */
static inline void * calloc ( size_t nmemb, size_t size ) {
        return malloc ( nmemb * size );
}
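
Usage sketch (the element type is hypothetical); the zeroing currently comes for free because alloc_memblock() memsets every block it hands out:

#include <malloc.h>

/* Hypothetical element type, purely for illustration */
struct table_entry {
        unsigned long key;
        void *value;
};

/* Allocate a zeroed table of n entries via calloc() */
static struct table_entry * table_alloc ( unsigned int n ) {
        return calloc ( n, sizeof ( struct table_entry ) );
}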

#endif /* _MALLOC_H */