about | summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
authorStefan Bucur <stefanb@zytor.com>2008-08-14 01:02:46 +0300
committerStefan Bucur <stefan@stefan-ubumac.(none)>2009-03-15 10:10:51 +0200
commit3d43756af624dffa27986308dadd0b6acdf99eed (patch)
tree7804d7b38a0032e6df3cf5447c4e614ffb21505c
parent1ed26aae0dd31768f6abc7634332eae2d5085f06 (diff)
downloadsyslinux-elf-3d43756af624dffa27986308dadd0b6acdf99eed.tar.gz
syslinux-elf-3d43756af624dffa27986308dadd0b6acdf99eed.tar.xz
syslinux-elf-3d43756af624dffa27986308dadd0b6acdf99eed.zip
Made room in the arena_header structure for custom data.
-rw-r--r--com32/lib/free.c22
-rw-r--r--com32/lib/malloc.c35
-rw-r--r--com32/lib/malloc.h26
-rw-r--r--com32/lib/realloc.c17
4 files changed, 55 insertions, 45 deletions
diff --git a/com32/lib/free.c b/com32/lib/free.c
index aa17080d..0cf874b4 100644
--- a/com32/lib/free.c
+++ b/com32/lib/free.c
@@ -14,22 +14,23 @@ __free_block(struct free_arena_header *ah)
pah = ah->a.prev;
nah = ah->a.next;
- if ( pah->a.type == ARENA_TYPE_FREE &&
- (char *)pah+pah->a.size == (char *)ah ) {
+ if ( ARENA_TYPE_GET(pah->a.attrs) == ARENA_TYPE_FREE &&
+ (char *)pah+ARENA_SIZE_GET(pah->a.attrs) == (char *)ah ) {
/* Coalesce into the previous block */
- pah->a.size += ah->a.size;
+ ARENA_SIZE_SET(pah->a.attrs, ARENA_SIZE_GET(pah->a.attrs) +
+ ARENA_SIZE_GET(ah->a.attrs));
pah->a.next = nah;
nah->a.prev = pah;
#ifdef DEBUG_MALLOC
- ah->a.type = ARENA_TYPE_DEAD;
+ ARENA_TYPE_SET(ah->a.attrs, ARENA_TYPE_DEAD);
#endif
ah = pah;
pah = ah->a.prev;
} else {
/* Need to add this block to the free chain */
- ah->a.type = ARENA_TYPE_FREE;
+ ARENA_TYPE_SET(ah->a.attrs, ARENA_TYPE_FREE);
ah->next_free = __malloc_head.next_free;
ah->prev_free = &__malloc_head;
@@ -39,9 +40,10 @@ __free_block(struct free_arena_header *ah)
/* In either of the previous cases, we might be able to merge
with the subsequent block... */
- if ( nah->a.type == ARENA_TYPE_FREE &&
- (char *)ah+ah->a.size == (char *)nah ) {
- ah->a.size += nah->a.size;
+ if ( ARENA_TYPE_GET(nah->a.attrs) == ARENA_TYPE_FREE &&
+ (char *)ah+ARENA_SIZE_GET(ah->a.attrs) == (char *)nah ) {
+ ARENA_SIZE_SET(ah->a.attrs, ARENA_SIZE_GET(ah->a.attrs) +
+ ARENA_SIZE_GET(nah->a.attrs));
/* Remove the old block from the chains */
nah->next_free->prev_free = nah->prev_free;
@@ -50,7 +52,7 @@ __free_block(struct free_arena_header *ah)
nah->a.next->a.prev = ah;
#ifdef DEBUG_MALLOC
- nah->a.type = ARENA_TYPE_DEAD;
+ ARENA_TYPE_SET(nah->a.attrs, ARENA_TYPE_DEAD);
#endif
}
@@ -69,7 +71,7 @@ void free(void *ptr)
((struct arena_header *)ptr - 1);
#ifdef DEBUG_MALLOC
- assert( ah->a.type == ARENA_TYPE_USED );
+ assert( ARENA_TYPE_GET(ah->a.attrs) == ARENA_TYPE_USED );
#endif
__free_block(ah);
diff --git a/com32/lib/malloc.c b/com32/lib/malloc.c
index f5b1cc99..b0a8fe25 100644
--- a/com32/lib/malloc.c
+++ b/com32/lib/malloc.c
@@ -12,8 +12,8 @@
struct free_arena_header __malloc_head =
{
{
+ (void*)0,
ARENA_TYPE_HEAD,
- 0,
&__malloc_head,
&__malloc_head,
},
@@ -47,8 +47,8 @@ static void __constructor init_memory_arena(void)
__stack_size = total_space - 4*sizeof(struct arena_header);
fp = (struct free_arena_header *)start;
- fp->a.type = ARENA_TYPE_FREE;
- fp->a.size = total_space - __stack_size;
+ ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_FREE);
+ ARENA_SIZE_SET(fp->a.attrs, total_space - __stack_size);
/* Insert into chains */
fp->a.next = fp->a.prev = &__malloc_head;
@@ -62,7 +62,7 @@ static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
size_t fsize;
struct free_arena_header *nfp, *na;
- fsize = fp->a.size;
+ fsize = ARENA_SIZE_GET(fp->a.attrs);
/* We need the 2* to account for the larger requirements of a free block */
if ( fsize >= size+2*sizeof(struct arena_header) ) {
@@ -70,10 +70,10 @@ static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
nfp = (struct free_arena_header *)((char *)fp + size);
na = fp->a.next;
- nfp->a.type = ARENA_TYPE_FREE;
- nfp->a.size = fsize-size;
- fp->a.type = ARENA_TYPE_USED;
- fp->a.size = size;
+ ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
+ ARENA_SIZE_SET(nfp->a.attrs, fsize-size);
+ ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
+ ARENA_SIZE_SET(fp->a.attrs, size);
/* Insert into all-block chain */
nfp->a.prev = fp;
@@ -88,7 +88,7 @@ static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
fp->prev_free->next_free = nfp;
} else {
/* Allocate the whole block */
- fp->a.type = ARENA_TYPE_USED;
+ ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
/* Remove from free chain */
fp->next_free->prev_free = fp->prev_free;
@@ -108,9 +108,9 @@ void *malloc(size_t size)
/* Add the obligatory arena header, and round up */
size = (size+2*sizeof(struct arena_header)-1) & ARENA_SIZE_MASK;
- for ( fp = __malloc_head.next_free ; fp->a.type != ARENA_TYPE_HEAD ;
+ for ( fp = __malloc_head.next_free ; ARENA_TYPE_GET(fp->a.attrs) != ARENA_TYPE_HEAD ;
fp = fp->next_free ) {
- if ( fp->a.size >= size ) {
+ if ( ARENA_SIZE_GET(fp->a.attrs) >= size ) {
/* Found fit -- allocate out of this block */
return __malloc_from_block(fp, size);
}
@@ -146,10 +146,10 @@ int posix_memalign(void **memptr, size_t alignment, size_t size) {
*memptr = NULL;
- for (fp = __malloc_head.next_free; fp->a.type != ARENA_TYPE_HEAD;
+ for (fp = __malloc_head.next_free; ARENA_TYPE_GET(fp->a.attrs) != ARENA_TYPE_HEAD;
fp = fp->next_free) {
- if (fp->a.size <= size)
+ if (ARENA_SIZE_GET(fp->a.attrs) <= size)
continue;
align_addr = (uintptr_t)fp;
@@ -164,15 +164,16 @@ int posix_memalign(void **memptr, size_t alignment, size_t size) {
align_addr += alignment;
// See if now we have enough space
- if (align_addr + size > (uintptr_t)fp + fp->a.size)
+ if (align_addr + size > (uintptr_t)fp + ARENA_SIZE_GET(fp->a.attrs))
continue;
// We have a winner...
if (align_addr - (uintptr_t)fp > sizeof(struct arena_header)) {
// We must split the block before the alignment point
nfp = (struct free_arena_header*)(align_addr - sizeof(struct arena_header));
- nfp->a.type = ARENA_TYPE_FREE;
- nfp->a.size = fp->a.size - ((uintptr_t)nfp - (uintptr_t)fp);
+ ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
+ ARENA_SIZE_SET(nfp->a.attrs,
+ ARENA_SIZE_GET(fp->a.attrs) - ((uintptr_t)nfp - (uintptr_t)fp));
nfp->a.prev = fp;
nfp->a.next = fp->a.next;
nfp->prev_free = fp;
@@ -181,7 +182,7 @@ int posix_memalign(void **memptr, size_t alignment, size_t size) {
nfp->a.next->a.prev = nfp;
nfp->next_free->prev_free = nfp;
- fp->a.size = (uintptr_t)nfp - (uintptr_t)fp;
+ ARENA_SIZE_SET(fp->a.attrs, (uintptr_t)nfp - (uintptr_t)fp);
fp->a.next = nfp;
fp->next_free = nfp;
diff --git a/com32/lib/malloc.h b/com32/lib/malloc.h
index b873ac05..e136e538 100644
--- a/com32/lib/malloc.h
+++ b/com32/lib/malloc.h
@@ -22,27 +22,33 @@ struct free_arena_header;
* alignment unit.
*/
struct arena_header {
- size_t type;
- size_t size; /* Also gives the location of the next entry */
+ void *tag;
+ size_t attrs; /* Bits 0..1: Type, 2..3: Unused, 4..31: MSB of the size */
struct free_arena_header *next, *prev;
};
+
+#define ARENA_TYPE_USED 0x0
+#define ARENA_TYPE_FREE 0x1
+#define ARENA_TYPE_HEAD 0x2
#ifdef DEBUG_MALLOC
-#define ARENA_TYPE_USED 0x64e69c70
-#define ARENA_TYPE_FREE 0x012d610a
-#define ARENA_TYPE_HEAD 0x971676b5
-#define ARENA_TYPE_DEAD 0xeeeeeeee
-#else
-#define ARENA_TYPE_USED 0
-#define ARENA_TYPE_FREE 1
-#define ARENA_TYPE_HEAD 2
+#define ARENA_TYPE_DEAD 0x3
#endif
#define ARENA_SIZE_MASK (~(uintptr_t)(sizeof(struct arena_header)-1))
+#define ARENA_TYPE_MASK ((size_t)0x3)
#define ARENA_ALIGN_UP(p) ((char *)(((uintptr_t)(p) + ~ARENA_SIZE_MASK) & ARENA_SIZE_MASK))
#define ARENA_ALIGN_DOWN(p) ((char *)((uintptr_t)(p) & ARENA_SIZE_MASK))
+#define ARENA_SIZE_GET(attrs) ((attrs) & ARENA_SIZE_MASK)
+#define ARENA_TYPE_GET(attrs) ((attrs) & ARENA_TYPE_MASK)
+
+#define ARENA_SIZE_SET(attrs, size) \
+ ((attrs) = ((size) & ARENA_SIZE_MASK) | ((attrs) & ~ARENA_SIZE_MASK))
+#define ARENA_TYPE_SET(attrs, type) \
+ ((attrs) = ((attrs) & ~ARENA_TYPE_MASK) | ((type) & ARENA_TYPE_MASK))
+
/*
* This structure should be no more than twice the size of the
* previous structure.
diff --git a/com32/lib/realloc.c b/com32/lib/realloc.c
index 2161a758..a9b0ca4e 100644
--- a/com32/lib/realloc.c
+++ b/com32/lib/realloc.c
@@ -26,7 +26,7 @@ void *realloc(void *ptr, size_t size)
((struct arena_header *)ptr - 1);
/* Actual size of the old block */
- oldsize = ah->a.size;
+ oldsize = ARENA_SIZE_GET(ah->a.attrs);
/* Add the obligatory arena header, and round up */
newsize = (size+2*sizeof(struct arena_header)-1) & ARENA_SIZE_MASK;
@@ -39,15 +39,16 @@ void *realloc(void *ptr, size_t size)
xsize = oldsize;
nah = ah->a.next;
- if ((char *)nah == (char *)ah + ah->a.size &&
- nah->a.type == ARENA_TYPE_FREE &&
- oldsize + nah->a.size >= newsize) {
+ if ((char *)nah == (char *)ah + ARENA_SIZE_GET(ah->a.attrs) &&
+ ARENA_TYPE_GET(nah->a.attrs) == ARENA_TYPE_FREE &&
+ oldsize + ARENA_SIZE_GET(nah->a.attrs) >= newsize) {
/* Merge in subsequent free block */
ah->a.next = nah->a.next;
ah->a.next->a.prev = ah;
nah->next_free->prev_free = nah->prev_free;
nah->prev_free->next_free = nah->next_free;
- xsize = (ah->a.size += nah->a.size);
+ xsize = (ARENA_SIZE_SET(ah->a.attrs, ARENA_SIZE_GET(ah->a.attrs) +
+ ARENA_SIZE_GET(nah->a.attrs)));
}
if (xsize >= newsize) {
@@ -55,9 +56,9 @@ void *realloc(void *ptr, size_t size)
if (xsize >= newsize + 2*sizeof(struct arena_header)) {
/* Residual free block at end */
nah = (struct free_arena_header *)((char *)ah + newsize);
- nah->a.type = ARENA_TYPE_FREE;
- nah->a.size = xsize - newsize;
- ah->a.size = newsize;
+ ARENA_TYPE_SET(nah->a.attrs, ARENA_TYPE_FREE);
+ ARENA_SIZE_SET(nah->a.attrs, xsize - newsize);
+ ARENA_SIZE_SET(ah->a.attrs, newsize);
/* Insert into block list */
nah->a.next = ah->a.next;