Simplify malloc code

Remove all kinds of unused configuration options and dead code.
Ulrich Drepper 2011-09-10 18:10:17 -04:00
parent d063d16433
commit 22a8918713
5 changed files with 257 additions and 1751 deletions

ChangeLog

@@ -1,5 +1,10 @@
 2011-09-10  Ulrich Drepper  <drepper@gmail.com>
 
+	* malloc/malloc.c: Removed unused configurations and dead code.
+	* malloc/arena.c: Likewise.
+	* malloc/hooks.c: Likewise.
+	* malloc/Makefile (-CPPFLAGS-malloc.c): Don't add -DATOMIC_FASTBINS.
+
 	* include/tls.h: Removed.  USE___THREAD must always be defined.
 	* bits/libc-tsd.h: Don't handle !USE___THREAD.
 	* elf/dl-libc.c: Likewise.

malloc/Makefile

@@ -125,7 +125,7 @@ endif
 tst-mcheck-ENV = MALLOC_CHECK_=3
 
-CPPFLAGS-malloc.c += -DPER_THREAD -DATOMIC_FASTBINS
+CPPFLAGS-malloc.c += -DPER_THREAD
 # Uncomment this for test releases.  For public releases it is too expensive.
 #CPPFLAGS-malloc.o += -DMALLOC_DEBUG=1
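For context: the MALLOC_CHECK_=3 setting kept above turns on the malloc_check/free_check wrappers shown in the malloc/hooks.c diff below. A hedged illustration (not part of this commit) of the kind of off-by-one error those wrappers exist to catch, run against glibc as MALLOC_CHECK_=3 ./a.out:

/* Overrun by a single byte; with MALLOC_CHECK_=3 the checking free()
   should report an "invalid pointer" style error instead of silently
   corrupting the heap. */
#include <stdlib.h>
#include <string.h>

int main(void)
{
  char *p = malloc(16);
  if (p == NULL)
    return 1;
  memset(p, 'x', 17);   /* one byte past the requested size */
  free(p);              /* the overwritten magic byte is detected here */
  return 0;
}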

malloc/arena.c

@@ -53,8 +53,7 @@
 /* A heap is a single contiguous memory region holding (coalesceable)
    malloc_chunks.  It is allocated with mmap() and always starts at an
-   address aligned to HEAP_MAX_SIZE.  Not used unless compiling with
-   USE_ARENAS. */
+   address aligned to HEAP_MAX_SIZE. */
 
 typedef struct _heap_info {
   mstate ar_ptr; /* Arena for this heap. */
@@ -98,7 +97,6 @@ int __malloc_initialized = -1;
 /**************************************************************************/
 
-#if USE_ARENAS
 
 /* arena_get() acquires an arena and locks the corresponding mutex.
    First, try the one last locked successfully by this thread.  (This
@@ -114,19 +112,19 @@ int __malloc_initialized = -1;
 } while(0)
 
 #define arena_lookup(ptr) do { \
-  Void_t *vptr = NULL; \
+  void *vptr = NULL; \
   ptr = (mstate)tsd_getspecific(arena_key, vptr); \
 } while(0)
 
 #ifdef PER_THREAD
-#define arena_lock(ptr, size) do { \
+# define arena_lock(ptr, size) do { \
   if(ptr) \
     (void)mutex_lock(&ptr->mutex); \
   else \
     ptr = arena_get2(ptr, (size)); \
 } while(0)
 #else
-#define arena_lock(ptr, size) do { \
+# define arena_lock(ptr, size) do { \
   if(ptr && !mutex_trylock(&ptr->mutex)) { \
     THREAD_STAT(++(ptr->stat_lock_direct)); \
   } else \
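The macros above implement a two-step fast path: arena_lookup fetches the arena cached in thread-specific data, and arena_lock either locks it or falls back to arena_get2. A minimal sketch of that pattern, written with plain pthreads instead of glibc's internal tsd_*/mutex_* wrappers (arena_t and arena_get2_stub are illustrative names, not glibc's):

#include <pthread.h>
#include <stddef.h>

typedef struct arena { pthread_mutex_t mutex; } arena_t;

static pthread_key_t arena_key;   /* created once via pthread_key_create */
static arena_t main_arena = { PTHREAD_MUTEX_INITIALIZER };

/* Stand-in for arena_get2(): fall back to (and lock) some arena,
   caching it in thread-specific data for the next call. */
static arena_t *arena_get2_stub(size_t size)
{
  (void) size;
  pthread_mutex_lock(&main_arena.mutex);
  pthread_setspecific(arena_key, &main_arena);
  return &main_arena;
}

/* Equivalent of arena_get() in the PER_THREAD case: try the arena
   cached in TSD first and lock it; otherwise take the slow path. */
static arena_t *arena_get(size_t size)
{
  arena_t *ptr = pthread_getspecific(arena_key);   /* arena_lookup */
  if (ptr != NULL)
    pthread_mutex_lock(&ptr->mutex);               /* arena_lock */
  else
    ptr = arena_get2_stub(size);
  return ptr;
}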
@@ -141,41 +139,16 @@ int __malloc_initialized = -1;
 #define arena_for_chunk(ptr) \
  (chunk_non_main_arena(ptr) ? heap_for_ptr(ptr)->ar_ptr : &main_arena)
 
-#else /* !USE_ARENAS */
-
-/* There is only one arena, main_arena. */
-
-#if THREAD_STATS
-#define arena_get(ar_ptr, sz) do { \
-  ar_ptr = &main_arena; \
-  if(!mutex_trylock(&ar_ptr->mutex)) \
-    ++(ar_ptr->stat_lock_direct); \
-  else { \
-    (void)mutex_lock(&ar_ptr->mutex); \
-    ++(ar_ptr->stat_lock_wait); \
-  } \
-} while(0)
-#else
-#define arena_get(ar_ptr, sz) do { \
-  ar_ptr = &main_arena; \
-  (void)mutex_lock(&ar_ptr->mutex); \
-} while(0)
-#endif
-#define arena_for_chunk(ptr) (&main_arena)
-
-#endif /* USE_ARENAS */
 
 /**************************************************************************/
 
-#ifndef NO_THREADS
-
 /* atfork support. */
 
 static __malloc_ptr_t (*save_malloc_hook) (size_t __size,
                                            __const __malloc_ptr_t);
 static void (*save_free_hook) (__malloc_ptr_t __ptr,
                                __const __malloc_ptr_t);
-static Void_t* save_arena;
+static void* save_arena;
 
 #ifdef ATFORK_MEM
 ATFORK_MEM;
@@ -184,16 +157,16 @@ ATFORK_MEM;
 
 /* Magic value for the thread-specific arena pointer when
    malloc_atfork() is in use. */
 
-#define ATFORK_ARENA_PTR ((Void_t*)-1)
+#define ATFORK_ARENA_PTR ((void*)-1)
 
 /* The following hooks are used while the `atfork' handling mechanism
    is active. */
 
-static Void_t*
-malloc_atfork(size_t sz, const Void_t *caller)
+static void*
+malloc_atfork(size_t sz, const void *caller)
 {
-  Void_t *vptr = NULL;
-  Void_t *victim;
+  void *vptr = NULL;
+  void *victim;
 
   tsd_getspecific(arena_key, vptr);
   if(vptr == ATFORK_ARENA_PTR) {
@@ -217,9 +190,9 @@ malloc_atfork(size_t sz, const Void_t *caller)
 }
 
 static void
-free_atfork(Void_t* mem, const Void_t *caller)
+free_atfork(void* mem, const void *caller)
 {
-  Void_t *vptr = NULL;
+  void *vptr = NULL;
   mstate ar_ptr;
   mchunkptr p;                          /* chunk corresponding to mem */
@@ -228,27 +201,15 @@ free_atfork(Void_t* mem, const Void_t *caller)
 
   p = mem2chunk(mem);         /* do not bother to replicate free_check here */
 
-#if HAVE_MMAP
   if (chunk_is_mmapped(p))                       /* release mmapped memory. */
   {
     munmap_chunk(p);
     return;
   }
-#endif
 
-#ifdef ATOMIC_FASTBINS
   ar_ptr = arena_for_chunk(p);
   tsd_getspecific(arena_key, vptr);
   _int_free(ar_ptr, p, vptr == ATFORK_ARENA_PTR);
-#else
-  ar_ptr = arena_for_chunk(p);
-  tsd_getspecific(arena_key, vptr);
-  if(vptr != ATFORK_ARENA_PTR)
-    (void)mutex_lock(&ar_ptr->mutex);
-  _int_free(ar_ptr, p);
-  if(vptr != ATFORK_ARENA_PTR)
-    (void)mutex_unlock(&ar_ptr->mutex);
-#endif
 }
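With ATOMIC_FASTBINS now unconditional, free_atfork no longer takes the arena mutex conditionally; it simply tells _int_free whether the lock is already held, which is the case exactly when the thread's TSD slot holds the ATFORK_ARENA_PTR sentinel. A sketch of that sentinel trick with hypothetical stand-ins (int_free_stub is not a glibc name):

#include <pthread.h>
#include <stddef.h>

#define ATFORK_ARENA_PTR ((void *) -1)  /* sentinel, as in the diff */

static pthread_key_t arena_key;

/* Stand-in for _int_free(); have_lock says the caller already owns
   the arena mutex, so it must not be taken again. */
static void int_free_stub(void *arena, void *mem, int have_lock)
{
  (void) arena; (void) mem; (void) have_lock;
  /* real freeing logic elided */
}

static void free_during_fork(void *arena, void *mem)
{
  void *vptr = pthread_getspecific(arena_key);
  /* True exactly for the thread running the fork handlers, which
     already holds every arena lock. */
  int_free_stub(arena, mem, vptr == ATFORK_ARENA_PTR);
}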
@@ -270,7 +231,7 @@ ptmalloc_lock_all (void)
     return;
   if (mutex_trylock(&list_lock))
     {
-      Void_t *my_arena;
+      void *my_arena;
       tsd_getspecific(arena_key, my_arena);
       if (my_arena == ATFORK_ARENA_PTR)
         /* This is the same thread which already locks the global list.
@@ -330,11 +291,9 @@ ptmalloc_unlock_all2 (void)
   if(__malloc_initialized < 1)
     return;
-#if defined _LIBC || defined MALLOC_HOOKS
   tsd_setspecific(arena_key, save_arena);
   __malloc_hook = save_malloc_hook;
   __free_hook = save_free_hook;
-#endif
 #ifdef PER_THREAD
   free_list = NULL;
 #endif
@@ -359,10 +318,7 @@ ptmalloc_unlock_all2 (void)
 #endif
 
-#endif /* !defined NO_THREADS */
-
 /* Initialization routine. */
-#ifdef _LIBC
 #include <string.h>
 extern char **_environ;
@@ -396,7 +352,6 @@ next_env_entry (char ***position)
   return result;
 }
-#endif /* _LIBC */
 
 /* Set up basic state so that _int_malloc et al can work. */
 static void
@@ -417,8 +372,7 @@ ptmalloc_init_minimal (void)
 }
 
-#ifdef _LIBC
-# ifdef SHARED
+#ifdef SHARED
 static void *
 __failing_morecore (ptrdiff_t d)
 {
@@ -427,17 +381,12 @@ __failing_morecore (ptrdiff_t d)
 extern struct dl_open_hook *_dl_open_hook;
 libc_hidden_proto (_dl_open_hook);
-# endif
 #endif
 
 static void
 ptmalloc_init (void)
 {
-#if __STD_C
   const char* s;
-#else
-  char* s;
-#endif
   int secure = 0;
 
   if(__malloc_initialized >= 0) return;
@@ -448,7 +397,7 @@ ptmalloc_init (void)
   mutex_init(&main_arena.mutex);
   main_arena.next = &main_arena;
 
-#if defined _LIBC && defined SHARED
+#ifdef SHARED
   /* In case this libc copy is in a non-default namespace, never use brk.
      Likewise if dlopened from statically linked program. */
   Dl_info di;
@@ -462,9 +411,8 @@ ptmalloc_init (void)
   mutex_init(&list_lock);
   tsd_key_create(&arena_key, NULL);
-  tsd_setspecific(arena_key, (Void_t *)&main_arena);
+  tsd_setspecific(arena_key, (void *)&main_arena);
   thread_atfork(ptmalloc_lock_all, ptmalloc_unlock_all, ptmalloc_unlock_all2);
-#ifdef _LIBC
   secure = __libc_enable_secure;
   s = NULL;
   if (__builtin_expect (_environ != NULL, 1))
@@ -532,22 +480,6 @@ ptmalloc_init (void)
         }
       }
     }
-#else
-  if (! secure)
-    {
-      if((s = getenv("MALLOC_TRIM_THRESHOLD_")))
-        mALLOPt(M_TRIM_THRESHOLD, atoi(s));
-      if((s = getenv("MALLOC_TOP_PAD_")))
-        mALLOPt(M_TOP_PAD, atoi(s));
-      if((s = getenv("MALLOC_PERTURB_")))
-        mALLOPt(M_PERTURB, atoi(s));
-      if((s = getenv("MALLOC_MMAP_THRESHOLD_")))
-        mALLOPt(M_MMAP_THRESHOLD, atoi(s));
-      if((s = getenv("MALLOC_MMAP_MAX_")))
-        mALLOPt(M_MMAP_MAX, atoi(s));
-    }
-  s = getenv("MALLOC_CHECK_");
-#endif
   if(s && s[0]) {
     mALLOPt(M_CHECK_ACTION, (int)(s[0] - '0'));
     if (check_action != 0)
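The surviving branch above is the former _LIBC path: instead of one getenv() call per variable, it walks _environ once and dispatches on the MALLOC_-prefixed entries. A simplified sketch of that scan; this next_env_entry is a stand-in for the static helper in arena.c, not a copy of it:

#include <string.h>

extern char **_environ;

/* Return the text after "MALLOC_" for the next matching entry,
   advancing *position so the scan can resume where it left off. */
static char *next_env_entry(char ***position)
{
  char **current = *position;
  char *result = NULL;

  while (*current != NULL)
    {
      if (strncmp(*current, "MALLOC_", 7) == 0)
        {
          result = &(*current)[7];
          ++current;
          break;
        }
      ++current;
    }
  *position = current;
  return result;
}

static void scan_malloc_env(void)
{
  char **runp = _environ;
  char *envline;

  while ((envline = next_env_entry(&runp)) != NULL)
    {
      /* e.g. compare envline against "CHECK_=", "TOP_PAD_=", ... */
    }
}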
@@ -569,18 +501,12 @@ thread_atfork_static(ptmalloc_lock_all, ptmalloc_unlock_all, \
 
 /* Managing heaps and arenas (for concurrent threads) */
 
-#if USE_ARENAS
-
 #if MALLOC_DEBUG > 1
 
 /* Print the complete contents of a single heap to stderr. */
 
 static void
-#if __STD_C
 dump_heap(heap_info *heap)
-#else
-dump_heap(heap) heap_info *heap;
-#endif
 {
   char *ptr;
   mchunkptr p;
@@ -621,11 +547,7 @@ static char *aligned_heap_area;
 
 static heap_info *
 internal_function
-#if __STD_C
 new_heap(size_t size, size_t top_pad)
-#else
-new_heap(size, top_pad) size_t size, top_pad;
-#endif
 {
   size_t page_mask = malloc_getpagesize - 1;
   char *p1, *p2;
@@ -695,11 +617,7 @@ new_heap(size, top_pad) size_t size, top_pad;
    multiple of the page size. */
 
 static int
-#if __STD_C
 grow_heap(heap_info *h, long diff)
-#else
-grow_heap(h, diff) heap_info *h; long diff;
-#endif
 {
   size_t page_mask = malloc_getpagesize - 1;
   long new_size;
@@ -723,11 +641,7 @@ grow_heap(h, diff) heap_info *h; long diff;
 /* Shrink a heap. */
 
 static int
-#if __STD_C
 shrink_heap(heap_info *h, long diff)
-#else
-shrink_heap(h, diff) heap_info *h; long diff;
-#endif
 {
   long new_size;
@@ -736,21 +650,15 @@ shrink_heap(h, diff) heap_info *h; long diff;
     return -1;
   /* Try to re-map the extra heap space freshly to save memory, and
      make it inaccessible. */
-#ifdef _LIBC
   if (__builtin_expect (__libc_enable_secure, 0))
-#else
-  if (1)
-#endif
     {
       if((char *)MMAP((char *)h + new_size, diff, PROT_NONE,
                       MAP_PRIVATE|MAP_FIXED) == (char *) MAP_FAILED)
         return -2;
       h->mprotect_size = new_size;
     }
-#ifdef _LIBC
   else
     madvise ((char *)h + new_size, diff, MADV_DONTNEED);
-#endif
   /*fprintf(stderr, "shrink %p %08lx\n", h, new_size);*/
 
   h->size = new_size;
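The run-time split made explicit here: in secure (set-uid) processes the freed heap tail is replaced with fresh PROT_NONE pages, so stale data can neither be read back nor reused, while other processes keep the mapping and merely let the kernel discard the page contents. A sketch under the assumption that h points into an mmap()ed heap and new_size/diff are page-aligned:

#include <sys/mman.h>

static int shrink_tail(char *h, long new_size, long diff, int secure)
{
  if (secure)
    {
      /* Map fresh, inaccessible pages over the tail. */
      if (mmap(h + new_size, (size_t) diff, PROT_NONE,
               MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0)
          == MAP_FAILED)
        return -2;
    }
  else
    /* Mapping stays; dropped contents read back as zero-filled pages. */
    madvise(h + new_size, (size_t) diff, MADV_DONTNEED);
  return 0;
}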
@@ -768,11 +676,7 @@ shrink_heap(h, diff) heap_info *h; long diff;
 
 static int
 internal_function
-#if __STD_C
 heap_trim(heap_info *heap, size_t pad)
-#else
-heap_trim(heap, pad) heap_info *heap; size_t pad;
-#endif
 {
   mstate ar_ptr = heap->ar_ptr;
   unsigned long pagesz = mp_.pagesize;
@@ -848,11 +752,6 @@ _int_new_arena(size_t size)
   /*a->next = NULL;*/
   a->system_mem = a->max_system_mem = h->size;
   arena_mem += h->size;
-#ifdef NO_THREADS
-  if((unsigned long)(mp_.mmapped_mem + arena_mem + main_arena.system_mem) >
-     mp_.max_total_mem)
-    mp_.max_total_mem = mp_.mmapped_mem + arena_mem + main_arena.system_mem;
-#endif
 
   /* Set up the top chunk, with proper alignment. */
   ptr = (char *)(a + 1);
@@ -862,7 +761,7 @@ _int_new_arena(size_t size)
   top(a) = (mchunkptr)ptr;
   set_head(top(a), (((char*)h + h->size) - ptr) | PREV_INUSE);
 
-  tsd_setspecific(arena_key, (Void_t *)a);
+  tsd_setspecific(arena_key, (void *)a);
   mutex_init(&a->mutex);
   (void)mutex_lock(&a->mutex);
@@ -903,7 +802,7 @@ get_free_list (void)
       if (result != NULL)
         {
           (void)mutex_lock(&result->mutex);
-          tsd_setspecific(arena_key, (Void_t *)result);
+          tsd_setspecific(arena_key, (void *)result);
           THREAD_STAT(++(result->stat_lock_loop));
         }
     }
@@ -958,7 +857,7 @@ reused_arena (void)
   (void)mutex_lock(&result->mutex);
 
  out:
-  tsd_setspecific(arena_key, (Void_t *)result);
+  tsd_setspecific(arena_key, (void *)result);
   THREAD_STAT(++(result->stat_lock_loop));
   next_to_use = result->next;
@@ -968,11 +867,7 @@ reused_arena (void)
 
 static mstate
 internal_function
-#if __STD_C
 arena_get2(mstate a_tsd, size_t size)
-#else
-arena_get2(a_tsd, size) mstate a_tsd; size_t size;
-#endif
 {
   mstate a;
@@ -1002,7 +897,7 @@ arena_get2(a_tsd, size) mstate a_tsd; size_t size;
           if (retried)
             (void)mutex_unlock(&list_lock);
           THREAD_STAT(++(a->stat_lock_loop));
-          tsd_setspecific(arena_key, (Void_t *)a);
+          tsd_setspecific(arena_key, (void *)a);
           return a;
         }
       a = a->next;
@@ -1034,7 +929,7 @@ arena_get2(a_tsd, size) mstate a_tsd; size_t size;
 static void __attribute__ ((section ("__libc_thread_freeres_fn")))
 arena_thread_freeres (void)
 {
-  Void_t *vptr = NULL;
+  void *vptr = NULL;
   mstate a = tsd_getspecific(arena_key, vptr);
   tsd_setspecific(arena_key, NULL);
@@ -1049,8 +944,6 @@ arena_thread_freeres (void)
 text_set_element (__libc_thread_subfreeres, arena_thread_freeres);
 #endif
 
-#endif /* USE_ARENAS */
-
 /*
  * Local variables:
  * c-basic-offset: 2

malloc/hooks.c

@@ -25,26 +25,16 @@
 /* Hooks for debugging versions.  The initial hooks just call the
    initialization routine, then do the normal work. */
 
-static Void_t*
-#if __STD_C
+static void*
 malloc_hook_ini(size_t sz, const __malloc_ptr_t caller)
-#else
-malloc_hook_ini(sz, caller)
-     size_t sz; const __malloc_ptr_t caller;
-#endif
 {
   __malloc_hook = NULL;
   ptmalloc_init();
   return public_mALLOc(sz);
 }
 
-static Void_t*
-#if __STD_C
-realloc_hook_ini(Void_t* ptr, size_t sz, const __malloc_ptr_t caller)
-#else
-realloc_hook_ini(ptr, sz, caller)
-     Void_t* ptr; size_t sz; const __malloc_ptr_t caller;
-#endif
+static void*
+realloc_hook_ini(void* ptr, size_t sz, const __malloc_ptr_t caller)
 {
   __malloc_hook = NULL;
   __realloc_hook = NULL;
@@ -52,13 +42,8 @@ realloc_hook_ini(ptr, sz, caller)
   return public_rEALLOc(ptr, sz);
 }
 
-static Void_t*
-#if __STD_C
+static void*
 memalign_hook_ini(size_t alignment, size_t sz, const __malloc_ptr_t caller)
-#else
-memalign_hook_ini(alignment, sz, caller)
-     size_t alignment; size_t sz; const __malloc_ptr_t caller;
-#endif
 {
   __memalign_hook = NULL;
   ptmalloc_init();
@@ -108,13 +93,9 @@ __malloc_check_init()
 /* Instrument a chunk with overrun detector byte(s) and convert it
    into a user pointer with requested size sz. */
 
-static Void_t*
+static void*
 internal_function
-#if __STD_C
-mem2mem_check(Void_t *ptr, size_t sz)
-#else
-mem2mem_check(ptr, sz) Void_t *ptr; size_t sz;
-#endif
+mem2mem_check(void *ptr, size_t sz)
 {
   mchunkptr p;
   unsigned char* m_ptr = (unsigned char*)BOUNDED_N(ptr, sz);
@@ -133,7 +114,7 @@ mem2mem_check(ptr, sz) Void_t *ptr; size_t sz;
     m_ptr[i] = 0xFF;
   }
   m_ptr[sz] = MAGICBYTE(p);
-  return (Void_t*)m_ptr;
+  return (void*)m_ptr;
 }
 
 /* Convert a pointer to be free()d or realloc()ed to a valid chunk
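mem2mem_check() plants a magic byte derived from the chunk address immediately after the requested size; mem2chunk_check() recomputes and verifies it when the block is freed or reallocated. A sketch of the scheme; this MAGICBYTE derivation is illustrative, not the exact formula in hooks.c:

#include <stdint.h>
#include <stddef.h>

/* Illustrative canary keyed to the block address p. */
#define MAGICBYTE(p) \
  ((unsigned char)((((uintptr_t)(p) >> 3) ^ ((uintptr_t)(p) >> 11)) & 0xFF))

/* Instrument: the caller guarantees one spare byte past sz. */
static void *mem2mem_check_stub(void *ptr, size_t sz)
{
  unsigned char *m_ptr = ptr;
  m_ptr[sz] = MAGICBYTE(m_ptr);
  return m_ptr;
}

/* Verify on free/realloc; nonzero means the canary survived. */
static int canary_intact(void *ptr, size_t sz)
{
  unsigned char *m_ptr = ptr;
  return m_ptr[sz] == MAGICBYTE(m_ptr);
}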
@@ -141,11 +122,7 @@ mem2mem_check(ptr, sz) Void_t *ptr; size_t sz;
 
 static mchunkptr
 internal_function
-#if __STD_C
-mem2chunk_check(Void_t* mem, unsigned char **magic_p)
-#else
-mem2chunk_check(mem, magic_p) Void_t* mem; unsigned char **magic_p;
-#endif
+mem2chunk_check(void* mem, unsigned char **magic_p)
 {
   mchunkptr p;
   INTERNAL_SIZE_T sz, c;
@@ -200,11 +177,7 @@ mem2chunk_check(mem, magic_p) Void_t* mem; unsigned char **magic_p;
 
 static int
 internal_function
-#if __STD_C
 top_check(void)
-#else
-top_check()
-#endif
 {
   mchunkptr t = top(&main_arena);
   char* brk, * new_brk;
@@ -246,14 +219,10 @@ top_check()
   return 0;
 }
 
-static Void_t*
-#if __STD_C
-malloc_check(size_t sz, const Void_t *caller)
-#else
-malloc_check(sz, caller) size_t sz; const Void_t *caller;
-#endif
+static void*
+malloc_check(size_t sz, const void *caller)
 {
-  Void_t *victim;
+  void *victim;
 
   if (sz+1 == 0) {
     MALLOC_FAILURE_ACTION;
@@ -267,11 +236,7 @@ malloc_check(sz, caller) size_t sz; const Void_t *caller;
 }
 
 static void
-#if __STD_C
-free_check(Void_t* mem, const Void_t *caller)
-#else
-free_check(mem, caller) Void_t* mem; const Void_t *caller;
-#endif
+free_check(void* mem, const void *caller)
 {
   mchunkptr p;
@@ -284,34 +249,20 @@ free_check(mem, caller) Void_t* mem; const Void_t *caller;
     malloc_printerr(check_action, "free(): invalid pointer", mem);
     return;
   }
-#if HAVE_MMAP
   if (chunk_is_mmapped(p)) {
     (void)mutex_unlock(&main_arena.mutex);
     munmap_chunk(p);
     return;
   }
-#endif
-#if 0 /* Erase freed memory. */
-  memset(mem, 0, chunksize(p) - (SIZE_SZ+1));
-#endif
-#ifdef ATOMIC_FASTBINS
   _int_free(&main_arena, p, 1);
-#else
-  _int_free(&main_arena, p);
-#endif
   (void)mutex_unlock(&main_arena.mutex);
 }
 
-static Void_t*
-#if __STD_C
-realloc_check(Void_t* oldmem, size_t bytes, const Void_t *caller)
-#else
-realloc_check(oldmem, bytes, caller)
-     Void_t* oldmem; size_t bytes; const Void_t *caller;
-#endif
+static void*
+realloc_check(void* oldmem, size_t bytes, const void *caller)
 {
   INTERNAL_SIZE_T nb;
-  Void_t* newmem = 0;
+  void* newmem = 0;
   unsigned char *magic_p;
 
   if (bytes+1 == 0) {
@@ -335,7 +286,6 @@ realloc_check(oldmem, bytes, caller)
   checked_request2size(bytes+1, nb);
   (void)mutex_lock(&main_arena.mutex);
 
-#if HAVE_MMAP
   if (chunk_is_mmapped(oldp)) {
 #if HAVE_MREMAP
     mchunkptr newp = mremap_chunk(oldp, nb);
@@ -358,27 +308,12 @@ realloc_check(oldmem, bytes, caller)
       }
     }
   } else {
-#endif /* HAVE_MMAP */
     if (top_check() >= 0) {
       INTERNAL_SIZE_T nb;
       checked_request2size(bytes + 1, nb);
       newmem = _int_realloc(&main_arena, oldp, oldsize, nb);
     }
-#if 0 /* Erase freed memory. */
-    if(newmem)
-      newp = mem2chunk(newmem);
-    nb = chunksize(newp);
-    if(oldp<newp || oldp>=chunk_at_offset(newp, nb)) {
-      memset((char*)oldmem + 2*sizeof(mbinptr), 0,
-             oldsize - (2*sizeof(mbinptr)+2*SIZE_SZ+1));
-    } else if(nb > oldsize+SIZE_SZ) {
-      memset((char*)BOUNDED_N(chunk2mem(newp), bytes) + oldsize,
-             0, nb - (oldsize+SIZE_SZ));
-    }
-#endif
-#if HAVE_MMAP
   }
-#endif
 
   /* mem2chunk_check changed the magic byte in the old chunk.
      If newmem is NULL, then the old chunk will still be used though,
@@ -390,15 +325,10 @@ realloc_check(oldmem, bytes, caller)
   return mem2mem_check(newmem, bytes);
 }
 
-static Void_t*
-#if __STD_C
-memalign_check(size_t alignment, size_t bytes, const Void_t *caller)
-#else
-memalign_check(alignment, bytes, caller)
-     size_t alignment; size_t bytes; const Void_t *caller;
-#endif
+static void*
+memalign_check(size_t alignment, size_t bytes, const void *caller)
 {
-  Void_t* mem;
+  void* mem;
 
   if (alignment <= MALLOC_ALIGNMENT) return malloc_check(bytes, NULL);
   if (alignment < MINSIZE) alignment = MINSIZE;
@@ -414,77 +344,6 @@ memalign_check(alignment, bytes, caller)
 
   return mem2mem_check(mem, bytes);
 }
 
-#ifndef NO_THREADS
-
-# ifdef _LIBC
-#  ifndef SHARED
-    /* These routines are never needed in this configuration. */
-#   define NO_STARTER
-#  endif
-# endif
-
-# ifdef NO_STARTER
-#  undef NO_STARTER
-# else
-
-/* The following hooks are used when the global initialization in
-   ptmalloc_init() hasn't completed yet. */
-
-static Void_t*
-#if __STD_C
-malloc_starter(size_t sz, const Void_t *caller)
-#else
-malloc_starter(sz, caller) size_t sz; const Void_t *caller;
-#endif
-{
-  Void_t* victim;
-
-  victim = _int_malloc(&main_arena, sz);
-
-  return victim ? BOUNDED_N(victim, sz) : 0;
-}
-
-static Void_t*
-#if __STD_C
-memalign_starter(size_t align, size_t sz, const Void_t *caller)
-#else
-memalign_starter(align, sz, caller) size_t align, sz; const Void_t *caller;
-#endif
-{
-  Void_t* victim;
-
-  victim = _int_memalign(&main_arena, align, sz);
-
-  return victim ? BOUNDED_N(victim, sz) : 0;
-}
-
-static void
-#if __STD_C
-free_starter(Void_t* mem, const Void_t *caller)
-#else
-free_starter(mem, caller) Void_t* mem; const Void_t *caller;
-#endif
-{
-  mchunkptr p;
-
-  if(!mem) return;
-  p = mem2chunk(mem);
-#if HAVE_MMAP
-  if (chunk_is_mmapped(p)) {
-    munmap_chunk(p);
-    return;
-  }
-#endif
-#ifdef ATOMIC_FASTBINS
-  _int_free(&main_arena, p, 1);
-#else
-  _int_free(&main_arena, p);
-#endif
-}
-
-# endif /* !defiend NO_STARTER */
-#endif /* NO_THREADS */
 
 /* Get/set state: malloc_get_state() records the current state of all
    malloc variables (_except_ for the actual heap contents and `hook'
@@ -529,7 +388,7 @@ struct malloc_save_state {
   unsigned long narenas;
 };
 
-Void_t*
+void*
 public_gET_STATe(void)
 {
   struct malloc_save_state* ms;
@@ -564,11 +423,7 @@ public_gET_STATe(void)
   ms->mmap_threshold = mp_.mmap_threshold;
   ms->check_action = check_action;
   ms->max_sbrked_mem = main_arena.max_system_mem;
-#ifdef NO_THREADS
-  ms->max_total_mem = mp_.max_total_mem;
-#else
   ms->max_total_mem = 0;
-#endif
   ms->n_mmaps = mp_.n_mmaps;
   ms->max_n_mmaps = mp_.max_n_mmaps;
   ms->mmapped_mem = mp_.mmapped_mem;
@@ -581,11 +436,11 @@ public_gET_STATe(void)
   ms->narenas = narenas;
 #endif
   (void)mutex_unlock(&main_arena.mutex);
-  return (Void_t*)ms;
+  return (void*)ms;
 }
 
 int
-public_sET_STATe(Void_t* msptr)
+public_sET_STATe(void* msptr)
 {
   struct malloc_save_state* ms = (struct malloc_save_state*)msptr;
   size_t i;
@@ -656,9 +511,6 @@ public_sET_STATe(Void_t* msptr)
   mp_.mmap_threshold = ms->mmap_threshold;
   check_action = ms->check_action;
   main_arena.max_system_mem = ms->max_sbrked_mem;
-#ifdef NO_THREADS
-  mp_.max_total_mem = ms->max_total_mem;
-#endif
   mp_.n_mmaps = ms->n_mmaps;
   mp_.max_n_mmaps = ms->max_n_mmaps;
   mp_.mmapped_mem = ms->mmapped_mem;
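public_gET_STATe and public_sET_STATe back the exported malloc_get_state/malloc_set_state pair, kept mainly so Emacs' unexec dumps could survive malloc changes. A minimal round-trip, assuming a glibc old enough to still declare the pair in <malloc.h>:

#include <malloc.h>
#include <stdlib.h>

int main(void)
{
  void *state = malloc_get_state();   /* snapshot, itself malloc()ed */
  if (state == NULL)
    return 1;
  /* ... tweak parameters with mallopt(), allocate, free ... */
  malloc_set_state(state);            /* restore the saved parameters */
  free(state);
  return 0;
}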

malloc/malloc.c: file diff suppressed because it is too large.