malloc: Remove check_action variable [BZ #21754]
Clean up calls to malloc_printerr and trim its argument list. This also removes a few bits of work done before calling malloc_printerr (such as unlocking operations). The tunable/environment variable still enables the lightweight additional malloc checking, but mallopt (M_CHECK_ACTION) no longer has any effect.
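The shape of the change, in isolation: malloc_printerr used to take an action code, the faulting address, and the arena, and some callers tried to unlock and continue after reporting; after this patch it takes only the message string and never returns. Below is a small standalone sketch of that post-patch error path (not glibc source: report_fatal stands in for malloc_printerr, and fprintf/abort stand in for __libc_message (do_abort, "%s\n", str)).

#include <stdio.h>
#include <stdlib.h>

/* Fatal heap-corruption report: print the message and terminate.
   No action flag, faulting pointer, or arena argument, and no return path.  */
static void report_fatal (const char *str) __attribute__ ((noreturn));

static void
report_fatal (const char *str)
{
  fprintf (stderr, "%s\n", str);
  abort ();
}

int
main (void)
{
  int corrupted = 1;   /* stand-in for a failed heap-consistency check */
  if (corrupted)
    report_fatal ("free(): invalid pointer");   /* does not return */
  return 0;
}

With this signature, each detection site reduces to a single call, which is why the errstr variables, errout labels, unlock-before-report sequences, and post-report return statements can all be deleted in the diff below.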
parent 44bcba80f3
commit ac3ed168d0

ChangeLog: 27 lines changed
@@ -1,3 +1,30 @@
+2017-08-30  Florian Weimer  <fweimer@redhat.com>
+
+	[BZ #21754]
+	* malloc/arena.c (TUNABLE_CALLBACK set_mallopt_check): Do not set
+	check_action.
+	(ptmalloc_init): Do not set or use check_action.
+	* malloc/hooks.c (malloc_check_get_size, realloc_check): Adjust
+	call to malloc_printerr.  Remove return statement.
+	(free_check): Likewise.  Remove arena unlock.
+	(top_check): Update comment.  Adjust call to malloc_printerr.
+	Remove heap repair code.
+	* malloc/malloc.c (unlink): Adjust calls to malloc_printerr.
+	(DEFAULT_CHECK_ACTION, check_action): Remove definitions.
+	(sysmalloc): Adjust call to malloc_printerr.
+	(munmap_chunk, __libc_realloc): Likewise.  Remove return
+	statement.
+	(_int_malloc, _int_realloc): Likewise.  Remove errstr variable.
+	Remove errout label and corresponding gotos.
+	(_int_free): Likewise.  Remove arena unlock.
+	(do_set_mallopt_check): Do not set check_action.
+	(malloc_printerr): Adjust parameter list.  Do not mark arena as
+	corrupt.
+	* manual/memory.texi (Malloc Tunable Parameters): Remove TODO
+	comment.
+	* manual/probes.texi (Memory Allocation Probes): Remove
+	memory_mallopt_check_action.
+
 2017-08-30  Steve Ellcey  <sellcey@cavium.com>
 
 	* sysdeps/unix/sysv/linux/aarch64/makecontext.c (__makecontext):
malloc/arena.c:

@@ -215,8 +215,7 @@ void
 TUNABLE_CALLBACK (set_mallopt_check) (tunable_val_t *valp)
 {
   int32_t value = (int32_t) valp->numval;
-  do_set_mallopt_check (value);
-  if (check_action != 0)
+  if (value != 0)
     __malloc_check_init ();
 }
 
@@ -397,12 +396,8 @@ ptmalloc_init (void)
             }
         }
     }
-  if (s && s[0])
-    {
-      __libc_mallopt (M_CHECK_ACTION, (int) (s[0] - '0'));
-      if (check_action != 0)
-        __malloc_check_init ();
-    }
+  if (s && s[0] != '\0' && s[0] != '0')
+    __malloc_check_init ();
 #endif
 
 #if HAVE_MALLOC_INIT_HOOK
malloc/hooks.c:

@@ -121,12 +121,7 @@ malloc_check_get_size (mchunkptr p)
        size -= c)
     {
       if (c <= 0 || size < (c + 2 * SIZE_SZ))
-        {
-          malloc_printerr (check_action, "malloc_check_get_size: memory corruption",
-                           chunk2mem (p),
-                           chunk_is_mmapped (p) ? NULL : arena_for_chunk (p));
-          return 0;
-        }
+        malloc_printerr ("malloc_check_get_size: memory corruption");
     }
 
   /* chunk2mem size. */
@@ -232,17 +227,12 @@ mem2chunk_check (void *mem, unsigned char **magic_p)
   return p;
 }
 
-/* Check for corruption of the top chunk, and try to recover if
-   necessary. */
-
+/* Check for corruption of the top chunk. */
 static int
 internal_function
 top_check (void)
 {
   mchunkptr t = top (&main_arena);
-  char *brk, *new_brk;
-  INTERNAL_SIZE_T front_misalign, sbrk_size;
-  unsigned long pagesz = GLRO (dl_pagesize);
 
   if (t == initial_top (&main_arena) ||
       (!chunk_is_mmapped (t) &&
@@ -252,32 +242,7 @@ top_check (void)
       (char *) t + chunksize (t) == mp_.sbrk_base + main_arena.system_mem)))
     return 0;
 
-  malloc_printerr (check_action, "malloc: top chunk is corrupt", t,
-                   &main_arena);
-
-  /* Try to set up a new top chunk. */
-  brk = MORECORE (0);
-  front_misalign = (unsigned long) chunk2mem (brk) & MALLOC_ALIGN_MASK;
-  if (front_misalign > 0)
-    front_misalign = MALLOC_ALIGNMENT - front_misalign;
-  sbrk_size = front_misalign + mp_.top_pad + MINSIZE;
-  sbrk_size += pagesz - ((unsigned long) (brk + sbrk_size) & (pagesz - 1));
-  new_brk = (char *) (MORECORE (sbrk_size));
-  if (new_brk == (char *) (MORECORE_FAILURE))
-    {
-      __set_errno (ENOMEM);
-      return -1;
-    }
-  /* Call the `morecore' hook if necessary.  */
-  void (*hook) (void) = atomic_forced_read (__after_morecore_hook);
-  if (hook)
-    (*hook)();
-  main_arena.system_mem = (new_brk - mp_.sbrk_base) + sbrk_size;
-
-  top (&main_arena) = (mchunkptr) (brk + front_misalign);
-  set_head (top (&main_arena), (sbrk_size - front_misalign) | PREV_INUSE);
-
-  return 0;
+  malloc_printerr ("malloc: top chunk is corrupt");
 }
 
 static void *
@@ -308,13 +273,7 @@ free_check (void *mem, const void *caller)
   __libc_lock_lock (main_arena.mutex);
   p = mem2chunk_check (mem, NULL);
   if (!p)
-    {
-      __libc_lock_unlock (main_arena.mutex);
-
-      malloc_printerr (check_action, "free(): invalid pointer", mem,
-                       &main_arena);
-      return;
-    }
+    malloc_printerr ("free(): invalid pointer");
   if (chunk_is_mmapped (p))
     {
       __libc_lock_unlock (main_arena.mutex);
@@ -349,11 +308,7 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
   const mchunkptr oldp = mem2chunk_check (oldmem, &magic_p);
   __libc_lock_unlock (main_arena.mutex);
   if (!oldp)
-    {
-      malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
-                       &main_arena);
-      return malloc_check (bytes, NULL);
-    }
+    malloc_printerr ("realloc(): invalid pointer");
   const INTERNAL_SIZE_T oldsize = chunksize (oldp);
 
   checked_request2size (bytes + 1, nb);
malloc/malloc.c: 144 lines changed

@@ -1019,8 +1019,7 @@ static void* _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
 static void*  _int_memalign(mstate, size_t, size_t);
 static void*  _mid_memalign(size_t, size_t, void *);
 
-static void malloc_printerr(int action, const char *str, void *ptr, mstate av)
-__attribute__ ((noreturn));
+static void malloc_printerr(const char *str) __attribute__ ((noreturn));
 
 static void* internal_function mem2mem_check(void *p, size_t sz);
 static int internal_function top_check(void);
@@ -1404,11 +1403,11 @@ typedef struct malloc_chunk *mbinptr;
 /* Take a chunk off a bin list */
 #define unlink(AV, P, BK, FD) { \
     if (__builtin_expect (chunksize(P) != prev_size (next_chunk(P)), 0)) \
-      malloc_printerr (check_action, "corrupted size vs. prev_size", P, AV); \
+      malloc_printerr ("corrupted size vs. prev_size"); \
     FD = P->fd; \
     BK = P->bk; \
     if (__builtin_expect (FD->bk != P || BK->fd != P, 0)) \
-      malloc_printerr (check_action, "corrupted double-linked list", P, AV); \
+      malloc_printerr ("corrupted double-linked list"); \
     else { \
         FD->bk = BK; \
         BK->fd = FD; \
@@ -1416,9 +1415,7 @@ typedef struct malloc_chunk *mbinptr;
               && __builtin_expect (P->fd_nextsize != NULL, 0)) { \
             if (__builtin_expect (P->fd_nextsize->bk_nextsize != P, 0) \
                 || __builtin_expect (P->bk_nextsize->fd_nextsize != P, 0)) \
-              malloc_printerr (check_action, \
-                               "corrupted double-linked list (not small)", \
-                               P, AV); \
+              malloc_printerr ("corrupted double-linked list (not small)"); \
             if (FD->fd_nextsize == NULL) { \
                 if (P->fd_nextsize == P) \
                   FD->fd_nextsize = FD->bk_nextsize = FD; \
@@ -1893,15 +1890,6 @@ void *weak_variable (*__memalign_hook)
 void weak_variable (*__after_morecore_hook) (void) = NULL;
 
 
-/* ---------------- Error behavior ------------------------------------ */
-
-#ifndef DEFAULT_CHECK_ACTION
-# define DEFAULT_CHECK_ACTION 3
-#endif
-
-static int check_action = DEFAULT_CHECK_ACTION;
-
-
 /* ------------------ Testing support ----------------------------------*/
 
 static int perturb_byte;
@@ -2579,11 +2567,8 @@ sysmalloc (INTERNAL_SIZE_T nb, mstate av)
             set_head (old_top, (size + old_size) | PREV_INUSE);
 
           else if (contiguous (av) && old_size && brk < old_end)
-            {
-              /* Oops! Someone else killed our space.. Can't touch anything. */
-              malloc_printerr (3, "break adjusted to free malloc space", brk,
-                               av);
-            }
+            /* Oops! Someone else killed our space.. Can't touch anything. */
+            malloc_printerr ("break adjusted to free malloc space");
 
           /*
              Otherwise, make adjustments:
@@ -2874,11 +2859,7 @@ munmap_chunk (mchunkptr p)
      (in the moment at least) so we combine the two values into one before
      the bit test. */
   if (__builtin_expect (((block | total_size) & (GLRO (dl_pagesize) - 1)) != 0, 0))
-    {
-      malloc_printerr (check_action, "munmap_chunk(): invalid pointer",
-                       chunk2mem (p), NULL);
-      return;
-    }
+    malloc_printerr ("munmap_chunk(): invalid pointer");
 
   atomic_decrement (&mp_.n_mmaps);
   atomic_add (&mp_.mmapped_mem, -total_size);
@@ -3190,11 +3171,7 @@ __libc_realloc (void *oldmem, size_t bytes)
   if ((__builtin_expect ((uintptr_t) oldp > (uintptr_t) -oldsize, 0)
        || __builtin_expect (misaligned_chunk (oldp), 0))
       && !DUMPED_MAIN_ARENA_CHUNK (oldp))
-    {
-      malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
-                       ar_ptr);
-      return NULL;
-    }
+    malloc_printerr ("realloc(): invalid pointer");
 
   checked_request2size (bytes, nb);
 
@@ -3540,8 +3517,6 @@ _int_malloc (mstate av, size_t bytes)
   size_t tcache_unsorted_count;     /* count of unsorted chunks processed */
 #endif
 
-  const char *errstr = NULL;
-
   /*
      Convert request size to internal form by adding SIZE_SZ bytes
      overhead plus possibly more to obtain necessary alignment and/or
@@ -3588,12 +3563,7 @@ _int_malloc (mstate av, size_t bytes)
       if (victim != 0)
         {
           if (__builtin_expect (fastbin_index (chunksize (victim)) != idx, 0))
-            {
-              errstr = "malloc(): memory corruption (fast)";
-            errout:
-              malloc_printerr (check_action, errstr, chunk2mem (victim), av);
-              return NULL;
-            }
+            malloc_printerr ("malloc(): memory corruption (fast)");
           check_remalloced_chunk (av, victim, nb);
 #if USE_TCACHE
           /* While we're here, if we see other chunks of the same size,
@@ -3642,10 +3612,8 @@ _int_malloc (mstate av, size_t bytes)
         {
           bck = victim->bk;
           if (__glibc_unlikely (bck->fd != victim))
-            {
-              errstr = "malloc(): smallbin double linked list corrupted";
-              goto errout;
-            }
+            malloc_printerr
+              ("malloc(): smallbin double linked list corrupted");
           set_inuse_bit_at_offset (victim, nb);
           bin->bk = bck;
           bck->fd = bin;
@@ -3736,8 +3704,7 @@ _int_malloc (mstate av, size_t bytes)
           if (__builtin_expect (chunksize_nomask (victim) <= 2 * SIZE_SZ, 0)
               || __builtin_expect (chunksize_nomask (victim)
                                    > av->system_mem, 0))
-            malloc_printerr (check_action, "malloc(): memory corruption",
-                             chunk2mem (victim), av);
+            malloc_printerr ("malloc(): memory corruption");
           size = chunksize (victim);
 
           /*
@@ -3943,10 +3910,7 @@ _int_malloc (mstate av, size_t bytes)
               bck = unsorted_chunks (av);
               fwd = bck->fd;
               if (__glibc_unlikely (fwd->bk != bck))
-                {
-                  errstr = "malloc(): corrupted unsorted chunks";
-                  goto errout;
-                }
+                malloc_printerr ("malloc(): corrupted unsorted chunks");
               remainder->bk = bck;
               remainder->fd = fwd;
               bck->fd = remainder;
@@ -4050,10 +4014,7 @@ _int_malloc (mstate av, size_t bytes)
               bck = unsorted_chunks (av);
               fwd = bck->fd;
               if (__glibc_unlikely (fwd->bk != bck))
-                {
-                  errstr = "malloc(): corrupted unsorted chunks 2";
-                  goto errout;
-                }
+                malloc_printerr ("malloc(): corrupted unsorted chunks 2");
               remainder->bk = bck;
               remainder->fd = fwd;
               bck->fd = remainder;
@@ -4154,7 +4115,6 @@ _int_free (mstate av, mchunkptr p, int have_lock)
   mchunkptr bck;               /* misc temp for linking */
   mchunkptr fwd;               /* misc temp for linking */
 
-  const char *errstr = NULL;
   int locked = 0;
 
   size = chunksize (p);
@@ -4165,21 +4125,11 @@ _int_free (mstate av, mchunkptr p, int have_lock)
      here by accident or by "design" from some intruder. */
   if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
       || __builtin_expect (misaligned_chunk (p), 0))
-    {
-      errstr = "free(): invalid pointer";
-    errout:
-      if (!have_lock && locked)
-        __libc_lock_unlock (av->mutex);
-      malloc_printerr (check_action, errstr, chunk2mem (p), av);
-      return;
-    }
+    malloc_printerr ("free(): invalid pointer");
   /* We know that each chunk is at least MINSIZE bytes in size or a
      multiple of MALLOC_ALIGNMENT. */
   if (__glibc_unlikely (size < MINSIZE || !aligned_OK (size)))
-    {
-      errstr = "free(): invalid size";
-      goto errout;
-    }
+    malloc_printerr ("free(): invalid size");
 
   check_inuse_chunk(av, p);
 
@@ -4228,10 +4178,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
             chunksize_nomask (chunk_at_offset (p, size)) <= 2 * SIZE_SZ
             || chunksize (chunk_at_offset (p, size)) >= av->system_mem;
           }))
-      {
-        errstr = "free(): invalid next size (fast)";
-        goto errout;
-      }
+      malloc_printerr ("free(): invalid next size (fast)");
     if (! have_lock)
       {
         __libc_lock_unlock (av->mutex);
@@ -4253,10 +4200,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
     /* Check that the top of the bin is not the record we are going to add
        (i.e., double free). */
     if (__builtin_expect (old == p, 0))
-      {
-        errstr = "double free or corruption (fasttop)";
-        goto errout;
-      }
+      malloc_printerr ("double free or corruption (fasttop)");
     /* Check that size of fastbin chunk at the top is the same as
        size of the chunk that we are adding. We can dereference OLD
       only if we have the lock, otherwise it might have already been
@@ -4268,10 +4212,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
     while ((old = catomic_compare_and_exchange_val_rel (fb, p, old2)) != old2);
 
     if (have_lock && old != NULL && __builtin_expect (old_idx != idx, 0))
-      {
-        errstr = "invalid fastbin entry (free)";
-        goto errout;
-      }
+      malloc_printerr ("invalid fastbin entry (free)");
   }
 
   /*
@@ -4289,32 +4230,20 @@ _int_free (mstate av, mchunkptr p, int have_lock)
     /* Lightweight tests: check whether the block is already the
        top block. */
     if (__glibc_unlikely (p == av->top))
-      {
-        errstr = "double free or corruption (top)";
-        goto errout;
-      }
+      malloc_printerr ("double free or corruption (top)");
     /* Or whether the next chunk is beyond the boundaries of the arena. */
     if (__builtin_expect (contiguous (av)
                           && (char *) nextchunk
                           >= ((char *) av->top + chunksize(av->top)), 0))
-      {
-        errstr = "double free or corruption (out)";
-        goto errout;
-      }
+      malloc_printerr ("double free or corruption (out)");
     /* Or whether the block is actually not marked used. */
     if (__glibc_unlikely (!prev_inuse(nextchunk)))
-      {
-        errstr = "double free or corruption (!prev)";
-        goto errout;
-      }
+      malloc_printerr ("double free or corruption (!prev)");
 
     nextsize = chunksize(nextchunk);
     if (__builtin_expect (chunksize_nomask (nextchunk) <= 2 * SIZE_SZ, 0)
         || __builtin_expect (nextsize >= av->system_mem, 0))
-      {
-        errstr = "free(): invalid next size (normal)";
-        goto errout;
-      }
+      malloc_printerr ("free(): invalid next size (normal)");
 
     free_perturb (chunk2mem(p), size - 2 * SIZE_SZ);
 
@@ -4346,10 +4275,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
     bck = unsorted_chunks(av);
     fwd = bck->fd;
     if (__glibc_unlikely (fwd->bk != bck))
-      {
-        errstr = "free(): corrupted unsorted chunks";
-        goto errout;
-      }
+      malloc_printerr ("free(): corrupted unsorted chunks");
     p->fd = fwd;
     p->bk = bck;
     if (!in_smallbin_range(size))
@@ -4562,17 +4488,10 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
   INTERNAL_SIZE_T* s;               /* copy source */
   INTERNAL_SIZE_T* d;               /* copy destination */
 
-  const char *errstr = NULL;
-
   /* oldmem size */
   if (__builtin_expect (chunksize_nomask (oldp) <= 2 * SIZE_SZ, 0)
       || __builtin_expect (oldsize >= av->system_mem, 0))
-    {
-      errstr = "realloc(): invalid old size";
-    errout:
-      malloc_printerr (check_action, errstr, chunk2mem (oldp), av);
-      return NULL;
-    }
+    malloc_printerr ("realloc(): invalid old size");
 
   check_inuse_chunk (av, oldp);
 
@@ -4583,10 +4502,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
   INTERNAL_SIZE_T nextsize = chunksize (next);
   if (__builtin_expect (chunksize_nomask (next) <= 2 * SIZE_SZ, 0)
       || __builtin_expect (nextsize >= av->system_mem, 0))
-    {
-      errstr = "realloc(): invalid next size";
-      goto errout;
-    }
+    malloc_printerr ("realloc(): invalid next size");
 
   if ((unsigned long) (oldsize) >= (unsigned long) (nb))
     {
@@ -5126,8 +5042,6 @@ static inline int
 __always_inline
 do_set_mallopt_check (int32_t value)
 {
-  LIBC_PROBE (memory_mallopt_check_action, 2, value, check_action);
-  check_action = value;
   return 1;
 }
 
@@ -5401,14 +5315,8 @@ libc_hidden_def (__libc_mallopt)
 extern char **__libc_argv attribute_hidden;
 
 static void
-malloc_printerr (int action, const char *str, void *ptr, mstate ar_ptr)
+malloc_printerr (const char *str)
 {
-  /* Avoid using this arena in future.  We do not attempt to synchronize this
-     with anything else because we minimally want to ensure that __libc_message
-     gets its resources safely without stumbling on the current corruption.  */
-  if (ar_ptr)
-    set_arena_corrupt (ar_ptr);
-
   __libc_message (do_abort, "%s\n", str);
   __builtin_unreachable ();
 }
manual/memory.texi:

@@ -1104,7 +1104,6 @@ When calling @code{mallopt}, the @var{param} argument specifies the
 parameter to be set, and @var{value} the new value to be set.  Possible
 choices for @var{param}, as defined in @file{malloc.h}, are:
 
-@comment TODO: @item M_CHECK_ACTION
 @vtable @code
 @item M_MMAP_MAX
 The maximum number of chunks to allocate with @code{mmap}.  Setting this
manual/probes.texi:

@@ -195,13 +195,6 @@ this @code{malloc} parameter, and @var{$arg3} is nonzero if dynamic
 threshold adjustment was already disabled.
 @end deftp
 
-@deftp Probe memory_mallopt_check_action (int @var{$arg1}, int @var{$arg2})
-This probe is triggered shortly after the @code{memory_mallopt} probe,
-when the parameter to be changed is @code{M_CHECK_ACTION}.  Argument
-@var{$arg1} is the requested value, and @var{$arg2} is the previous
-value of this @code{malloc} parameter.
-@end deftp
-
 @deftp Probe memory_mallopt_perturb (int @var{$arg1}, int @var{$arg2})
 This probe is triggered shortly after the @code{memory_mallopt} probe,
 when the parameter to be changed is @code{M_PERTURB}.  Argument