malloc: Remove check_action variable [BZ #21754]

Clean up calls to malloc_printerr and trim its argument list.

This also removes a few bits of work done before calling
malloc_printerr (such as unlocking operations).

The tunable/environment variable still enables the lightweight
additional malloc checking, but mallopt (M_CHECK_ACTION)
no longer has any effect.
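
As a concrete illustration of the behavior described above (not part of the commit): the lightweight checking is still switched on by the MALLOC_CHECK_ environment variable or the glibc.malloc.check tunable, while a mallopt (M_CHECK_ACTION) call is still accepted but no longer changes anything. A minimal, hypothetical test program, assuming only the standard <malloc.h> interface:

#include <malloc.h>

int
main (void)
{
  /* Still returns 1 (success), but after this change it no longer
     influences the checking behavior; only MALLOC_CHECK_ or the
     glibc.malloc.check tunable does.  */
  mallopt (M_CHECK_ACTION, 3);
  return 0;
}

Run it e.g. as MALLOC_CHECK_=3 ./a.out (or with GLIBC_TUNABLES=glibc.malloc.check=3) to enable the checking hooks.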

(cherry-picked from ac3ed168d0)
Author:    Florian Weimer  2017-08-30 19:29:38 +02:00
Committer: Siddhesh Poyarekar
Parent:    8788996793
Commit:    ee717ed23d
6 changed files with 65 additions and 188 deletions

ChangeLog

@@ -1,3 +1,30 @@
2017-08-30 Florian Weimer <fweimer@redhat.com>
[BZ #21754]
* malloc/arena.c (TUNABLE_CALLBACK set_mallopt_check): Do not set
check_action.
(ptmalloc_init): Do not set or use check_action.
* malloc/hooks.c (malloc_check_get_size, realloc_check): Adjust
call to malloc_printerr. Remove return statement.
(free_check): Likewise. Remove arena unlock.
(top_check): Update comment. Adjust call to malloc_printerr.
Remove heap repair code.
* malloc/malloc.c (unlink): Adjust calls to malloc_printerr.
(DEFAULT_CHECK_ACTION, check_action): Remove definitions.
(sysmalloc): Adjust call to malloc_printerr.
(munmap_chunk, __libc_realloc): Likewise. Remove return
statement.
(_int_malloc, int_realloc): Likewise. Remove errstr variable.
Remove errout label and corresponding gotos.
(_int_free): Likewise. Remove arena unlock.
(do_set_mallopt_check): Do not set check_action.
(malloc_printerr): Adjust parameter list. Do not mark arena as
corrupt.
* manual/memory.texi (Malloc Tunable Parameters): Remove TODO
comment.
* manual/probes.texi (Memory Allocation Probes): Remove
memory_mallopt_check_action.
2017-08-30 Florian Weimer <fweimer@redhat.com>
[BZ #21754]

malloc/arena.c

@@ -215,8 +215,7 @@ void
TUNABLE_CALLBACK (set_mallopt_check) (tunable_val_t *valp)
{
int32_t value = (int32_t) valp->numval;
do_set_mallopt_check (value);
if (check_action != 0)
if (value != 0)
__malloc_check_init ();
}
@@ -397,12 +396,8 @@ ptmalloc_init (void)
}
}
}
if (s && s[0])
{
__libc_mallopt (M_CHECK_ACTION, (int) (s[0] - '0'));
if (check_action != 0)
__malloc_check_init ();
}
if (s && s[0] != '\0' && s[0] != '0')
__malloc_check_init ();
#endif
#if HAVE_MALLOC_INIT_HOOK

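Read together, the two hunks above reduce the MALLOC_CHECK_ handling to a single test: enable the checking hooks whenever the variable is set to a non-empty value that does not start with '0'. A standalone sketch of that decision; malloc_check_requested is a purely illustrative name (the real code calls __malloc_check_init directly):

#include <stdbool.h>
#include <stdlib.h>

/* Illustrative only: `s' plays the role of the MALLOC_CHECK_ value
   that ptmalloc_init obtains from the environment.  */
static bool
malloc_check_requested (const char *s)
{
  /* Previously the digit was funneled through
     __libc_mallopt (M_CHECK_ACTION, s[0] - '0'); now only "non-empty
     and not starting with '0'" matters.  */
  return s != NULL && s[0] != '\0' && s[0] != '0';
}

int
main (void)
{
  return malloc_check_requested (getenv ("MALLOC_CHECK_")) ? 0 : 1;
}
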
malloc/hooks.c

@@ -121,12 +121,7 @@ malloc_check_get_size (mchunkptr p)
size -= c)
{
if (c <= 0 || size < (c + 2 * SIZE_SZ))
{
malloc_printerr (check_action, "malloc_check_get_size: memory corruption",
chunk2mem (p),
chunk_is_mmapped (p) ? NULL : arena_for_chunk (p));
return 0;
}
malloc_printerr ("malloc_check_get_size: memory corruption");
}
/* chunk2mem size. */
@@ -232,17 +227,12 @@ mem2chunk_check (void *mem, unsigned char **magic_p)
return p;
}
/* Check for corruption of the top chunk, and try to recover if
necessary. */
/* Check for corruption of the top chunk. */
static int
internal_function
top_check (void)
{
mchunkptr t = top (&main_arena);
char *brk, *new_brk;
INTERNAL_SIZE_T front_misalign, sbrk_size;
unsigned long pagesz = GLRO (dl_pagesize);
if (t == initial_top (&main_arena) ||
(!chunk_is_mmapped (t) &&
@@ -252,32 +242,7 @@ top_check (void)
(char *) t + chunksize (t) == mp_.sbrk_base + main_arena.system_mem)))
return 0;
malloc_printerr (check_action, "malloc: top chunk is corrupt", t,
&main_arena);
/* Try to set up a new top chunk. */
brk = MORECORE (0);
front_misalign = (unsigned long) chunk2mem (brk) & MALLOC_ALIGN_MASK;
if (front_misalign > 0)
front_misalign = MALLOC_ALIGNMENT - front_misalign;
sbrk_size = front_misalign + mp_.top_pad + MINSIZE;
sbrk_size += pagesz - ((unsigned long) (brk + sbrk_size) & (pagesz - 1));
new_brk = (char *) (MORECORE (sbrk_size));
if (new_brk == (char *) (MORECORE_FAILURE))
{
__set_errno (ENOMEM);
return -1;
}
/* Call the `morecore' hook if necessary. */
void (*hook) (void) = atomic_forced_read (__after_morecore_hook);
if (hook)
(*hook)();
main_arena.system_mem = (new_brk - mp_.sbrk_base) + sbrk_size;
top (&main_arena) = (mchunkptr) (brk + front_misalign);
set_head (top (&main_arena), (sbrk_size - front_misalign) | PREV_INUSE);
return 0;
malloc_printerr ("malloc: top chunk is corrupt");
}
static void *
@@ -308,13 +273,7 @@ free_check (void *mem, const void *caller)
__libc_lock_lock (main_arena.mutex);
p = mem2chunk_check (mem, NULL);
if (!p)
{
__libc_lock_unlock (main_arena.mutex);
malloc_printerr (check_action, "free(): invalid pointer", mem,
&main_arena);
return;
}
malloc_printerr ("free(): invalid pointer");
if (chunk_is_mmapped (p))
{
__libc_lock_unlock (main_arena.mutex);
@@ -349,11 +308,7 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
const mchunkptr oldp = mem2chunk_check (oldmem, &magic_p);
__libc_lock_unlock (main_arena.mutex);
if (!oldp)
{
malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
&main_arena);
return malloc_check (bytes, NULL);
}
malloc_printerr ("realloc(): invalid pointer");
const INTERNAL_SIZE_T oldsize = chunksize (oldp);
checked_request2size (bytes + 1, nb);

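A side effect visible throughout this file: since malloc_printerr is now declared noreturn, an error path no longer has to unlock main_arena.mutex or fabricate a return value before reporting. A self-contained sketch of that calling pattern, with fprintf/abort standing in for the glibc-internal __libc_message (report_fatal and checked_size are illustrative names, not glibc's):

#include <stdio.h>
#include <stdlib.h>

/* Stand-in for malloc_printerr: print the message and abort; the
   noreturn attribute is what lets callers drop their cleanup code.  */
static void __attribute__ ((noreturn))
report_fatal (const char *msg)
{
  fprintf (stderr, "%s\n", msg);
  abort ();
}

/* Caller side after the change: no unlock, no "return 0;", no
   errstr/errout bookkeeping; control never returns from the report.  */
static size_t
checked_size (const void *p, size_t size)
{
  if (p == NULL || size == 0)
    report_fatal ("malloc_check_get_size: memory corruption");
  return size;
}

int
main (void)
{
  char buf[16];
  return checked_size (buf, sizeof buf) == sizeof buf ? 0 : 1;
}
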
malloc/malloc.c

@@ -1019,8 +1019,7 @@ static void* _int_realloc(mstate, mchunkptr, INTERNAL_SIZE_T,
static void* _int_memalign(mstate, size_t, size_t);
static void* _mid_memalign(size_t, size_t, void *);
static void malloc_printerr(int action, const char *str, void *ptr, mstate av)
__attribute__ ((noreturn));
static void malloc_printerr(const char *str) __attribute__ ((noreturn));
static void* internal_function mem2mem_check(void *p, size_t sz);
static int internal_function top_check(void);
@@ -1404,11 +1403,11 @@ typedef struct malloc_chunk *mbinptr;
/* Take a chunk off a bin list */
#define unlink(AV, P, BK, FD) { \
if (__builtin_expect (chunksize(P) != prev_size (next_chunk(P)), 0)) \
malloc_printerr (check_action, "corrupted size vs. prev_size", P, AV); \
malloc_printerr ("corrupted size vs. prev_size"); \
FD = P->fd; \
BK = P->bk; \
if (__builtin_expect (FD->bk != P || BK->fd != P, 0)) \
malloc_printerr (check_action, "corrupted double-linked list", P, AV); \
malloc_printerr ("corrupted double-linked list"); \
else { \
FD->bk = BK; \
BK->fd = FD; \
@@ -1416,9 +1415,7 @@ typedef struct malloc_chunk *mbinptr;
&& __builtin_expect (P->fd_nextsize != NULL, 0)) { \
if (__builtin_expect (P->fd_nextsize->bk_nextsize != P, 0) \
|| __builtin_expect (P->bk_nextsize->fd_nextsize != P, 0)) \
malloc_printerr (check_action, \
"corrupted double-linked list (not small)", \
P, AV); \
malloc_printerr ("corrupted double-linked list (not small)"); \
if (FD->fd_nextsize == NULL) { \
if (P->fd_nextsize == P) \
FD->fd_nextsize = FD->bk_nextsize = FD; \
@@ -1887,15 +1884,6 @@ void *weak_variable (*__memalign_hook)
void weak_variable (*__after_morecore_hook) (void) = NULL;
/* ---------------- Error behavior ------------------------------------ */
#ifndef DEFAULT_CHECK_ACTION
# define DEFAULT_CHECK_ACTION 3
#endif
static int check_action = DEFAULT_CHECK_ACTION;
/* ------------------ Testing support ----------------------------------*/
static int perturb_byte;
@@ -2568,11 +2556,8 @@ sysmalloc (INTERNAL_SIZE_T nb, mstate av)
set_head (old_top, (size + old_size) | PREV_INUSE);
else if (contiguous (av) && old_size && brk < old_end)
{
/* Oops! Someone else killed our space.. Can't touch anything. */
malloc_printerr (3, "break adjusted to free malloc space", brk,
av);
}
/* Oops! Someone else killed our space.. Can't touch anything. */
malloc_printerr ("break adjusted to free malloc space");
/*
Otherwise, make adjustments:
@@ -2863,11 +2848,7 @@ munmap_chunk (mchunkptr p)
(in the moment at least) so we combine the two values into one before
the bit test. */
if (__builtin_expect (((block | total_size) & (GLRO (dl_pagesize) - 1)) != 0, 0))
{
malloc_printerr (check_action, "munmap_chunk(): invalid pointer",
chunk2mem (p), NULL);
return;
}
malloc_printerr ("munmap_chunk(): invalid pointer");
atomic_decrement (&mp_.n_mmaps);
atomic_add (&mp_.mmapped_mem, -total_size);
@@ -3181,11 +3162,7 @@ __libc_realloc (void *oldmem, size_t bytes)
if ((__builtin_expect ((uintptr_t) oldp > (uintptr_t) -oldsize, 0)
|| __builtin_expect (misaligned_chunk (oldp), 0))
&& !DUMPED_MAIN_ARENA_CHUNK (oldp))
{
malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
ar_ptr);
return NULL;
}
malloc_printerr ("realloc(): invalid pointer");
checked_request2size (bytes, nb);
@@ -3531,8 +3508,6 @@ _int_malloc (mstate av, size_t bytes)
size_t tcache_unsorted_count; /* count of unsorted chunks processed */
#endif
const char *errstr = NULL;
/*
Convert request size to internal form by adding SIZE_SZ bytes
overhead plus possibly more to obtain necessary alignment and/or
@@ -3579,12 +3554,7 @@ _int_malloc (mstate av, size_t bytes)
if (victim != 0)
{
if (__builtin_expect (fastbin_index (chunksize (victim)) != idx, 0))
{
errstr = "malloc(): memory corruption (fast)";
errout:
malloc_printerr (check_action, errstr, chunk2mem (victim), av);
return NULL;
}
malloc_printerr ("malloc(): memory corruption (fast)");
check_remalloced_chunk (av, victim, nb);
#if USE_TCACHE
/* While we're here, if we see other chunks of the same size,
@@ -3632,11 +3602,9 @@ _int_malloc (mstate av, size_t bytes)
else
{
bck = victim->bk;
if (__glibc_unlikely (bck->fd != victim))
{
errstr = "malloc(): smallbin double linked list corrupted";
goto errout;
}
if (__glibc_unlikely (bck->fd != victim))
malloc_printerr
("malloc(): smallbin double linked list corrupted");
set_inuse_bit_at_offset (victim, nb);
bin->bk = bck;
bck->fd = bin;
@@ -3727,8 +3695,7 @@ _int_malloc (mstate av, size_t bytes)
if (__builtin_expect (chunksize_nomask (victim) <= 2 * SIZE_SZ, 0)
|| __builtin_expect (chunksize_nomask (victim)
> av->system_mem, 0))
malloc_printerr (check_action, "malloc(): memory corruption",
chunk2mem (victim), av);
malloc_printerr ("malloc(): memory corruption");
size = chunksize (victim);
/*
@@ -3933,11 +3900,8 @@ _int_malloc (mstate av, size_t bytes)
have to perform a complete insert here. */
bck = unsorted_chunks (av);
fwd = bck->fd;
if (__glibc_unlikely (fwd->bk != bck))
{
errstr = "malloc(): corrupted unsorted chunks";
goto errout;
}
if (__glibc_unlikely (fwd->bk != bck))
malloc_printerr ("malloc(): corrupted unsorted chunks");
remainder->bk = bck;
remainder->fd = fwd;
bck->fd = remainder;
@@ -4040,11 +4004,8 @@ _int_malloc (mstate av, size_t bytes)
have to perform a complete insert here. */
bck = unsorted_chunks (av);
fwd = bck->fd;
if (__glibc_unlikely (fwd->bk != bck))
{
errstr = "malloc(): corrupted unsorted chunks 2";
goto errout;
}
if (__glibc_unlikely (fwd->bk != bck))
malloc_printerr ("malloc(): corrupted unsorted chunks 2");
remainder->bk = bck;
remainder->fd = fwd;
bck->fd = remainder;
@@ -4145,7 +4106,6 @@ _int_free (mstate av, mchunkptr p, int have_lock)
mchunkptr bck; /* misc temp for linking */
mchunkptr fwd; /* misc temp for linking */
const char *errstr = NULL;
int locked = 0;
size = chunksize (p);
@@ -4156,21 +4116,11 @@ _int_free (mstate av, mchunkptr p, int have_lock)
here by accident or by "design" from some intruder. */
if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
|| __builtin_expect (misaligned_chunk (p), 0))
{
errstr = "free(): invalid pointer";
errout:
if (!have_lock && locked)
__libc_lock_unlock (av->mutex);
malloc_printerr (check_action, errstr, chunk2mem (p), av);
return;
}
malloc_printerr ("free(): invalid pointer");
/* We know that each chunk is at least MINSIZE bytes in size or a
multiple of MALLOC_ALIGNMENT. */
if (__glibc_unlikely (size < MINSIZE || !aligned_OK (size)))
{
errstr = "free(): invalid size";
goto errout;
}
malloc_printerr ("free(): invalid size");
check_inuse_chunk(av, p);
@@ -4219,10 +4169,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
chunksize_nomask (chunk_at_offset (p, size)) <= 2 * SIZE_SZ
|| chunksize (chunk_at_offset (p, size)) >= av->system_mem;
}))
{
errstr = "free(): invalid next size (fast)";
goto errout;
}
malloc_printerr ("free(): invalid next size (fast)");
if (! have_lock)
{
__libc_lock_unlock (av->mutex);
@@ -4244,10 +4191,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
/* Check that the top of the bin is not the record we are going to add
(i.e., double free). */
if (__builtin_expect (old == p, 0))
{
errstr = "double free or corruption (fasttop)";
goto errout;
}
malloc_printerr ("double free or corruption (fasttop)");
/* Check that size of fastbin chunk at the top is the same as
size of the chunk that we are adding. We can dereference OLD
only if we have the lock, otherwise it might have already been
@@ -4259,10 +4203,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
while ((old = catomic_compare_and_exchange_val_rel (fb, p, old2)) != old2);
if (have_lock && old != NULL && __builtin_expect (old_idx != idx, 0))
{
errstr = "invalid fastbin entry (free)";
goto errout;
}
malloc_printerr ("invalid fastbin entry (free)");
}
/*
@@ -4280,32 +4221,20 @@ _int_free (mstate av, mchunkptr p, int have_lock)
/* Lightweight tests: check whether the block is already the
top block. */
if (__glibc_unlikely (p == av->top))
{
errstr = "double free or corruption (top)";
goto errout;
}
malloc_printerr ("double free or corruption (top)");
/* Or whether the next chunk is beyond the boundaries of the arena. */
if (__builtin_expect (contiguous (av)
&& (char *) nextchunk
>= ((char *) av->top + chunksize(av->top)), 0))
{
errstr = "double free or corruption (out)";
goto errout;
}
malloc_printerr ("double free or corruption (out)");
/* Or whether the block is actually not marked used. */
if (__glibc_unlikely (!prev_inuse(nextchunk)))
{
errstr = "double free or corruption (!prev)";
goto errout;
}
malloc_printerr ("double free or corruption (!prev)");
nextsize = chunksize(nextchunk);
if (__builtin_expect (chunksize_nomask (nextchunk) <= 2 * SIZE_SZ, 0)
|| __builtin_expect (nextsize >= av->system_mem, 0))
{
errstr = "free(): invalid next size (normal)";
goto errout;
}
malloc_printerr ("free(): invalid next size (normal)");
free_perturb (chunk2mem(p), size - 2 * SIZE_SZ);
@@ -4337,10 +4266,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
bck = unsorted_chunks(av);
fwd = bck->fd;
if (__glibc_unlikely (fwd->bk != bck))
{
errstr = "free(): corrupted unsorted chunks";
goto errout;
}
malloc_printerr ("free(): corrupted unsorted chunks");
p->fd = fwd;
p->bk = bck;
if (!in_smallbin_range(size))
@@ -4553,17 +4479,10 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
INTERNAL_SIZE_T* s; /* copy source */
INTERNAL_SIZE_T* d; /* copy destination */
const char *errstr = NULL;
/* oldmem size */
if (__builtin_expect (chunksize_nomask (oldp) <= 2 * SIZE_SZ, 0)
|| __builtin_expect (oldsize >= av->system_mem, 0))
{
errstr = "realloc(): invalid old size";
errout:
malloc_printerr (check_action, errstr, chunk2mem (oldp), av);
return NULL;
}
malloc_printerr ("realloc(): invalid old size");
check_inuse_chunk (av, oldp);
@@ -4574,10 +4493,7 @@ _int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
INTERNAL_SIZE_T nextsize = chunksize (next);
if (__builtin_expect (chunksize_nomask (next) <= 2 * SIZE_SZ, 0)
|| __builtin_expect (nextsize >= av->system_mem, 0))
{
errstr = "realloc(): invalid next size";
goto errout;
}
malloc_printerr ("realloc(): invalid next size");
if ((unsigned long) (oldsize) >= (unsigned long) (nb))
{
@@ -5117,8 +5033,6 @@ static inline int
__always_inline
do_set_mallopt_check (int32_t value)
{
LIBC_PROBE (memory_mallopt_check_action, 2, value, check_action);
check_action = value;
return 1;
}
@@ -5392,14 +5306,8 @@ libc_hidden_def (__libc_mallopt)
extern char **__libc_argv attribute_hidden;
static void
malloc_printerr (int action, const char *str, void *ptr, mstate ar_ptr)
malloc_printerr (const char *str)
{
/* Avoid using this arena in future. We do not attempt to synchronize this
with anything else because we minimally want to ensure that __libc_message
gets its resources safely without stumbling on the current corruption. */
if (ar_ptr)
set_arena_corrupt (ar_ptr);
__libc_message (do_abort, "%s\n", str);
__builtin_unreachable ();
}

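The consistency check that the unlink macro performs above (and that now reports through the simplified malloc_printerr) can be illustrated outside glibc with an ordinary doubly linked list; node, unlink_node and fatal below are illustrative names, not glibc's:

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for glibc's chunk links and malloc_printerr.  */
struct node { struct node *fd, *bk; };

static void __attribute__ ((noreturn))
fatal (const char *msg)
{
  fprintf (stderr, "%s\n", msg);
  abort ();
}

/* Same idea as the unlink macro: refuse to unlink a node whose
   neighbours do not point back at it.  */
static void
unlink_node (struct node *p)
{
  struct node *fd = p->fd, *bk = p->bk;
  if (fd->bk != p || bk->fd != p)
    fatal ("corrupted double-linked list");
  fd->bk = bk;
  bk->fd = fd;
}

int
main (void)
{
  struct node a, b, c;
  a.fd = &b; b.bk = &a; b.fd = &c; c.bk = &b; c.fd = &a; a.bk = &c;
  unlink_node (&b);               /* Consistent list: succeeds.  */
  return 0;
}
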
manual/memory.texi

@@ -1104,7 +1104,6 @@ When calling @code{mallopt}, the @var{param} argument specifies the
parameter to be set, and @var{value} the new value to be set. Possible
choices for @var{param}, as defined in @file{malloc.h}, are:
@comment TODO: @item M_CHECK_ACTION
@vtable @code
@item M_MMAP_MAX
The maximum number of chunks to allocate with @code{mmap}. Setting this

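For reference, the mallopt interface this manual section describes is used as below; the example is hypothetical and only exercises M_MMAP_MAX, one of the parameters the list goes on to document:

#include <malloc.h>

int
main (void)
{
  /* Setting M_MMAP_MAX to zero disables mmap-based allocations
     entirely; mallopt returns 1 on success and 0 on error.  */
  if (mallopt (M_MMAP_MAX, 0) != 1)
    return 1;
  return 0;
}
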
manual/probes.texi

@@ -195,13 +195,6 @@ this @code{malloc} parameter, and @var{$arg3} is nonzero if dynamic
threshold adjustment was already disabled.
@end deftp
@deftp Probe memory_mallopt_check_action (int @var{$arg1}, int @var{$arg2})
This probe is triggered shortly after the @code{memory_mallopt} probe,
when the parameter to be changed is @code{M_CHECK_ACTION}. Argument
@var{$arg1} is the requested value, and @var{$arg2} is the previous
value of this @code{malloc} parameter.
@end deftp
@deftp Probe memory_mallopt_perturb (int @var{$arg1}, int @var{$arg2})
This probe is triggered shortly after the @code{memory_mallopt} probe,
when the parameter to be changed is @code{M_PERTURB}. Argument