malloc: Automated part of conversion to __libc_lock

This commit is contained in:
Florian Weimer 2016-09-06 12:49:54 +02:00
parent 6d5336211d
commit 4bf5f2224b
4 changed files with 69 additions and 60 deletions

View File

@@ -1,3 +1,12 @@
2016-09-06 Florian Weimer <fweimer@redhat.com>
Convert malloc to __libc_lock. Automated part, using this Perl
s/// command:
s/(?:\(void\)\s*)?mutex_((?:|un|try)lock|init)
\s*\(\&([^\)]+)\)/__libc_lock_$1\ ($2)/gx;
* malloc/malloc.c, malloc/arena.c, malloc/hooks.c: Perform
conversion.
2016-09-05 Aurelien Jarno <aurelien@aurel32.net> 2016-09-05 Aurelien Jarno <aurelien@aurel32.net>
* conform/Makefile (conformtest-header-tests): Pass -I. to $(PERL). * conform/Makefile (conformtest-header-tests): Pass -I. to $(PERL).

View File

@@ -112,7 +112,7 @@ int __malloc_initialized = -1;
#define arena_lock(ptr, size) do { \ #define arena_lock(ptr, size) do { \
if (ptr && !arena_is_corrupt (ptr)) \ if (ptr && !arena_is_corrupt (ptr)) \
(void) mutex_lock (&ptr->mutex); \ __libc_lock_lock (ptr->mutex); \
else \ else \
ptr = arena_get2 ((size), NULL); \ ptr = arena_get2 ((size), NULL); \
} while (0) } while (0)
@@ -145,11 +145,11 @@ __malloc_fork_lock_parent (void)
/* We do not acquire free_list_lock here because we completely /* We do not acquire free_list_lock here because we completely
reconstruct free_list in __malloc_fork_unlock_child. */ reconstruct free_list in __malloc_fork_unlock_child. */
(void) mutex_lock (&list_lock); __libc_lock_lock (list_lock);
for (mstate ar_ptr = &main_arena;; ) for (mstate ar_ptr = &main_arena;; )
{ {
(void) mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
ar_ptr = ar_ptr->next; ar_ptr = ar_ptr->next;
if (ar_ptr == &main_arena) if (ar_ptr == &main_arena)
break; break;
@@ -165,12 +165,12 @@ __malloc_fork_unlock_parent (void)
for (mstate ar_ptr = &main_arena;; ) for (mstate ar_ptr = &main_arena;; )
{ {
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = ar_ptr->next; ar_ptr = ar_ptr->next;
if (ar_ptr == &main_arena) if (ar_ptr == &main_arena)
break; break;
} }
(void) mutex_unlock (&list_lock); __libc_lock_unlock (list_lock);
} }
void void
@@ -182,13 +182,13 @@ __malloc_fork_unlock_child (void)
/* Push all arenas to the free list, except thread_arena, which is /* Push all arenas to the free list, except thread_arena, which is
attached to the current thread. */ attached to the current thread. */
mutex_init (&free_list_lock); __libc_lock_init (free_list_lock);
if (thread_arena != NULL) if (thread_arena != NULL)
thread_arena->attached_threads = 1; thread_arena->attached_threads = 1;
free_list = NULL; free_list = NULL;
for (mstate ar_ptr = &main_arena;; ) for (mstate ar_ptr = &main_arena;; )
{ {
mutex_init (&ar_ptr->mutex); __libc_lock_init (ar_ptr->mutex);
if (ar_ptr != thread_arena) if (ar_ptr != thread_arena)
{ {
/* This arena is no longer attached to any thread. */ /* This arena is no longer attached to any thread. */
@@ -201,7 +201,7 @@ __malloc_fork_unlock_child (void)
break; break;
} }
mutex_init (&list_lock); __libc_lock_init (list_lock);
} }
/* Initialization routine. */ /* Initialization routine. */
@@ -668,9 +668,9 @@ _int_new_arena (size_t size)
LIBC_PROBE (memory_arena_new, 2, a, size); LIBC_PROBE (memory_arena_new, 2, a, size);
mstate replaced_arena = thread_arena; mstate replaced_arena = thread_arena;
thread_arena = a; thread_arena = a;
mutex_init (&a->mutex); __libc_lock_init (a->mutex);
(void) mutex_lock (&list_lock); __libc_lock_lock (list_lock);
/* Add the new arena to the global list. */ /* Add the new arena to the global list. */
a->next = main_arena.next; a->next = main_arena.next;
@@ -680,11 +680,11 @@ _int_new_arena (size_t size)
atomic_write_barrier (); atomic_write_barrier ();
main_arena.next = a; main_arena.next = a;
(void) mutex_unlock (&list_lock); __libc_lock_unlock (list_lock);
(void) mutex_lock (&free_list_lock); __libc_lock_lock (free_list_lock);
detach_arena (replaced_arena); detach_arena (replaced_arena);
(void) mutex_unlock (&free_list_lock); __libc_lock_unlock (free_list_lock);
/* Lock this arena. NB: Another thread may have been attached to /* Lock this arena. NB: Another thread may have been attached to
this arena because the arena is now accessible from the this arena because the arena is now accessible from the
@@ -696,7 +696,7 @@ _int_new_arena (size_t size)
but this could result in a deadlock with but this could result in a deadlock with
__malloc_fork_lock_parent. */ __malloc_fork_lock_parent. */
(void) mutex_lock (&a->mutex); __libc_lock_lock (a->mutex);
return a; return a;
} }
@@ -710,7 +710,7 @@ get_free_list (void)
mstate result = free_list; mstate result = free_list;
if (result != NULL) if (result != NULL)
{ {
(void) mutex_lock (&free_list_lock); __libc_lock_lock (free_list_lock);
result = free_list; result = free_list;
if (result != NULL) if (result != NULL)
{ {
@@ -722,12 +722,12 @@ get_free_list (void)
detach_arena (replaced_arena); detach_arena (replaced_arena);
} }
(void) mutex_unlock (&free_list_lock); __libc_lock_unlock (free_list_lock);
if (result != NULL) if (result != NULL)
{ {
LIBC_PROBE (memory_arena_reuse_free_list, 1, result); LIBC_PROBE (memory_arena_reuse_free_list, 1, result);
(void) mutex_lock (&result->mutex); __libc_lock_lock (result->mutex);
thread_arena = result; thread_arena = result;
} }
} }
@@ -772,7 +772,7 @@ reused_arena (mstate avoid_arena)
result = next_to_use; result = next_to_use;
do do
{ {
if (!arena_is_corrupt (result) && !mutex_trylock (&result->mutex)) if (!arena_is_corrupt (result) && !__libc_lock_trylock (result->mutex))
goto out; goto out;
/* FIXME: This is a data race, see _int_new_arena. */ /* FIXME: This is a data race, see _int_new_arena. */
@@ -799,14 +799,14 @@ reused_arena (mstate avoid_arena)
/* No arena available without contention. Wait for the next in line. */ /* No arena available without contention. Wait for the next in line. */
LIBC_PROBE (memory_arena_reuse_wait, 3, &result->mutex, result, avoid_arena); LIBC_PROBE (memory_arena_reuse_wait, 3, &result->mutex, result, avoid_arena);
(void) mutex_lock (&result->mutex); __libc_lock_lock (result->mutex);
out: out:
/* Attach the arena to the current thread. */ /* Attach the arena to the current thread. */
{ {
/* Update the arena thread attachment counters. */ /* Update the arena thread attachment counters. */
mstate replaced_arena = thread_arena; mstate replaced_arena = thread_arena;
(void) mutex_lock (&free_list_lock); __libc_lock_lock (free_list_lock);
detach_arena (replaced_arena); detach_arena (replaced_arena);
/* We may have picked up an arena on the free list. We need to /* We may have picked up an arena on the free list. We need to
@@ -821,7 +821,7 @@ out:
++result->attached_threads; ++result->attached_threads;
(void) mutex_unlock (&free_list_lock); __libc_lock_unlock (free_list_lock);
} }
LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena); LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena);
@@ -892,17 +892,17 @@ arena_get_retry (mstate ar_ptr, size_t bytes)
LIBC_PROBE (memory_arena_retry, 2, bytes, ar_ptr); LIBC_PROBE (memory_arena_retry, 2, bytes, ar_ptr);
if (ar_ptr != &main_arena) if (ar_ptr != &main_arena)
{ {
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
/* Don't touch the main arena if it is corrupt. */ /* Don't touch the main arena if it is corrupt. */
if (arena_is_corrupt (&main_arena)) if (arena_is_corrupt (&main_arena))
return NULL; return NULL;
ar_ptr = &main_arena; ar_ptr = &main_arena;
(void) mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
} }
else else
{ {
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = arena_get2 (bytes, ar_ptr); ar_ptr = arena_get2 (bytes, ar_ptr);
} }
@@ -917,7 +917,7 @@ arena_thread_freeres (void)
if (a != NULL) if (a != NULL)
{ {
(void) mutex_lock (&free_list_lock); __libc_lock_lock (free_list_lock);
/* If this was the last attached thread for this arena, put the /* If this was the last attached thread for this arena, put the
arena on the free list. */ arena on the free list. */
assert (a->attached_threads > 0); assert (a->attached_threads > 0);
@@ -926,7 +926,7 @@ arena_thread_freeres (void)
a->next_free = free_list; a->next_free = free_list;
free_list = a; free_list = a;
} }
(void) mutex_unlock (&free_list_lock); __libc_lock_unlock (free_list_lock);
} }
} }
text_set_element (__libc_thread_subfreeres, arena_thread_freeres); text_set_element (__libc_thread_subfreeres, arena_thread_freeres);

View File

@@ -291,9 +291,9 @@ malloc_check (size_t sz, const void *caller)
return NULL; return NULL;
} }
(void) mutex_lock (&main_arena.mutex); __libc_lock_lock (main_arena.mutex);
victim = (top_check () >= 0) ? _int_malloc (&main_arena, sz + 1) : NULL; victim = (top_check () >= 0) ? _int_malloc (&main_arena, sz + 1) : NULL;
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
return mem2mem_check (victim, sz); return mem2mem_check (victim, sz);
} }
@@ -305,11 +305,11 @@ free_check (void *mem, const void *caller)
if (!mem) if (!mem)
return; return;
(void) mutex_lock (&main_arena.mutex); __libc_lock_lock (main_arena.mutex);
p = mem2chunk_check (mem, NULL); p = mem2chunk_check (mem, NULL);
if (!p) if (!p)
{ {
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
malloc_printerr (check_action, "free(): invalid pointer", mem, malloc_printerr (check_action, "free(): invalid pointer", mem,
&main_arena); &main_arena);
@@ -317,12 +317,12 @@ free_check (void *mem, const void *caller)
} }
if (chunk_is_mmapped (p)) if (chunk_is_mmapped (p))
{ {
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
munmap_chunk (p); munmap_chunk (p);
return; return;
} }
_int_free (&main_arena, p, 1); _int_free (&main_arena, p, 1);
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
} }
static void * static void *
@@ -345,9 +345,9 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
free_check (oldmem, NULL); free_check (oldmem, NULL);
return NULL; return NULL;
} }
(void) mutex_lock (&main_arena.mutex); __libc_lock_lock (main_arena.mutex);
const mchunkptr oldp = mem2chunk_check (oldmem, &magic_p); const mchunkptr oldp = mem2chunk_check (oldmem, &magic_p);
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
if (!oldp) if (!oldp)
{ {
malloc_printerr (check_action, "realloc(): invalid pointer", oldmem, malloc_printerr (check_action, "realloc(): invalid pointer", oldmem,
@@ -357,7 +357,7 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
const INTERNAL_SIZE_T oldsize = chunksize (oldp); const INTERNAL_SIZE_T oldsize = chunksize (oldp);
checked_request2size (bytes + 1, nb); checked_request2size (bytes + 1, nb);
(void) mutex_lock (&main_arena.mutex); __libc_lock_lock (main_arena.mutex);
if (chunk_is_mmapped (oldp)) if (chunk_is_mmapped (oldp))
{ {
@@ -400,7 +400,7 @@ realloc_check (void *oldmem, size_t bytes, const void *caller)
if (newmem == NULL) if (newmem == NULL)
*magic_p ^= 0xFF; *magic_p ^= 0xFF;
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
return mem2mem_check (newmem, bytes); return mem2mem_check (newmem, bytes);
} }
@@ -440,10 +440,10 @@ memalign_check (size_t alignment, size_t bytes, const void *caller)
alignment = a; alignment = a;
} }
(void) mutex_lock (&main_arena.mutex); __libc_lock_lock (main_arena.mutex);
mem = (top_check () >= 0) ? _int_memalign (&main_arena, alignment, bytes + 1) : mem = (top_check () >= 0) ? _int_memalign (&main_arena, alignment, bytes + 1) :
NULL; NULL;
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
return mem2mem_check (mem, bytes); return mem2mem_check (mem, bytes);
} }
@@ -503,7 +503,7 @@ __malloc_get_state (void)
if (!ms) if (!ms)
return 0; return 0;
(void) mutex_lock (&main_arena.mutex); __libc_lock_lock (main_arena.mutex);
malloc_consolidate (&main_arena); malloc_consolidate (&main_arena);
ms->magic = MALLOC_STATE_MAGIC; ms->magic = MALLOC_STATE_MAGIC;
ms->version = MALLOC_STATE_VERSION; ms->version = MALLOC_STATE_VERSION;
@@ -540,7 +540,7 @@ __malloc_get_state (void)
ms->arena_test = mp_.arena_test; ms->arena_test = mp_.arena_test;
ms->arena_max = mp_.arena_max; ms->arena_max = mp_.arena_max;
ms->narenas = narenas; ms->narenas = narenas;
(void) mutex_unlock (&main_arena.mutex); __libc_lock_unlock (main_arena.mutex);
return (void *) ms; return (void *) ms;
} }

View File

@@ -2870,7 +2870,7 @@ __libc_malloc (size_t bytes)
} }
if (ar_ptr != NULL) if (ar_ptr != NULL)
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
assert (!victim || chunk_is_mmapped (mem2chunk (victim)) || assert (!victim || chunk_is_mmapped (mem2chunk (victim)) ||
ar_ptr == arena_for_chunk (mem2chunk (victim))); ar_ptr == arena_for_chunk (mem2chunk (victim)));
@@ -3012,11 +3012,11 @@ __libc_realloc (void *oldmem, size_t bytes)
return newmem; return newmem;
} }
(void) mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
newp = _int_realloc (ar_ptr, oldp, oldsize, nb); newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
assert (!newp || chunk_is_mmapped (mem2chunk (newp)) || assert (!newp || chunk_is_mmapped (mem2chunk (newp)) ||
ar_ptr == arena_for_chunk (mem2chunk (newp))); ar_ptr == arena_for_chunk (mem2chunk (newp)));
@@ -3098,7 +3098,7 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
} }
if (ar_ptr != NULL) if (ar_ptr != NULL)
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
assert (!p || chunk_is_mmapped (mem2chunk (p)) || assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
ar_ptr == arena_for_chunk (mem2chunk (p))); ar_ptr == arena_for_chunk (mem2chunk (p)));
@@ -3219,7 +3219,7 @@ __libc_calloc (size_t n, size_t elem_size)
} }
if (av != NULL) if (av != NULL)
(void) mutex_unlock (&av->mutex); __libc_lock_unlock (av->mutex);
/* Allocation failed even after a retry. */ /* Allocation failed even after a retry. */
if (mem == 0) if (mem == 0)
@@ -3835,7 +3835,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
errstr = "free(): invalid pointer"; errstr = "free(): invalid pointer";
errout: errout:
if (!have_lock && locked) if (!have_lock && locked)
(void) mutex_unlock (&av->mutex); __libc_lock_unlock (av->mutex);
malloc_printerr (check_action, errstr, chunk2mem (p), av); malloc_printerr (check_action, errstr, chunk2mem (p), av);
return; return;
} }
@@ -3874,7 +3874,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
after getting the lock. */ after getting the lock. */
if (have_lock if (have_lock
|| ({ assert (locked == 0); || ({ assert (locked == 0);
mutex_lock(&av->mutex); __libc_lock_lock (av->mutex);
locked = 1; locked = 1;
chunk_at_offset (p, size)->size <= 2 * SIZE_SZ chunk_at_offset (p, size)->size <= 2 * SIZE_SZ
|| chunksize (chunk_at_offset (p, size)) >= av->system_mem; || chunksize (chunk_at_offset (p, size)) >= av->system_mem;
@@ -3885,7 +3885,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
} }
if (! have_lock) if (! have_lock)
{ {
(void)mutex_unlock(&av->mutex); __libc_lock_unlock (av->mutex);
locked = 0; locked = 0;
} }
} }
@@ -3931,7 +3931,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
else if (!chunk_is_mmapped(p)) { else if (!chunk_is_mmapped(p)) {
if (! have_lock) { if (! have_lock) {
(void)mutex_lock(&av->mutex); __libc_lock_lock (av->mutex);
locked = 1; locked = 1;
} }
@@ -4064,7 +4064,7 @@ _int_free (mstate av, mchunkptr p, int have_lock)
if (! have_lock) { if (! have_lock) {
assert (locked); assert (locked);
(void)mutex_unlock(&av->mutex); __libc_lock_unlock (av->mutex);
} }
} }
/* /*
@@ -4531,9 +4531,9 @@ __malloc_trim (size_t s)
mstate ar_ptr = &main_arena; mstate ar_ptr = &main_arena;
do do
{ {
(void) mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
result |= mtrim (ar_ptr, s); result |= mtrim (ar_ptr, s);
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = ar_ptr->next; ar_ptr = ar_ptr->next;
} }
@@ -4662,9 +4662,9 @@ __libc_mallinfo (void)
ar_ptr = &main_arena; ar_ptr = &main_arena;
do do
{ {
(void) mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
int_mallinfo (ar_ptr, &m); int_mallinfo (ar_ptr, &m);
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = ar_ptr->next; ar_ptr = ar_ptr->next;
} }
@@ -4694,7 +4694,7 @@ __malloc_stats (void)
struct mallinfo mi; struct mallinfo mi;
memset (&mi, 0, sizeof (mi)); memset (&mi, 0, sizeof (mi));
(void) mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
int_mallinfo (ar_ptr, &mi); int_mallinfo (ar_ptr, &mi);
fprintf (stderr, "Arena %d:\n", i); fprintf (stderr, "Arena %d:\n", i);
fprintf (stderr, "system bytes = %10u\n", (unsigned int) mi.arena); fprintf (stderr, "system bytes = %10u\n", (unsigned int) mi.arena);
@@ -4705,7 +4705,7 @@ __malloc_stats (void)
#endif #endif
system_b += mi.arena; system_b += mi.arena;
in_use_b += mi.uordblks; in_use_b += mi.uordblks;
(void) mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = ar_ptr->next; ar_ptr = ar_ptr->next;
if (ar_ptr == &main_arena) if (ar_ptr == &main_arena)
break; break;
@@ -4733,7 +4733,7 @@ __libc_mallopt (int param_number, int value)
if (__malloc_initialized < 0) if (__malloc_initialized < 0)
ptmalloc_init (); ptmalloc_init ();
(void) mutex_lock (&av->mutex); __libc_lock_lock (av->mutex);
/* Ensure initialization/consolidation */ /* Ensure initialization/consolidation */
malloc_consolidate (av); malloc_consolidate (av);
@@ -4811,7 +4811,7 @@ __libc_mallopt (int param_number, int value)
} }
break; break;
} }
(void) mutex_unlock (&av->mutex); __libc_lock_unlock (av->mutex);
return res; return res;
} }
libc_hidden_def (__libc_mallopt) libc_hidden_def (__libc_mallopt)
@@ -5058,7 +5058,7 @@ __malloc_info (int options, FILE *fp)
} sizes[NFASTBINS + NBINS - 1]; } sizes[NFASTBINS + NBINS - 1];
#define nsizes (sizeof (sizes) / sizeof (sizes[0])) #define nsizes (sizeof (sizes) / sizeof (sizes[0]))
mutex_lock (&ar_ptr->mutex); __libc_lock_lock (ar_ptr->mutex);
for (size_t i = 0; i < NFASTBINS; ++i) for (size_t i = 0; i < NFASTBINS; ++i)
{ {
@@ -5117,7 +5117,7 @@ __malloc_info (int options, FILE *fp)
avail += sizes[NFASTBINS - 1 + i].total; avail += sizes[NFASTBINS - 1 + i].total;
} }
mutex_unlock (&ar_ptr->mutex); __libc_lock_unlock (ar_ptr->mutex);
total_nfastblocks += nfastblocks; total_nfastblocks += nfastblocks;
total_fastavail += fastavail; total_fastavail += fastavail;