malloc: Prevent arena free_list from turning cyclic [BZ #19048]

[BZ #19048]
	* malloc/malloc.c (struct malloc_state): Update comment.  Add
	attached_threads member.
	(main_arena): Initialize attached_threads.
	* malloc/arena.c (list_lock): Update comment.
	(ptmalloc_lock_all, ptmalloc_unlock_all): Likewise.
	(ptmalloc_unlock_all2): Reinitialize arena reference counts.
	(detach_arena): New function.
	(_int_new_arena): Initialize arena reference count and detach
	replaced arena.
	(get_free_list, reused_arena): Update reference count and detach
	replaced arena.
	(arena_thread_freeres): Update arena reference count and only put
	unreferenced arenas on the free list.
This commit is contained in:
Florian Weimer 2015-10-28 19:32:46 +01:00
parent 0b9af583a5
commit a62719ba90
4 changed files with 103 additions and 10 deletions

View File

@ -1,3 +1,20 @@
2015-10-28 Florian Weimer <fweimer@redhat.com>
[BZ #19048]
* malloc/malloc.c (struct malloc_state): Update comment. Add
attached_threads member.
(main_arena): Initialize attached_threads.
* malloc/arena.c (list_lock): Update comment.
(ptmalloc_lock_all, ptmalloc_unlock_all): Likewise.
(ptmalloc_unlock_all2): Reinitialize arena reference counts.
(detach_arena): New function.
(_int_new_arena): Initialize arena reference count and detach
replaced arena.
(get_free_list, reused_arena): Update reference count and detach
replaced arena.
(arena_thread_freeres): Update arena reference count and only put
unreferenced arenas on the free list.
2015-10-28 Joseph Myers <joseph@codesourcery.com>
[BZ #19181]

15
NEWS
View File

@ -19,9 +19,18 @@ Version 2.23
18823, 18824, 18825, 18857, 18863, 18870, 18872, 18873, 18875, 18887,
18918, 18921, 18928, 18951, 18952, 18953, 18956, 18961, 18966, 18967,
18969, 18970, 18977, 18980, 18981, 18982, 18985, 19003, 19007, 19012,
19016, 19018, 19032, 19046, 19049, 19050, 19059, 19071, 19074, 19076,
19077, 19078, 19079, 19085, 19086, 19088, 19094, 19095, 19124, 19125,
19129, 19134, 19137, 19156, 19174, 19181.
19016, 19018, 19032, 19046, 19048, 19049, 19050, 19059, 19071, 19074,
19076, 19077, 19078, 19079, 19085, 19086, 19088, 19094, 19095, 19124,
19125, 19129, 19134, 19137, 19156, 19174, 19181.
* A defect in the malloc implementation, present since glibc 2.15 (2012) or
glibc 2.10 via --enable-experimental-malloc (2009), could result in the
unnecessary serialization of memory allocation requests across threads.
The defect is now corrected. Users should see a substantial increase in
the concurrent throughput of allocation requests for applications which
trigger this bug. Affected applications typically create and
destroy threads frequently. (Bug 19048 was reported and analyzed by
Ericsson.)
* There is now a --disable-timezone-tools configure option for disabling the
building and installing of the timezone related utilities (zic, zdump, and

View File

@ -68,7 +68,10 @@ extern int sanity_check_heap_info_alignment[(sizeof (heap_info)
static __thread mstate thread_arena attribute_tls_model_ie;
/* Arena free list. */
/* Arena free list. list_lock protects the free_list variable below,
and the next_free and attached_threads members of the mstate
objects. No other (malloc) locks must be taken while list_lock is
active, otherwise deadlocks may occur. */
static mutex_t list_lock = MUTEX_INITIALIZER;
static size_t narenas = 1;
@ -225,7 +228,10 @@ ptmalloc_lock_all (void)
save_free_hook = __free_hook;
__malloc_hook = malloc_atfork;
__free_hook = free_atfork;
/* Only the current thread may perform malloc/free calls now. */
/* Only the current thread may perform malloc/free calls now.
save_arena will be reattached to the current thread, in
ptmalloc_lock_all, so save_arena->attached_threads is not
updated. */
save_arena = thread_arena;
thread_arena = ATFORK_ARENA_PTR;
out:
@ -243,6 +249,9 @@ ptmalloc_unlock_all (void)
if (--atfork_recursive_cntr != 0)
return;
/* Replace ATFORK_ARENA_PTR with save_arena.
save_arena->attached_threads was not changed in ptmalloc_lock_all
and is still correct. */
thread_arena = save_arena;
__malloc_hook = save_malloc_hook;
__free_hook = save_free_hook;
@ -274,12 +283,19 @@ ptmalloc_unlock_all2 (void)
thread_arena = save_arena;
__malloc_hook = save_malloc_hook;
__free_hook = save_free_hook;
/* Push all arenas to the free list, except save_arena, which is
attached to the current thread. */
if (save_arena != NULL)
((mstate) save_arena)->attached_threads = 1;
free_list = NULL;
for (ar_ptr = &main_arena;; )
{
mutex_init (&ar_ptr->mutex);
if (ar_ptr != save_arena)
{
/* This arena is no longer attached to any thread. */
ar_ptr->attached_threads = 0;
ar_ptr->next_free = free_list;
free_list = ar_ptr;
}
@ -718,6 +734,22 @@ heap_trim (heap_info *heap, size_t pad)
/* Create a new arena with initial size "size". */
/* If REPLACED_ARENA is not NULL, detach it from this thread. Must be
called while list_lock is held. */
/* Drop this thread's reference to REPLACED_ARENA (no-op when NULL).
   The caller must hold list_lock, which serializes all access to the
   attached_threads reference count.  */
static void
detach_arena (mstate replaced_arena)
{
if (replaced_arena != NULL)
{
/* A thread is detaching, so the arena must currently be attached
   to at least one thread.  */
assert (replaced_arena->attached_threads > 0);
/* The current implementation only detaches from main_arena in
case of allocation failure. This means that it is likely not
beneficial to put the arena on free_list even if the
reference count reaches zero. */
--replaced_arena->attached_threads;
}
}
static mstate
_int_new_arena (size_t size)
{
@ -739,6 +771,7 @@ _int_new_arena (size_t size)
}
a = h->ar_ptr = (mstate) (h + 1);
malloc_init_state (a);
a->attached_threads = 1;
/*a->next = NULL;*/
a->system_mem = a->max_system_mem = h->size;
arena_mem += h->size;
@ -752,12 +785,15 @@ _int_new_arena (size_t size)
set_head (top (a), (((char *) h + h->size) - ptr) | PREV_INUSE);
LIBC_PROBE (memory_arena_new, 2, a, size);
mstate replaced_arena = thread_arena;
thread_arena = a;
mutex_init (&a->mutex);
(void) mutex_lock (&a->mutex);
(void) mutex_lock (&list_lock);
detach_arena (replaced_arena);
/* Add the new arena to the global list. */
a->next = main_arena.next;
atomic_write_barrier ();
@ -772,13 +808,23 @@ _int_new_arena (size_t size)
static mstate
get_free_list (void)
{
mstate replaced_arena = thread_arena;
mstate result = free_list;
if (result != NULL)
{
(void) mutex_lock (&list_lock);
result = free_list;
if (result != NULL)
free_list = result->next_free;
{
free_list = result->next_free;
/* Arenas on the free list are not attached to any thread. */
assert (result->attached_threads == 0);
/* But the arena will now be attached to this thread. */
result->attached_threads = 1;
detach_arena (replaced_arena);
}
(void) mutex_unlock (&list_lock);
if (result != NULL)
@ -837,6 +883,14 @@ reused_arena (mstate avoid_arena)
(void) mutex_lock (&result->mutex);
out:
{
mstate replaced_arena = thread_arena;
(void) mutex_lock (&list_lock);
detach_arena (replaced_arena);
++result->attached_threads;
(void) mutex_unlock (&list_lock);
}
LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena);
thread_arena = result;
next_to_use = result->next;
@ -931,8 +985,14 @@ arena_thread_freeres (void)
if (a != NULL)
{
(void) mutex_lock (&list_lock);
a->next_free = free_list;
free_list = a;
/* If this was the last attached thread for this arena, put the
arena on the free list. */
assert (a->attached_threads > 0);
if (--a->attached_threads == 0)
{
a->next_free = free_list;
free_list = a;
}
(void) mutex_unlock (&list_lock);
}
}

View File

@ -1709,9 +1709,15 @@ struct malloc_state
/* Linked list */
struct malloc_state *next;
/* Linked list for free arenas. */
/* Linked list for free arenas. Access to this field is serialized
by list_lock in arena.c. */
struct malloc_state *next_free;
/* Number of threads attached to this arena. 0 if the arena is on
the free list. Access to this field is serialized by list_lock
in arena.c. */
INTERNAL_SIZE_T attached_threads;
/* Memory allocated from the system in this arena. */
INTERNAL_SIZE_T system_mem;
INTERNAL_SIZE_T max_system_mem;
@ -1755,7 +1761,8 @@ struct malloc_par
static struct malloc_state main_arena =
{
.mutex = MUTEX_INITIALIZER,
.next = &main_arena
.next = &main_arena,
.attached_threads = 1
};
/* There is only one instance of the malloc parameters. */