[malloc] Improve malloc initialization sequence

Message ID: HE1PR0801MB2058129B80D52F8CCF0F5370837D0@HE1PR0801MB2058.eurprd08.prod.outlook.com
State: Committed

Commit Message

Wilco Dijkstra Oct. 2, 2017, 2:24 p.m. UTC
  Florian Weimer wrote:

> The locking is unnecessary.  You should remove it and call 
> malloc_init_state before the tunables preprocessor conditional.
>
> I believe this fixes bug 22159, so please reference this bug (both in 
> the commit message and the ChangeLog).

Sure, here is the updated version:

The current malloc initialization is quite convoluted. Instead of
sometimes calling malloc_consolidate from ptmalloc_init, call
malloc_init_state early so that the main_arena is always initialized.
The special initialization can now be removed from malloc_consolidate.
This also fixes BZ #22159.
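
To make the ordering concrete, here is a standalone toy model (illustration
only, not part of the patch; struct arena, init_state and consolidate are
hypothetical stand-ins for the glibc internals):

/* Toy model, not glibc code: once the arena is initialized up front,
   consolidate no longer needs a max_fast == 0 lazy-init path.  */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct arena { size_t max_fast; bool have_fastchunks; };

static void init_state (struct arena *av)
{
  av->max_fast = 128;            /* cf. set_max_fast (DEFAULT_MXFAST) */
  av->have_fastchunks = false;
}

static void consolidate (struct arena *av)
{
  /* Previously this also had to detect max_fast == 0 and call
     init_state; now it can assume an initialized arena.  */
  av->have_fastchunks = false;   /* ...walk fastbins, merge chunks...  */
}

int main (void)
{
  struct arena main_arena = { 0 };
  init_state (&main_arena);      /* unconditional, before tunables */
  /* ...apply tunables or MALLOC_* environment settings here...  */
  consolidate (&main_arena);     /* no locking or lazy init needed */
  printf ("max_fast = %zu\n", main_arena.max_fast);
  return 0;
}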

GLIBC builds and tests pass, including with --enable-tunables=no. OK for commit?

ChangeLog:
2017-10-02  Wilco Dijkstra  <wdijkstr@arm.com>

        [BZ #22159]
        * malloc/arena.c (ptmalloc_init): Call malloc_init_state.
        * malloc/malloc.c (malloc_consolidate): Remove initialization.
--
  

Patch

diff --git a/malloc/arena.c b/malloc/arena.c
index 9e5a62d260bf2f5e6d76da4ccaf7b7dcb388c296..85b985e193d513b633bd148b275515a29a710584 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -307,13 +307,9 @@ ptmalloc_init (void)
 
   thread_arena = &main_arena;
 
-#if HAVE_TUNABLES
-  /* Ensure initialization/consolidation and do it under a lock so that a
-     thread attempting to use the arena in parallel waits on us till we
-     finish.  */
-  __libc_lock_lock (main_arena.mutex);
-  malloc_consolidate (&main_arena);
+  malloc_init_state (&main_arena);
 
+#if HAVE_TUNABLES
   TUNABLE_GET (check, int32_t, TUNABLE_CALLBACK (set_mallopt_check));
   TUNABLE_GET (top_pad, size_t, TUNABLE_CALLBACK (set_top_pad));
   TUNABLE_GET (perturb, int32_t, TUNABLE_CALLBACK (set_perturb_byte));
@@ -322,13 +318,12 @@ ptmalloc_init (void)
   TUNABLE_GET (mmap_max, int32_t, TUNABLE_CALLBACK (set_mmaps_max));
   TUNABLE_GET (arena_max, size_t, TUNABLE_CALLBACK (set_arena_max));
   TUNABLE_GET (arena_test, size_t, TUNABLE_CALLBACK (set_arena_test));
-#if USE_TCACHE
+# if USE_TCACHE
   TUNABLE_GET (tcache_max, size_t, TUNABLE_CALLBACK (set_tcache_max));
   TUNABLE_GET (tcache_count, size_t, TUNABLE_CALLBACK (set_tcache_count));
   TUNABLE_GET (tcache_unsorted_limit, size_t,
 	       TUNABLE_CALLBACK (set_tcache_unsorted_limit));
-#endif
-  __libc_lock_unlock (main_arena.mutex);
+# endif
 #else
   const char *s = NULL;
   if (__glibc_likely (_environ != NULL))
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 88cfd25766eba6787faeb7195d95b73d7a4637ab..162e423e7bd18a07e4e97dc618be406d8bc9c529 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -4410,12 +4410,7 @@ static void malloc_consolidate(mstate av)
   mchunkptr       bck;
   mchunkptr       fwd;
 
-  /*
-    If max_fast is 0, we know that av hasn't
-    yet been initialized, in which case do so below
-  */
-
-  if (get_max_fast () != 0) {
+  {
     atomic_store_relaxed (&av->have_fastchunks, false);
 
     unsorted_bin = unsorted_chunks(av);
@@ -4484,10 +4479,6 @@ static void malloc_consolidate(mstate av)
       }
     } while (fb++ != maxfb);
   }
-  else {
-    malloc_init_state(av);
-    check_malloc_state(av);
-  }
 }
 
 /*