[v5,4/7] malloc: tcache small optimizations.

Message ID 20250321120102.54012-5-cupertino.miranda@oracle.com (mailing list archive)
State Under Review
Delegated to: Wilco Dijkstra
Series: tcache: malloc improvements

Checks

Context                                          | Check   | Description
redhat-pt-bot/TryBot-apply_patch                 | success | Patch applied to master at the time it was sent
linaro-tcwg-bot/tcwg_glibc_build--master-arm     | success | Build passed
linaro-tcwg-bot/tcwg_glibc_build--master-aarch64 | success | Build passed
linaro-tcwg-bot/tcwg_glibc_check--master-aarch64 | success | Test passed
linaro-tcwg-bot/tcwg_glibc_check--master-arm     | success | Test passed

Commit Message

Cupertino Miranda March 21, 2025, noon UTC
Replaced the condition in tcache_available so that it now only checks
whether tc_idx is smaller than TCACHE_MAX_BINS.
The original comparison against mp_.tcache_bins is not necessary
because the function also checks that the respective bin is not empty:
only tcache_put increases a bin's counter, and every caller of
tcache_put verifies that tc_idx is valid.
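
To make that invariant explicit, here is a standalone sketch using
hypothetical, simplified names (MAX_BINS, runtime_bins, put and
available stand in for TCACHE_MAX_BINS, mp_.tcache_bins, tcache_put
and tcache_available); it only illustrates the reasoning and is not
the actual glibc code:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_BINS 64              /* stands in for TCACHE_MAX_BINS.  */

static size_t runtime_bins = 8;  /* stands in for mp_.tcache_bins,
                                    which never exceeds MAX_BINS.  */
static uint16_t counts[MAX_BINS];

/* Stands in for tcache_put: the only place a counter grows, and only
   reached with an index its callers have already validated.  */
static void
put (size_t idx)
{
  counts[idx]++;
}

/* Stands in for tcache_available: the compile-time bound keeps the
   array access in range, and a non-zero counter already implies the
   index was validated by a caller of put ().  */
static bool
available (size_t idx)
{
  return idx < MAX_BINS && counts[idx] > 0;
}

int
main (void)
{
  size_t idx = 3;
  if (idx < runtime_bins)        /* callers validate before put ().  */
    put (idx);
  printf ("available(3) = %d, available(40) = %d\n",
          available (3), available (40));
  return 0;
}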

__glibc_(un)likely macros were also introduced to better hint the
compiler about the most likely taken paths.
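
For reference, both macros are thin wrappers around GCC's
__builtin_expect; paraphrasing the definitions in glibc's
<sys/cdefs.h> (the real header only defines them this way when the
builtin is available):

/* Paraphrase of the <sys/cdefs.h> definitions.  */
#define __glibc_unlikely(cond) __builtin_expect ((cond), 0)
#define __glibc_likely(cond)   __builtin_expect ((cond), 1)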
---
 malloc/arena.c  |  2 +-
 malloc/malloc.c | 11 ++++++-----
 2 files changed, 7 insertions(+), 6 deletions(-)
  

Patch

diff --git a/malloc/arena.c b/malloc/arena.c
index 405ae829c0..3259de5468 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -262,7 +262,7 @@  static void tcache_key_initialize (void);
 static void
 ptmalloc_init (void)
 {
-  if (__malloc_initialized)
+  if (__glibc_likely (__malloc_initialized))
     return;
 
   __malloc_initialized = true;
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 7f35f48769..ea2da0c9fc 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -3212,8 +3212,8 @@  tcache_next (tcache_entry *e)
 static __always_inline bool
 tcache_available (size_t tc_idx)
 {
-  if (tc_idx < mp_.tcache_bins
-      && tcache != NULL
+  if (__glibc_likely (tc_idx < TCACHE_MAX_BINS)
+      && __glibc_likely (tcache != NULL)
       && tcache->counts[tc_idx] > 0)
     return true;
   else
@@ -3250,7 +3250,8 @@  tcache_free (mchunkptr p, INTERNAL_SIZE_T size)
 {
   bool done = false;
   size_t tc_idx = csize2tidx (size);
-  if (tcache != NULL && tc_idx < mp_.tcache_bins)
+  if (__glibc_likely (tcache != NULL)
+      && __glibc_likely (tc_idx < mp_.tcache_bins))
     {
       /* Check to see if it's already in the tcache.  */
       tcache_entry *e = (tcache_entry *) chunk2mem (p);
@@ -4200,8 +4201,8 @@  _int_malloc (mstate av, size_t bytes)
 #if USE_TCACHE
 	      /* Fill cache first, return to user only if cache fills.
 		 We may return one of these chunks later.  */
-	      if (tcache_nb > 0
-		  && tcache->counts[tc_idx] < mp_.tcache_count)
+	      if (__glibc_likely (tcache_nb > 0)
+		  && __glibc_likely (tcache->counts[tc_idx] < mp_.tcache_count))
 		{
 		  tcache_put (victim, tc_idx);
 		  return_cached = 1;