[10/16] malloc: Change calloc when tagging is disabled

Message ID df63ab4745dd6a610a54c5e4f38011170e4fa21d.1614874816.git.szabolcs.nagy@arm.com
State Committed
Commit 9d61722b599a02a6d3caed1d75417a7f5e661693
Series memory tagging improvements

Commit Message

Szabolcs Nagy March 4, 2021, 4:33 p.m. UTC
When glibc is built with memory tagging support (USE_MTAG) but it is not
enabled at runtime (mtag_enabled), an unconditional memset was used
even though it can often be avoided.

This is for performance when tagging is supported but not enabled.
The extra check should have no overhead: tag_new_zero_region already
had a runtime check which the compiler can now optimize away.
---
 malloc/malloc.c | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)
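
For context on the "no overhead" point: when glibc is built without
USE_MTAG, mtag_enabled is a compile-time constant false, so the new branch
in __libc_calloc folds away entirely, and under USE_MTAG it duplicates the
check that tag_new_zero_region already performs. A rough sketch of the
pre-existing definitions this relies on (paraphrased, not copied verbatim
from malloc/malloc.c):

  #ifdef USE_MTAG
  /* Set during startup if the hardware/kernel enables tagging
     for this process.  */
  static bool mtag_enabled = false;
  #else
  /* Without USE_MTAG the flag is a constant, so any branch on it
     is eliminated at compile time.  */
  # define mtag_enabled false
  #endif

  /* Tag and zero a new region, or fall back to plain memset when
     tagging is not enabled at runtime.  */
  static __always_inline void *
  tag_new_zero_region (void *ptr, size_t size)
  {
    if (__glibc_unlikely (mtag_enabled))
      return __libc_mtag_tag_zero_region (__libc_mtag_new_tag (ptr), size);

    return memset (ptr, 0, size);
  }

With these definitions, hoisting the mtag_enabled check into __libc_calloc
lets the non-tagging case fall through to the existing MORECORE_CLEARS
clearing optimizations instead of always zeroing the whole block via
tag_new_zero_region.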
  

Patch

diff --git a/malloc/malloc.c b/malloc/malloc.c
index 9002d51d7b..b1ee0f450b 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -3591,11 +3591,9 @@  __libc_calloc (size_t n, size_t elem_size)
   mchunkptr oldtop;
   INTERNAL_SIZE_T sz, oldtopsize;
   void *mem;
-#ifndef USE_MTAG
   unsigned long clearsize;
   unsigned long nclears;
   INTERNAL_SIZE_T *d;
-#endif
   ptrdiff_t bytes;
 
   if (__glibc_unlikely (__builtin_mul_overflow (n, elem_size, &bytes)))
@@ -3674,12 +3672,13 @@  __libc_calloc (size_t n, size_t elem_size)
     return 0;
 
   mchunkptr p = mem2chunk (mem);
+
   /* If we are using memory tagging, then we need to set the tags
      regardless of MORECORE_CLEARS, so we zero the whole block while
      doing so.  */
-#ifdef USE_MTAG
-  return tag_new_zero_region (mem, CHUNK_AVAILABLE_SIZE (p) - CHUNK_HDR_SZ);
-#else
+  if (__glibc_unlikely (mtag_enabled))
+    return tag_new_zero_region (mem, CHUNK_AVAILABLE_SIZE (p) - CHUNK_HDR_SZ);
+
   INTERNAL_SIZE_T csz = chunksize (p);
 
   /* Two optional cases in which clearing not necessary */
@@ -3733,7 +3732,6 @@  __libc_calloc (size_t n, size_t elem_size)
     }
 
   return mem;
-#endif
 }
 
 /*