[5/6] malloc: Remove unnecessary tagging around _mid_memalign

Message ID 7203bf2d0d1405daa5d1c62744abf8ceee85b5bb.1616155129.git.szabolcs.nagy@arm.com
State Committed
Commit 05f878c58e53370a76c2b82679899936bc69c714
Headers
Series malloc: more memory tagging optimizations |

Commit Message

Szabolcs Nagy March 19, 2021, 1:27 p.m. UTC
  The internal _mid_memalign already returns newly tagged memory.
(__libc_memalign and posix_memalign already relied on this; this
patch fixes the other call sites.)
---
 malloc/malloc.c | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)
  

Patch

diff --git a/malloc/malloc.c b/malloc/malloc.c
index 6f87b7bdb1..cb1837d0d7 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -3553,22 +3553,17 @@  libc_hidden_def (__libc_memalign)
 void *
 __libc_valloc (size_t bytes)
 {
-  void *p;
-
   if (__malloc_initialized < 0)
     ptmalloc_init ();
 
   void *address = RETURN_ADDRESS (0);
   size_t pagesize = GLRO (dl_pagesize);
-  p = _mid_memalign (pagesize, bytes, address);
-  return tag_new_usable (p);
+  return _mid_memalign (pagesize, bytes, address);
 }
 
 void *
 __libc_pvalloc (size_t bytes)
 {
-  void *p;
-
   if (__malloc_initialized < 0)
     ptmalloc_init ();
 
@@ -3585,8 +3580,7 @@  __libc_pvalloc (size_t bytes)
     }
   rounded_bytes = rounded_bytes & -(pagesize - 1);
 
-  p = _mid_memalign (pagesize, rounded_bytes, address);
-  return tag_new_usable (p);
+  return _mid_memalign (pagesize, rounded_bytes, address);
 }
 
 void *