[3/6] malloc: Inline int_free_check
Checks

| Context                                           | Check   | Description                                  |
|---------------------------------------------------|---------|----------------------------------------------|
| redhat-pt-bot/TryBot-apply_patch                  | success | Patch applied to master at the time it was sent |
| linaro-tcwg-bot/tcwg_glibc_build--master-arm      | success | Build passed                                 |
| linaro-tcwg-bot/tcwg_glibc_build--master-aarch64  | success | Build passed                                 |
| linaro-tcwg-bot/tcwg_glibc_check--master-arm      | success | Test passed                                  |
| linaro-tcwg-bot/tcwg_glibc_check--master-aarch64  | success | Test passed                                  |
Commit Message
Inline int_free_check since it is only used by __libc_free.
---
Comments
On 31/03/25 09:57, Wilco Dijkstra wrote:
> Inline int_free_check since it is only used by __libc_free.
LGTM, thanks.
Reviewed-by: Adhemerval Zanella <adhemerval.zanella@linaro.org>
>
> ---
>
> diff --git a/malloc/malloc.c b/malloc/malloc.c
> index 5a1a65175cbe6715c67a8cc2e26bee64d78aac97..24a2d50ec845d379e7eae0a2360cb669a06504ea 100644
> --- a/malloc/malloc.c
> +++ b/malloc/malloc.c
> @@ -1086,7 +1086,6 @@ typedef struct malloc_chunk* mchunkptr;
> /* Internal routines. */
>
> static void* _int_malloc(mstate, size_t);
> -static void _int_free_check (mstate, mchunkptr, INTERNAL_SIZE_T);
> static void _int_free_chunk (mstate, mchunkptr, INTERNAL_SIZE_T, int);
> static void _int_free_merge_chunk (mstate, mchunkptr, INTERNAL_SIZE_T);
> static INTERNAL_SIZE_T _int_free_create_chunk (mstate,
> @@ -3466,7 +3465,18 @@ __libc_free (void *mem)
>
> INTERNAL_SIZE_T size = chunksize (p);
>
> - _int_free_check (arena_for_chunk (p), p, size);
> + /* Little security check which won't hurt performance: the
> + allocator never wraps around at the end of the address space.
> + Therefore we can exclude some size values which might appear
> + here by accident or by "design" from some intruder. */
> + if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
> + || __builtin_expect (misaligned_chunk (p), 0))
> + malloc_printerr ("free(): invalid pointer");
Maybe, since you are touching this code anyway, use __glibc_unlikely here as well.
> + /* We know that each chunk is at least MINSIZE bytes. */
> + if (__glibc_unlikely (size < MINSIZE))
> + malloc_printerr ("free(): invalid size");
> +
> + check_inuse_chunk (arena_for_chunk (p), p);
>
> #if USE_TCACHE
> if (tcache_free (p, size))
> @@ -4550,23 +4560,6 @@ _int_malloc (mstate av, size_t bytes)
> ------------------------------ free ------------------------------
> */
>
> -static __always_inline void
> -_int_free_check (mstate av, mchunkptr p, INTERNAL_SIZE_T size)
> -{
> - /* Little security check which won't hurt performance: the
> - allocator never wraps around at the end of the address space.
> - Therefore we can exclude some size values which might appear
> - here by accident or by "design" from some intruder. */
> - if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
> - || __builtin_expect (misaligned_chunk (p), 0))
> - malloc_printerr ("free(): invalid pointer");
> - /* We know that each chunk is at least MINSIZE bytes. */
> - if (__glibc_unlikely (size < MINSIZE))
> - malloc_printerr ("free(): invalid size");
> -
> - check_inuse_chunk (av, p);
> -}
> -
> /* Free chunk P of SIZE bytes to the arena. HAVE_LOCK indicates where
> the arena for P has already been locked. Caller must ensure chunk
> and size are valid. */
>
@@ -1086,7 +1086,6 @@ typedef struct malloc_chunk* mchunkptr;
/* Internal routines. */
static void* _int_malloc(mstate, size_t);
-static void _int_free_check (mstate, mchunkptr, INTERNAL_SIZE_T);
static void _int_free_chunk (mstate, mchunkptr, INTERNAL_SIZE_T, int);
static void _int_free_merge_chunk (mstate, mchunkptr, INTERNAL_SIZE_T);
static INTERNAL_SIZE_T _int_free_create_chunk (mstate,
@@ -3466,7 +3465,18 @@ __libc_free (void *mem)
INTERNAL_SIZE_T size = chunksize (p);
- _int_free_check (arena_for_chunk (p), p, size);
+ /* Little security check which won't hurt performance: the
+ allocator never wraps around at the end of the address space.
+ Therefore we can exclude some size values which might appear
+ here by accident or by "design" from some intruder. */
+ if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
+ || __builtin_expect (misaligned_chunk (p), 0))
+ malloc_printerr ("free(): invalid pointer");
+ /* We know that each chunk is at least MINSIZE bytes. */
+ if (__glibc_unlikely (size < MINSIZE))
+ malloc_printerr ("free(): invalid size");
+
+ check_inuse_chunk (arena_for_chunk (p), p);
#if USE_TCACHE
if (tcache_free (p, size))
@@ -4550,23 +4560,6 @@ _int_malloc (mstate av, size_t bytes)
------------------------------ free ------------------------------
*/
-static __always_inline void
-_int_free_check (mstate av, mchunkptr p, INTERNAL_SIZE_T size)
-{
- /* Little security check which won't hurt performance: the
- allocator never wraps around at the end of the address space.
- Therefore we can exclude some size values which might appear
- here by accident or by "design" from some intruder. */
- if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
- || __builtin_expect (misaligned_chunk (p), 0))
- malloc_printerr ("free(): invalid pointer");
- /* We know that each chunk is at least MINSIZE bytes. */
- if (__glibc_unlikely (size < MINSIZE))
- malloc_printerr ("free(): invalid size");
-
- check_inuse_chunk (av, p);
-}
-
/* Free chunk P of SIZE bytes to the arena. HAVE_LOCK indicates where
the arena for P has already been locked. Caller must ensure chunk
and size are valid. */