[v6,3/4] Reduce CAS in malloc spinlocks
Checks

Context               | Check   | Description
dj/TryBot-apply_patch | success | Patch applied to master at the time it was sent
Commit Message
Do an atomic load first and check whether the compare would fail.  If so,
skip the CAS and spin instead, to reduce cache line bouncing on contended
locks.
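
As a rough sketch, the same check-before-CAS idea looks like this with
generic C11 atomics (illustration only: the patch itself uses glibc's
internal atomic_load_relaxed, atomic_spin_nop and catomic_* macros, and
the node/top/push names below are made up):

  #include <stdatomic.h>

  struct node { struct node *next; };

  /* Hypothetical lock-free push using the load-then-CAS pattern.  */
  static void
  push (_Atomic (struct node *) *top, struct node *n)
  {
    struct node *old = atomic_load_explicit (top, memory_order_relaxed);
    for (;;)
      {
        n->next = old;
        /* Cheap relaxed re-read: if the head already moved, the CAS below
           is certain to fail, so retry with the fresh value instead of
           pulling the cache line in exclusive state.  (The patch inserts
           atomic_spin_nop () at this point as a spin hint.)  */
        struct node *cur = atomic_load_explicit (top, memory_order_relaxed);
        if (cur != old)
          {
            old = cur;
            continue;
          }
        if (atomic_compare_exchange_weak_explicit (top, &old, n,
                                                   memory_order_release,
                                                   memory_order_relaxed))
          break;        /* n is now the head.  */
        /* CAS failed; old was updated to the observed head, loop again.  */
      }
  }

The relaxed load can be satisfied from a shared copy of the cache line,
so a thread only requests exclusive ownership (via the CAS) once the value
it observed still matches, which is what reduces the bouncing on contended
fastbins and on narenas.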
---
malloc/arena.c | 5 +++++
malloc/malloc.c | 10 ++++++++++
2 files changed, 15 insertions(+)
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -899,6 +899,11 @@ arena_get2 (size_t size, mstate avoid_arena)
          enough address space to create that many arenas. */
       if (__glibc_unlikely (n <= narenas_limit - 1))
         {
+          if (atomic_load_relaxed (&narenas) != n)
+            {
+              atomic_spin_nop ();
+              goto repeat;
+            }
           if (catomic_compare_and_exchange_bool_acq (&narenas, n + 1, n))
             goto repeat;
           a = _int_new_arena (size);
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -3717,6 +3717,11 @@ _int_malloc (mstate av, size_t bytes)
       pp = REVEAL_PTR (victim->fd); \
       if (__glibc_unlikely (pp != NULL && misaligned_chunk (pp))) \
         malloc_printerr ("malloc(): unaligned fastbin chunk detected"); \
+      if (atomic_load_relaxed (fb) != victim) \
+        { \
+          atomic_spin_nop (); \
+          continue; \
+        } \
     } \
   while ((pp = catomic_compare_and_exchange_val_acq (fb, pp, victim)) \
          != victim); \
@@ -4435,6 +4440,11 @@ _int_free (mstate av, mchunkptr p, int have_lock)
               malloc_printerr ("double free or corruption (fasttop)");
             old2 = old;
             p->fd = PROTECT_PTR (&p->fd, old);
+            if (atomic_load_relaxed (fb) != old2)
+              {
+                atomic_spin_nop ();
+                continue;
+              }
           }
         while ((old = catomic_compare_and_exchange_val_rel (fb, p, old2))
                != old2);