author	Siddhesh Poyarekar <siddhesh@sourceware.org>	2021-07-22 18:38:08 +0530
committer	Siddhesh Poyarekar <siddhesh@sourceware.org>	2021-07-22 18:38:08 +0530
commit	b5bd5bfe88f496463ec9fab680a8edf64d7c2a42 (patch)
tree	51978efe075143c64fcd622b051faa49572928db /malloc/arena.c
parent	9dad716d4d2993f50b165747781244bd7c43bc95 (diff)
glibc.malloc.check: Wean away from malloc hooks
The malloc-check debugging feature is tightly integrated into glibc malloc, so thanks to an idea from Florian Weimer, much of the malloc implementation has been moved into libc_malloc_debug.so to support malloc-check. Due to this, glibc malloc and malloc-check can no longer work together; they use altogether different (but identical) structures for heap management. This should not make a difference, though, since the malloc-check hook is not disabled anywhere except in malloc_set_state, which does so early enough that it shouldn't cause any problems.

The malloc check tunable is now in the debug DSO and has no effect when the DSO is not preloaded.

Reviewed-by: Carlos O'Donell <carlos@redhat.com>
Tested-by: Carlos O'Donell <carlos@redhat.com>
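In practical terms: after this change, setting the glibc.malloc.check tunable does nothing unless libc_malloc_debug.so is preloaded. A minimal sketch of a program whose heap corruption malloc-check should catch (not part of this patch; the DSO path in the comment is an assumption and varies by distribution):

/* check-demo.c -- deliberately corrupts the heap to exercise
   glibc.malloc.check.  Build:  gcc -o check-demo check-demo.c

   Tunable alone, no effect after this change:
     GLIBC_TUNABLES=glibc.malloc.check=3 ./check-demo
   With the debug DSO preloaded, malloc-check should abort on the
   corrupted chunk (path is an assumption; adjust for your system):
     LD_PRELOAD=/usr/lib64/libc_malloc_debug.so \
     GLIBC_TUNABLES=glibc.malloc.check=3 ./check-demo  */
#include <stdlib.h>
#include <string.h>

int
main (void)
{
  char *p = malloc (16);
  if (p == NULL)
    return 1;
  memset (p, 0, 17);	/* One-byte overflow past the 16-byte allocation.  */
  free (p);		/* malloc-check should detect the clobbered byte here.  */
  return 0;
}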
Diffstat (limited to 'malloc/arena.c')
-rw-r--r--	malloc/arena.c	17
1 file changed, 5 insertions(+), 12 deletions(-)
diff --git a/malloc/arena.c b/malloc/arena.c
index 840426f..edcaa88 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -79,7 +79,9 @@ static __thread mstate thread_arena attribute_tls_model_ie;
acquired after free_list_lock has been acquired. */
__libc_lock_define_initialized (static, free_list_lock);
+#if IS_IN (libc)
static size_t narenas = 1;
+#endif
static mstate free_list;
/* list_lock prevents concurrent writes to the next member of struct
@@ -207,14 +209,6 @@ __malloc_fork_unlock_child (void)
}
#if HAVE_TUNABLES
-static void
-TUNABLE_CALLBACK (set_mallopt_check) (tunable_val_t *valp)
-{
- int32_t value = (int32_t) valp->numval;
- if (value != 0)
- __malloc_check_init ();
-}
-
# define TUNABLE_CALLBACK_FNDECL(__name, __type) \
static inline int do_ ## __name (__type value); \
static void \
@@ -309,7 +303,7 @@ ptmalloc_init (void)
}
#endif
-#ifdef SHARED
+#if defined SHARED && IS_IN (libc)
/* In case this libc copy is in a non-default namespace, never use
brk. Likewise if dlopened from statically linked program. The
generic sbrk implementation also enforces this, but it is not
@@ -323,7 +317,6 @@ ptmalloc_init (void)
malloc_init_state (&main_arena);
#if HAVE_TUNABLES
- TUNABLE_GET (check, int32_t, TUNABLE_CALLBACK (set_mallopt_check));
TUNABLE_GET (top_pad, size_t, TUNABLE_CALLBACK (set_top_pad));
TUNABLE_GET (perturb, int32_t, TUNABLE_CALLBACK (set_perturb_byte));
TUNABLE_GET (mmap_threshold, size_t, TUNABLE_CALLBACK (set_mmap_threshold));
@@ -401,8 +394,6 @@ ptmalloc_init (void)
}
}
}
- if (s && s[0] != '\0' && s[0] != '0')
- __malloc_check_init ();
#endif
}
@@ -672,6 +663,7 @@ heap_trim (heap_info *heap, size_t pad)
/* Create a new arena with initial size "size". */
+#if IS_IN (libc)
/* If REPLACED_ARENA is not NULL, detach it from this thread. Must be
called while free_list_lock is held. */
static void
@@ -947,6 +939,7 @@ arena_get_retry (mstate ar_ptr, size_t bytes)
return ar_ptr;
}
+#endif
void
__malloc_arena_thread_freeres (void)
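A closing note on the IS_IN (libc) guards added above: the allocator sources are now compiled both into libc proper and into libc_malloc_debug.so, so symbols that only the in-libc build should carry (narenas, the arena attach/detach helpers) are fenced off with IS_IN (libc). A stand-alone sketch of that conditional-compilation idea, substituting an ordinary -D macro for glibc's internal IS_IN machinery (hypothetical, illustration only):

/* is-in-demo.c -- two flavours from one source file, mirroring how
   this patch splits arena.c between libc and the debug DSO.
   "libc" flavour:   gcc -DBUILD_FOR_LIBC -o demo-libc is-in-demo.c
   "debug" flavour:  gcc -o demo-debug is-in-demo.c  */
#include <stdio.h>

#ifdef BUILD_FOR_LIBC
/* Bookkeeping that exists only in the main build, as narenas now
   does in arena.c.  */
static size_t narenas = 1;
#endif

int
main (void)
{
#ifdef BUILD_FOR_LIBC
  printf ("libc flavour: narenas = %zu\n", narenas);
#else
  printf ("debug flavour: arena bookkeeping compiled out\n");
#endif
  return 0;
}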