Diffstat (limited to 'malloc/arena.c')
 malloc/arena.c | 33 ++++++++-------------------------
 1 file changed, 8 insertions(+), 25 deletions(-)
diff --git a/malloc/arena.c b/malloc/arena.c
index 405ae82..90c526f 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -43,14 +43,14 @@
/* HEAP_MAX_SIZE should be larger than the huge page size, otherwise heaps will
   not use huge pages. It is a constant so arena_for_chunk() is efficient. */
-static inline size_t
+static __always_inline size_t
heap_min_size (void)
{
  return mp_.hp_pagesize == 0 || mp_.hp_pagesize > HEAP_MAX_SIZE
         ? HEAP_MIN_SIZE : mp_.hp_pagesize;
}
-static inline size_t
+static __always_inline size_t
heap_max_size (void)
{
  return HEAP_MAX_SIZE;
@@ -113,9 +113,6 @@ static mstate free_list;
   acquired. */
__libc_lock_define_initialized (static, list_lock);
-/* Already initialized? */
-static bool __malloc_initialized = false;
-
/**************************************************************************/
@@ -141,14 +138,14 @@ static bool __malloc_initialized = false;
/* find the heap and corresponding arena for a given ptr */
-static inline heap_info *
+static __always_inline heap_info *
heap_for_ptr (void *ptr)
{
  size_t max_size = heap_max_size ();
  return PTR_ALIGN_DOWN (ptr, max_size);
}
-static inline struct malloc_state *
+static __always_inline struct malloc_state *
arena_for_chunk (mchunkptr ptr)
{
  return chunk_main_arena (ptr) ? &main_arena : heap_for_ptr (ptr)->ar_ptr;
@@ -168,9 +165,6 @@ arena_for_chunk (mchunkptr ptr)
void
__malloc_fork_lock_parent (void)
{
-  if (!__malloc_initialized)
-    return;
-
  /* We do not acquire free_list_lock here because we completely
     reconstruct free_list in __malloc_fork_unlock_child. */
@@ -188,9 +182,6 @@ __malloc_fork_lock_parent (void)
void
__malloc_fork_unlock_parent (void)
{
-  if (!__malloc_initialized)
-    return;
-
  for (mstate ar_ptr = &main_arena;; )
    {
      __libc_lock_unlock (ar_ptr->mutex);
@@ -204,9 +195,6 @@ __malloc_fork_unlock_parent (void)
void
__malloc_fork_unlock_child (void)
{
-  if (!__malloc_initialized)
-    return;
-
  /* Push all arenas to the free list, except thread_arena, which is
     attached to the current thread. */
  __libc_lock_init (free_list_lock);
@@ -232,8 +220,8 @@ __malloc_fork_unlock_child (void)
}
#define TUNABLE_CALLBACK_FNDECL(__name, __type) \
-static inline int do_ ## __name (__type value); \
-static void \
+static __always_inline int do_ ## __name (__type value); \
+static void \
TUNABLE_CALLBACK (__name) (tunable_val_t *valp) \
{ \
  __type value = (__type) (valp)->numval; \
@@ -259,14 +247,9 @@ TUNABLE_CALLBACK_FNDECL (set_hugetlb, size_t)
static void tcache_key_initialize (void);
#endif
-static void
-ptmalloc_init (void)
+void
+__ptmalloc_init (void)
{
-  if (__malloc_initialized)
-    return;
-
-  __malloc_initialized = true;
-
#if USE_TCACHE
  tcache_key_initialize ();
#endif