Diffstat (limited to 'malloc')
-rw-r--r--  malloc/arena.c  | 12
-rw-r--r--  malloc/malloc.c | 43
2 files changed, 31 insertions, 24 deletions
diff --git a/malloc/arena.c b/malloc/arena.c
index 405ae82..5672c69 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -43,14 +43,14 @@
 /* HEAP_MAX_SIZE should be larger than the huge page size, otherwise heaps
    will not use huge pages.  It is a constant so arena_for_chunk() is efficient.  */
-static inline size_t
+static __always_inline size_t
 heap_min_size (void)
 {
   return mp_.hp_pagesize == 0 || mp_.hp_pagesize > HEAP_MAX_SIZE
          ? HEAP_MIN_SIZE : mp_.hp_pagesize;
 }
 
-static inline size_t
+static __always_inline size_t
 heap_max_size (void)
 {
   return HEAP_MAX_SIZE;
@@ -141,14 +141,14 @@ static bool __malloc_initialized = false;
 
 /* find the heap and corresponding arena for a given ptr */
 
-static inline heap_info *
+static __always_inline heap_info *
 heap_for_ptr (void *ptr)
 {
   size_t max_size = heap_max_size ();
   return PTR_ALIGN_DOWN (ptr, max_size);
 }
 
-static inline struct malloc_state *
+static __always_inline struct malloc_state *
 arena_for_chunk (mchunkptr ptr)
 {
   return chunk_main_arena (ptr) ? &main_arena : heap_for_ptr (ptr)->ar_ptr;
@@ -232,8 +232,8 @@ __malloc_fork_unlock_child (void)
 }
 
 #define TUNABLE_CALLBACK_FNDECL(__name, __type)                    \
-static inline int do_ ## __name (__type value);                   \
-static void                                                       \
+static __always_inline int do_ ## __name (__type value);          \
+static void                                                       \
 TUNABLE_CALLBACK (__name) (tunable_val_t *valp)                   \
 {                                                                  \
   __type value = (__type) (valp)->numval;                          \
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 0811061..a0bc733 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1322,9 +1322,12 @@ nextchunk-> +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    value is less than PTRDIFF_MAX.  Returns the requested size or MINSIZE in
    case the value is less than MINSIZE, or 0 if any of the previous checks
    fail.  */
-static inline size_t
+static __always_inline size_t
 checked_request2size (size_t req) __nonnull (1)
 {
+  _Static_assert (PTRDIFF_MAX <= SIZE_MAX / 2,
+                  "PTRDIFF_MAX is not more than half of SIZE_MAX");
+
   if (__glibc_unlikely (req > PTRDIFF_MAX))
     return 0;
@@ -1782,7 +1785,7 @@ static uint8_t global_max_fast;
   global_max_fast = (((size_t) (s) <= MALLOC_ALIGN_MASK - SIZE_SZ)  \
                      ? MIN_CHUNK_SIZE / 2 : ((s + SIZE_SZ) & ~MALLOC_ALIGN_MASK))
 
-static inline INTERNAL_SIZE_T
+static __always_inline INTERNAL_SIZE_T
 get_max_fast (void)
 {
   /* Tell the GCC optimizers that global_max_fast is never larger
@@ -3245,7 +3248,7 @@ tcache_double_free_verify (tcache_entry *e, size_t tc_idx)
 
 /* Try to free the chunk to the tcache; if successful, return true.
    Caller must ensure that chunk and size are valid.  */
-static inline bool
+static __always_inline bool
 tcache_free (mchunkptr p, INTERNAL_SIZE_T size)
 {
   bool done = false;
@@ -3380,26 +3383,17 @@ tcache_thread_shutdown (void)
 #endif /* !USE_TCACHE */
 
 #if IS_IN (libc)
-void *
-__libc_malloc (size_t bytes)
+
+static void * __attribute_noinline__
+__libc_malloc2 (size_t bytes)
 {
   mstate ar_ptr;
   void *victim;
 
-  _Static_assert (PTRDIFF_MAX <= SIZE_MAX / 2,
-                  "PTRDIFF_MAX is not more than half of SIZE_MAX");
-
   if (!__malloc_initialized)
     ptmalloc_init ();
-#if USE_TCACHE
-  bool err = tcache_try_malloc (bytes, &victim);
-
-  if (err)
-    return NULL;
-  if (victim)
-    return tag_new_usable (victim);
-#endif
+
+  MAYBE_INIT_TCACHE ();
 
   if (SINGLE_THREAD_P)
     {
@@ -3430,6 +3424,19 @@ __libc_malloc (size_t bytes)
           ar_ptr == arena_for_chunk (mem2chunk (victim)));
   return victim;
 }
+
+void *
+__libc_malloc (size_t bytes)
+{
+#if USE_TCACHE
+  size_t tc_idx = csize2tidx (checked_request2size (bytes));
+
+  if (tcache_available (tc_idx))
+    return tag_new_usable (tcache_get (tc_idx));
+#endif
+
+  return __libc_malloc2 (bytes);
+}
 libc_hidden_def (__libc_malloc)
 
 void
@@ -4553,7 +4560,7 @@ _int_malloc (mstate av, size_t bytes)
    ------------------------------ free ------------------------------
  */
 
-static inline void
+static __always_inline void
 _int_free_check (mstate av, mchunkptr p, INTERNAL_SIZE_T size)
 {
   /* Little security check which won't hurt performance: the
@@ -4687,7 +4694,7 @@ _int_free_chunk (mstate av, mchunkptr p, INTERNAL_SIZE_T size, int have_lock)
    P has already been locked.  It will perform a sanity check, then try the
    fast path to free into the tcache.  If the attempt does not succeed, free
    the chunk to the arena.  */
-static inline void
+static __always_inline void
 _int_free (mstate av, mchunkptr p, int have_lock)
 {
   INTERNAL_SIZE_T size;        /* its size */
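
The mechanical change throughout the patch is replacing "inline" with glibc's
__always_inline.  Plain "inline" is only a hint: GCC may still emit an
out-of-line call, for example at -O0 or when its inlining heuristics decline,
which would defeat the point of tiny helpers such as arena_for_chunk() on the
free path.  A minimal sketch of the difference, assuming GCC or Clang (glibc's
__always_inline macro expands to __inline plus the attribute shown here):

#include <stdio.h>

/* Only a hint: at -O0 or under -fno-inline GCC may still emit a call.  */
static inline int
hinted_add1 (int x)
{
  return x + 1;
}

/* Forced: expanded into every caller regardless of optimization level.  */
static inline __attribute__ ((always_inline)) int
forced_add1 (int x)
{
  return x + 1;
}

int
main (void)
{
  printf ("%d %d\n", hinted_add1 (1), forced_add1 (2));
  return 0;
}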
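The behavioral change is the split of __libc_malloc into two functions: a
small __libc_malloc that handles only a tcache hit, and a noinline
__libc_malloc2 that carries everything else (initialization, arena selection,
locking).  Keeping the cold path out of line keeps the hot function's stack
frame and register pressure minimal, so a cache hit costs little more than an
index computation, one branch, and a pointer pop.  Below is a self-contained
sketch of that pattern, not glibc's actual code; cache_bins, fast_alloc,
fast_free, and slow_alloc are illustrative stand-ins:

#include <stddef.h>
#include <stdlib.h>

#define NBINS    64
#define BIN_SIZE 16   /* stand-in for the tcache size-class granularity */

static void *cache_bins[NBINS];   /* at most one cached block per class */

/* Cold path: the full allocator.  noinline keeps its prologue, spills,
   and stack frame out of every call site.  */
static __attribute__ ((noinline)) void *
slow_alloc (size_t bytes)
{
  return malloc (bytes);          /* stands in for __libc_malloc2 */
}

/* Hot path: one index computation, one predictable branch, one pop.  */
static inline void *
fast_alloc (size_t bytes)
{
  size_t idx = bytes / BIN_SIZE;  /* stand-in for csize2tidx () */
  if (idx < NBINS && cache_bins[idx] != NULL)
    {
      void *p = cache_bins[idx];
      cache_bins[idx] = NULL;     /* stand-in for tcache_get () */
      return p;
    }
  return slow_alloc (bytes);
}

/* Matching fast-path free: park the block in its bin if the bin is empty.  */
static inline void
fast_free (void *p, size_t bytes)
{
  size_t idx = bytes / BIN_SIZE;
  if (idx < NBINS && cache_bins[idx] == NULL)
    cache_bins[idx] = p;
  else
    free (p);
}

int
main (void)
{
  void *p = fast_alloc (32);      /* miss: falls through to slow_alloc */
  fast_free (p, 32);              /* parks the block in its bin */
  void *q = fast_alloc (32);      /* hit: served straight from the bin */
  fast_free (q, 32);
  return 0;
}

Note that in the patch itself checked_request2size() returns 0 on overflow, so
an oversized request maps to an out-of-range tc_idx, tcache_available() fails,
and the request falls through to __libc_malloc2 for the full error handling.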