path: root/malloc/arena.c
author    Carlos O'Donell <carlos@systemhalted.org>  2015-02-17 19:25:01 -0500
committer Carlos O'Donell <carlos@systemhalted.org>  2015-02-17 19:29:15 -0500
commit    8a35c3fe122d49ba76dff815b3537affb5a50b45 (patch)
tree      37fba229a2f0557ffdfd8ee431f499888aea4a16 /malloc/arena.c
parent    1a2325c06cf309d1d8b4aafcfb1a3d43905baf9b (diff)
download  glibc-8a35c3fe122d49ba76dff815b3537affb5a50b45.zip
          glibc-8a35c3fe122d49ba76dff815b3537affb5a50b45.tar.gz
          glibc-8a35c3fe122d49ba76dff815b3537affb5a50b45.tar.bz2
Use alignment macros, pagesize and powerof2.
We are replacing all of the bespoke alignment code with ALIGN_UP, ALIGN_DOWN, PTR_ALIGN_UP, and PTR_ALIGN_DOWN. This cleans up malloc/malloc.c, malloc/arena.c, and elf/dl-reloc.c. It also makes all the code consistently use pagesize and powerof2 where required. Code size is reduced by removing the precomputed pagemask and using pagesize instead. There is no measurable difference in performance and no regressions on x86_64.
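For context, ALIGN_UP, ALIGN_DOWN, PTR_ALIGN_UP and PTR_ALIGN_DOWN are generic alignment helpers from glibc's internal headers, and powerof2 comes from <sys/param.h>. The sketch below shows the general form of the idiom; it is illustrative only, and the headers in the tree remain the authoritative definitions.

#include <stdint.h>
#include <sys/param.h>   /* powerof2 (x): true when x has at most one bit set */

/* Round down/up to a multiple of SIZE.  SIZE must be a power of two,
   which is what the powerof2 checks mentioned above are for.  */
#define ALIGN_DOWN(base, size)  ((base) & -((__typeof__ (base)) (size)))
#define ALIGN_UP(base, size)    ALIGN_DOWN ((base) + (size) - 1, (size))

/* Pointer variants: round via uintptr_t, then cast back to the
   pointer's own type.  */
#define PTR_ALIGN_DOWN(base, size) \
  ((__typeof__ (base)) ALIGN_DOWN ((uintptr_t) (base), (size)))
#define PTR_ALIGN_UP(base, size) \
  ((__typeof__ (base)) ALIGN_UP ((uintptr_t) (base), (size)))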
Diffstat (limited to 'malloc/arena.c')
-rw-r--r--  malloc/arena.c  8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/malloc/arena.c b/malloc/arena.c
index 886defb..8af51f0 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -510,7 +510,7 @@ static heap_info *
 internal_function
 new_heap (size_t size, size_t top_pad)
 {
-  size_t page_mask = GLRO (dl_pagesize) - 1;
+  size_t pagesize = GLRO (dl_pagesize);
   char *p1, *p2;
   unsigned long ul;
   heap_info *h;
@@ -523,7 +523,7 @@ new_heap (size_t size, size_t top_pad)
     return 0;
   else
     size = HEAP_MAX_SIZE;
-  size = (size + page_mask) & ~page_mask;
+  size = ALIGN_UP (size, pagesize);
 
   /* A memory region aligned to a multiple of HEAP_MAX_SIZE is needed.
      No swap space needs to be reserved for the following large
@@ -588,10 +588,10 @@ new_heap (size_t size, size_t top_pad)
 static int
 grow_heap (heap_info *h, long diff)
 {
-  size_t page_mask = GLRO (dl_pagesize) - 1;
+  size_t pagesize = GLRO (dl_pagesize);
   long new_size;
 
-  diff = (diff + page_mask) & ~page_mask;
+  diff = ALIGN_UP (diff, pagesize);
   new_size = (long) h->size + diff;
   if ((unsigned long) new_size > (unsigned long) HEAP_MAX_SIZE)
     return -1;
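The rewrites above rely on the old mask expression and ALIGN_UP being equivalent whenever the page size is a power of two. A small standalone check of that equivalence (hypothetical, not part of this commit) is sketched below; the hard-coded 4096 stands in for GLRO (dl_pagesize).

/* Standalone check (not part of the commit): the removed bespoke
   rounding and ALIGN_UP agree for power-of-two page sizes.  */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#define ALIGN_DOWN(base, size)  ((base) & -((__typeof__ (base)) (size)))
#define ALIGN_UP(base, size)    ALIGN_DOWN ((base) + (size) - 1, (size))

int
main (void)
{
  size_t pagesize = 4096;           /* stand-in for GLRO (dl_pagesize) */
  size_t page_mask = pagesize - 1;  /* the precomputed mask the patch removes */

  for (size_t size = 0; size < 3 * pagesize; size++)
    {
      size_t old_way = (size + page_mask) & ~page_mask;
      size_t new_way = ALIGN_UP (size, pagesize);
      assert (old_way == new_way);
    }
  printf ("old mask expression and ALIGN_UP agree\n");
  return 0;
}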