Diffstat (limited to 'malloc')
-rw-r--r--  malloc/arena.c    6
-rw-r--r--  malloc/malloc.c  23
2 files changed, 27 insertions(+), 2 deletions(-)
diff --git a/malloc/arena.c b/malloc/arena.c
index 6f0a9df8f1..6f4b0c497b 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -24,7 +24,11 @@
#define HEAP_MIN_SIZE (32*1024)
#ifndef HEAP_MAX_SIZE
-#define HEAP_MAX_SIZE (1024*1024) /* must be a power of two */
+# ifdef DEFAULT_MMAP_THRESHOLD_MAX
+# define HEAP_MAX_SIZE (2 * DEFAULT_MMAP_THRESHOLD_MAX)
+# else
+# define HEAP_MAX_SIZE (1024*1024) /* must be a power of two */
+# endif
#endif
/* HEAP_MIN_SIZE and HEAP_MAX_SIZE limit the size of mmap()ed heaps
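With this hunk HEAP_MAX_SIZE follows DEFAULT_MMAP_THRESHOLD_MAX whenever that macro is already defined when arena.c is compiled. A minimal standalone sketch of the resulting values, assuming the 64-/32-bit defaults added in the malloc.c hunk below (and using __LP64__ only as a stand-in for glibc's internal __WORDSIZE test):

    #include <stdio.h>

    /* Assumed defaults, mirroring the malloc.c hunk further down;
       __LP64__ is used here instead of glibc's __WORDSIZE.  */
    #if defined __LP64__
    # define DEFAULT_MMAP_THRESHOLD_MAX (4 * 1024 * 1024 * sizeof (long))
    #else
    # define DEFAULT_MMAP_THRESHOLD_MAX (512 * 1024)
    #endif
    #define HEAP_MAX_SIZE (2 * DEFAULT_MMAP_THRESHOLD_MAX)

    int
    main (void)
    {
      /* 64-bit: 2 * 32 MiB = 64 MiB per mmap()ed heap;
         32-bit: 2 * 512 KiB = 1 MiB, i.e. the old hard-coded value.  */
      printf ("HEAP_MAX_SIZE = %zu bytes\n", (size_t) HEAP_MAX_SIZE);
      return 0;
    }

Since heaps for non-main arenas are both sized and aligned to HEAP_MAX_SIZE, doubling it on 64-bit mostly costs address space rather than committed memory.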
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 890d3669e2..02f659708d 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -259,6 +259,7 @@
#ifdef _LIBC
#include <stdio-common/_itoa.h>
+#include <bits/wordsize.h>
#endif
#ifdef __cplusplus
@@ -1424,7 +1425,15 @@ int __posix_memalign(void **, size_t, size_t);
#endif
#ifndef DEFAULT_MMAP_THRESHOLD_MAX
-#define DEFAULT_MMAP_THRESHOLD_MAX (8 * 1024 * 1024 * sizeof(long))
+ /* For 32-bit platforms we cannot increase the maximum mmap
+ threshold much because it is also the minimum value for the
+ maximum heap size and its alignment. Going above 512k (i.e., 1M
+ for new heaps) wastes too much address space. */
+# if __WORDSIZE == 32
+# define DEFAULT_MMAP_THRESHOLD_MAX (512 * 1024)
+# else
+# define DEFAULT_MMAP_THRESHOLD_MAX (4 * 1024 * 1024 * sizeof(long))
+# endif
#endif
/*
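DEFAULT_MMAP_THRESHOLD_MAX only bounds the dynamic (sliding) mmap threshold; an explicit mallopt call still overrides the heuristic entirely. A small usage sketch, not part of this patch (the 512k value simply echoes the new 32-bit maximum):

    #include <malloc.h>
    #include <stdlib.h>

    int
    main (void)
    {
      /* An explicit M_MMAP_THRESHOLD setting disables the dynamic
         adjustment that DEFAULT_MMAP_THRESHOLD_MAX caps.  */
      mallopt (M_MMAP_THRESHOLD, 512 * 1024);

      void *big   = malloc (1024 * 1024); /* above the threshold: likely mmap()ed */
      void *small = malloc (64 * 1024);   /* below: served from an arena heap */

      free (big);
      free (small);
      return 0;
    }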
@@ -2867,6 +2876,7 @@ static Void_t* sYSMALLOc(nb, av) INTERNAL_SIZE_T nb; mstate av;
char* mm; /* return value from mmap call*/
+ try_mmap:
/*
Round up size to nearest page. For mmapped chunks, the overhead
is one SIZE_SZ unit larger than for normal chunks, because there
@@ -2996,6 +3006,9 @@ static Void_t* sYSMALLOc(nb, av) INTERNAL_SIZE_T nb; mstate av;
set_foot(old_top, (old_size + 2*SIZE_SZ));
}
}
+ else
+ /* We can at least try to mmap memory. */
+ goto try_mmap;
} else { /* av == main_arena */
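The try_mmap label and this goto change the failure path for non-main arenas: when the current heap cannot be grown and no new HEAP_MAX_SIZE-aligned heap can be created, sYSMALLOc now retries the request with a plain mmap instead of failing outright. A toy paraphrase of that control flow, with *_stub helpers standing in for glibc's grow_heap/new_heap (they are not real functions and always fail here to force the fallback):

    #define _DEFAULT_SOURCE 1
    #include <stdio.h>
    #include <stddef.h>
    #include <sys/mman.h>

    /* Hypothetical stand-ins; they always fail so the new path is taken.  */
    static int   grow_heap_stub (size_t diff) { (void) diff; return -1; }
    static void *new_heap_stub  (size_t size) { (void) size; return NULL; }
    static void *carve_stub     (size_t nb)   { (void) nb;   return NULL; }

    static void *
    toy_sysmalloc (size_t nb)
    {
      /* 1. Try to extend the arena's current heap.  */
      if (grow_heap_stub (nb) == 0)
        return carve_stub (nb);
      /* 2. Try to create a fresh HEAP_MAX_SIZE-aligned heap.  */
      if (new_heap_stub (nb) != NULL)
        return carve_stub (nb);
      /* 3. New in this patch: fall back to a plain mmap of the request.  */
      void *mem = mmap (NULL, nb, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      return mem == MAP_FAILED ? NULL : mem;
    }

    int
    main (void)
    {
      printf ("served via mmap fallback at %p\n", toy_sysmalloc (256 * 1024));
      return 0;
    }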
@@ -4055,6 +4068,8 @@ _int_malloc(mstate av, size_t bytes)
for(;;) {
+ int iters = 0;
+ bool any_larger = false;
while ( (victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
bck = victim->bk;
if (__builtin_expect (victim->size <= 2 * SIZE_SZ, 0)
@@ -4150,6 +4165,12 @@ _int_malloc(mstate av, size_t bytes)
victim->fd = fwd;
fwd->bk = victim;
bck->fd = victim;
+
+ if (size >= nb)
+ any_larger = true;
+#define MAX_ITERS 10000
+ if (++iters >= MAX_ITERS)
+ break;
}
/*
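The iteration cap bounds how long one call can spend draining the unsorted bin: chunks are still filed into their size-sorted bins, but after MAX_ITERS of them the scan stops for this call and later calls continue the work; any_larger records whether a chunk big enough for the current request was seen. A toy version of the same bounded-drain pattern, using a made-up node type rather than glibc's chunk layout:

    #include <stdbool.h>
    #include <stdio.h>
    #include <stddef.h>

    /* Toy circular doubly linked list shaped like a bin header; not
       glibc's chunk layout.  */
    struct node { struct node *fd, *bk; size_t size; };

    #define MAX_ITERS 10000

    /* Drain at most MAX_ITERS entries per call so a single caller is
       never stalled by an arbitrarily long backlog; later calls pick up
       the rest.  Returns whether an entry of at least nb bytes was seen,
       mirroring the role of any_larger above.  */
    static bool
    drain_some (struct node *head, size_t nb)
    {
      bool any_larger = false;
      int iters = 0;
      struct node *victim;

      while ((victim = head->bk) != head)
        {
          /* Unlink the oldest entry; the real code now files it into
             its size-sorted bin.  */
          head->bk = victim->bk;
          victim->bk->fd = head;

          if (victim->size >= nb)
            any_larger = true;
          if (++iters >= MAX_ITERS)
            break;
        }
      return any_larger;
    }

    int
    main (void)
    {
      struct node head = { &head, &head, 0 };
      struct node a = { NULL, NULL, 128 }, b = { NULL, NULL, 64 };

      /* Insert at the fd side, take from the bk side (FIFO), as the
         unsorted bin does.  */
      a.fd = head.fd; a.bk = &head; head.fd->bk = &a; head.fd = &a;
      b.fd = head.fd; b.bk = &head; head.fd->bk = &b; head.fd = &b;

      printf ("chunk >= 100 bytes seen: %d\n", drain_some (&head, 100));
      return 0;
    }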