From 1e2c9ef8d6778669657a057979f2a7049012e879 Mon Sep 17 00:00:00 2001
From: Jason Evans
Date: Sun, 26 Feb 2017 12:58:15 -0800
Subject: Fix huge-aligned allocation.

This regression was caused by b9408d77a63a54fd331f9b81c884f68e6d57f2e5
(Fix/simplify chunk_recycle() allocation size computations.).

This resolves #647.
---
 Makefile.in                                   |  1 +
 include/jemalloc/internal/extent.h            |  5 ++
 include/jemalloc/internal/private_symbols.txt |  2 +
 src/chunk.c                                   |  9 ++-
 src/extent.c                                  | 37 +++++++---
 test/unit/extent_quantize.c                   | 98 +++++++++++++++++++++++++++
 6 files changed, 141 insertions(+), 11 deletions(-)
 create mode 100644 test/unit/extent_quantize.c

diff --git a/Makefile.in b/Makefile.in
index 675e4cb..8f1fb55 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -156,6 +156,7 @@ TESTS_UNIT := \
 	$(srcroot)test/unit/bitmap.c \
 	$(srcroot)test/unit/ckh.c \
 	$(srcroot)test/unit/decay.c \
+	$(srcroot)test/unit/extent_quantize.c \
 	$(srcroot)test/unit/fork.c \
 	$(srcroot)test/unit/hash.c \
 	$(srcroot)test/unit/junk.c \
diff --git a/include/jemalloc/internal/extent.h b/include/jemalloc/internal/extent.h
index 168ffe6..fc77f9f 100644
--- a/include/jemalloc/internal/extent.h
+++ b/include/jemalloc/internal/extent.h
@@ -75,6 +75,11 @@ typedef rb_tree(extent_node_t) extent_tree_t;
 /******************************************************************************/
 #ifdef JEMALLOC_H_EXTERNS

+#ifdef JEMALLOC_JET
+size_t	extent_size_quantize_floor(size_t size);
+#endif
+size_t	extent_size_quantize_ceil(size_t size);
+
 rb_proto(, extent_tree_szsnad_, extent_tree_t, extent_node_t)
 rb_proto(, extent_tree_ad_, extent_tree_t, extent_node_t)

diff --git a/include/jemalloc/internal/private_symbols.txt b/include/jemalloc/internal/private_symbols.txt
index 8a9e32f..a83d984 100644
--- a/include/jemalloc/internal/private_symbols.txt
+++ b/include/jemalloc/internal/private_symbols.txt
@@ -223,6 +223,8 @@ extent_node_sn_get
 extent_node_sn_set
 extent_node_zeroed_get
 extent_node_zeroed_set
+extent_size_quantize_ceil
+extent_size_quantize_floor
 extent_tree_ad_destroy
 extent_tree_ad_destroy_recurse
 extent_tree_ad_empty
diff --git a/src/chunk.c b/src/chunk.c
index de3bf4c..94f28f2 100644
--- a/src/chunk.c
+++ b/src/chunk.c
@@ -188,12 +188,17 @@ chunk_deregister(const void *chunk, const extent_node_t *node)
 static extent_node_t *
 chunk_first_best_fit(arena_t *arena, extent_tree_t *chunks_szsnad, size_t size)
 {
+	extent_node_t *node;
+	size_t qsize;
 	extent_node_t key;

 	assert(size == CHUNK_CEILING(size));

-	extent_node_init(&key, arena, NULL, size, 0, false, false);
-	return (extent_tree_szsnad_nsearch(chunks_szsnad, &key));
+	qsize = extent_size_quantize_ceil(size);
+	extent_node_init(&key, arena, NULL, qsize, 0, false, false);
+	node = extent_tree_szsnad_nsearch(chunks_szsnad, &key);
+	assert(node == NULL || extent_node_size_get(node) >= size);
+	return node;
 }

 static void *
diff --git a/src/extent.c b/src/extent.c
index 218156c..ff8de2f 100644
--- a/src/extent.c
+++ b/src/extent.c
@@ -3,13 +3,11 @@

 /******************************************************************************/

-/*
- * Round down to the nearest chunk size that can actually be requested during
- * normal huge allocation.
- */
-JEMALLOC_INLINE_C size_t
-extent_quantize(size_t size)
-{
+#ifndef JEMALLOC_JET
+static
+#endif
+size_t
+extent_size_quantize_floor(size_t size) {
 	size_t ret;
 	szind_t ind;

@@ -25,11 +23,32 @@ extent_quantize(size_t size)
 	return (ret);
 }

+size_t
+extent_size_quantize_ceil(size_t size) {
+	size_t ret;
+
+	assert(size > 0);
+
+	ret = extent_size_quantize_floor(size);
+	if (ret < size) {
+		/*
+		 * Skip a quantization that may have an adequately large extent,
+		 * because under-sized extents may be mixed in. This only
+		 * happens when an unusual size is requested, i.e. for aligned
+		 * allocation, and is just one of several places where linear
+		 * search would potentially find sufficiently aligned available
+		 * memory somewhere lower.
+		 */
+		ret = index2size(size2index(ret + 1));
+	}
+	return ret;
+}
+
 JEMALLOC_INLINE_C int
 extent_sz_comp(const extent_node_t *a, const extent_node_t *b)
 {
-	size_t a_qsize = extent_quantize(extent_node_size_get(a));
-	size_t b_qsize = extent_quantize(extent_node_size_get(b));
+	size_t a_qsize = extent_size_quantize_floor(extent_node_size_get(a));
+	size_t b_qsize = extent_size_quantize_floor(extent_node_size_get(b));

 	return ((a_qsize > b_qsize) - (a_qsize < b_qsize));
 }
diff --git a/test/unit/extent_quantize.c b/test/unit/extent_quantize.c
new file mode 100644
index 0000000..d2eb6d7
--- /dev/null
+++ b/test/unit/extent_quantize.c
@@ -0,0 +1,98 @@
+#include "test/jemalloc_test.h"
+
+TEST_BEGIN(test_huge_extent_size) {
+	unsigned nhchunks, i;
+	size_t sz, extent_size_prev, ceil_prev;
+	size_t mib[4];
+	size_t miblen = sizeof(mib) / sizeof(size_t);
+
+	/*
+	 * Iterate over all huge size classes, get their extent sizes, and
+	 * verify that the quantized size is the same as the extent size.
+	 */
+
+	sz = sizeof(unsigned);
+	assert_d_eq(mallctl("arenas.nhchunks", (void *)&nhchunks, &sz, NULL,
+	    0), 0, "Unexpected mallctl failure");
+
+	assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
+	    "Unexpected mallctlnametomib failure");
+	for (i = 0; i < nhchunks; i++) {
+		size_t extent_size, floor, ceil;
+
+
+		mib[2] = i;
+		sz = sizeof(size_t);
+		assert_d_eq(mallctlbymib(mib, miblen, (void *)&extent_size,
+		    &sz, NULL, 0), 0, "Unexpected mallctlbymib failure");
+		floor = extent_size_quantize_floor(extent_size);
+		ceil = extent_size_quantize_ceil(extent_size);
+
+		assert_zu_eq(extent_size, floor,
+		    "Extent quantization should be a no-op for precise size "
+		    "(extent_size=%zu)", extent_size);
+		assert_zu_eq(extent_size, ceil,
+		    "Extent quantization should be a no-op for precise size "
+		    "(extent_size=%zu)", extent_size);
+
+		if (i > 0) {
+			assert_zu_eq(extent_size_prev,
+			    extent_size_quantize_floor(extent_size - PAGE),
+			    "Floor should be a precise size");
+			if (extent_size_prev < ceil_prev) {
+				assert_zu_eq(ceil_prev, extent_size,
+				    "Ceiling should be a precise size "
+				    "(extent_size_prev=%zu, ceil_prev=%zu, "
+				    "extent_size=%zu)", extent_size_prev,
+				    ceil_prev, extent_size);
+			}
+		}
+		if (i + 1 < nhchunks) {
+			extent_size_prev = floor;
+			ceil_prev = extent_size_quantize_ceil(extent_size +
+			    PAGE);
+		}
+	}
+}
+TEST_END
+
+TEST_BEGIN(test_monotonic) {
+#define SZ_MAX ZU(4 * 1024 * 1024)
+	unsigned i;
+	size_t floor_prev, ceil_prev;
+
+	floor_prev = 0;
+	ceil_prev = 0;
+	for (i = 1; i <= SZ_MAX >> LG_PAGE; i++) {
+		size_t extent_size, floor, ceil;
+
+		extent_size = i << LG_PAGE;
+		floor = extent_size_quantize_floor(extent_size);
+		ceil = extent_size_quantize_ceil(extent_size);
+
+		assert_zu_le(floor, extent_size,
+		    "Floor should be <= (floor=%zu, "
+		    "extent_size=%zu, ceil=%zu)", floor, extent_size, ceil);
+		assert_zu_ge(ceil, extent_size,
+		    "Ceiling should be >= (floor=%zu, extent_size=%zu, "
+		    "ceil=%zu)", floor, extent_size, ceil);
+
+		assert_zu_le(floor_prev, floor, "Floor should be monotonic "
+		    "(floor_prev=%zu, floor=%zu, extent_size=%zu, ceil=%zu)",
+		    floor_prev, floor, extent_size, ceil);
+		assert_zu_le(ceil_prev, ceil, "Ceiling should be monotonic "
+		    "(floor=%zu, extent_size=%zu, ceil_prev=%zu, ceil=%zu)",
+		    floor, extent_size, ceil_prev, ceil);
+
+		floor_prev = floor;
+		ceil_prev = ceil;
+	}
+}
+TEST_END
+
+int
+main(void) {
+	return test(
+	    test_huge_extent_size,
+	    test_monotonic);
+}
--
cgit v0.12
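
The following standalone sketch illustrates the quantization scheme the patch
relies on. It is a toy model, not jemalloc source: the toy_* helpers and the
hardcoded class table are hypothetical stand-ins for jemalloc's generated
size-class tables and its internal size2index()/index2size() lookups. It shows
why keying the recycle-tree search with the quantize-ceil of the request, as
chunk_first_best_fit() now does, can never select an extent smaller than the
request.

/*
 * toy_quantize.c: a minimal sketch, not jemalloc source.  The class table
 * and the toy_* helpers are hypothetical stand-ins for jemalloc's generated
 * size-class tables and its internal size2index()/index2size() lookups.
 *
 * Build: cc -o toy_quantize toy_quantize.c
 */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* A few ascending "huge" size classes, chosen only for illustration. */
static const size_t classes[] = {
	0x400000, 0x500000, 0x600000, 0x700000,	/* 4, 5, 6, 7 MiB */
	0x800000, 0xa00000, 0xc00000, 0xe00000	/* 8, 10, 12, 14 MiB */
};
#define NCLASSES (sizeof(classes) / sizeof(classes[0]))

/* Smallest class index whose size is >= size (size2index() analogue). */
static size_t
toy_size2index(size_t size) {
	size_t i;

	for (i = 0; i < NCLASSES; i++) {
		if (classes[i] >= size)
			return i;
	}
	assert(0);		/* Demo sizes stay within the toy table. */
	return NCLASSES - 1;
}

/* Class size for a given index (index2size() analogue). */
static size_t
toy_index2size(size_t ind) {
	return classes[ind];
}

/* Round down to the nearest class size, as extent_size_quantize_floor(). */
static size_t
toy_quantize_floor(size_t size) {
	size_t ind = toy_size2index(size + 1);

	if (ind == 0)
		return toy_index2size(0);	/* Avoid underflow. */
	return toy_index2size(ind - 1);
}

/* Round up to the next class size, as extent_size_quantize_ceil(). */
static size_t
toy_quantize_ceil(size_t size) {
	size_t ret = toy_quantize_floor(size);

	if (ret < size)
		ret = toy_index2size(toy_size2index(ret + 1));
	return ret;
}

int
main(void) {
	/* An "unusual" size, e.g. from a huge aligned allocation. */
	size_t odd = 0x480000;	/* 4.5 MiB, between the 4 and 5 MiB classes. */

	printf("floor(0x%zx) = 0x%zx\n", odd, toy_quantize_floor(odd));
	printf("ceil(0x%zx)  = 0x%zx\n", odd, toy_quantize_ceil(odd));

	/*
	 * The regression: the tree compares extents by the quantize-floor of
	 * their sizes, and 0x480000 floors to 0x400000, so a search keyed
	 * with the raw size could return a 0x400000-byte extent, smaller
	 * than the request.  A key of ceil(0x480000) = 0x500000 floors to
	 * itself, so any match is at least 0x500000 bytes.
	 */
	assert(toy_quantize_ceil(odd) >= odd);
	return 0;
}

The asymmetry is deliberate: extents in the szsnad tree are ordered by the
quantize-floor of their sizes, so an unusual request that falls between two
size classes floors down to the same key as extents that are too small to
satisfy it; searching with the ceil instead lands on the next precise size
class, above them all.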