diff options
author | Jason Evans <je@fb.com> | 2015-03-26 01:56:55 (GMT) |
---|---|---|
committer | Jason Evans <je@fb.com> | 2015-03-26 02:10:06 (GMT) |
commit | 65db63cf3f0c5dd5126a1b3786756486eaf931ba (patch) | |
tree | 10f598324c157723127e2cd90850a7e787f2f854 /src | |
parent | 562d266511053a51406e91c78eba640cb46ad9c8 (diff) | |
download | jemalloc-65db63cf3f0c5dd5126a1b3786756486eaf931ba.zip jemalloc-65db63cf3f0c5dd5126a1b3786756486eaf931ba.tar.gz jemalloc-65db63cf3f0c5dd5126a1b3786756486eaf931ba.tar.bz2 |
Fix in-place shrinking huge reallocation purging bugs.
Fix the shrinking case of huge_ralloc_no_move_similar() to purge the
correct number of pages, at the correct offset. This regression was
introduced by 8d6a3e8321a7767cb2ca0930b85d5d488a8cc659 (Implement
dynamic per arena control over dirty page purging.).
Fix huge_ralloc_no_move_shrink() to purge the correct number of pages.
This bug was introduced by 9673983443a0782d975fbcb5d8457cfd411b8b56
(Purge/zero sub-chunk huge allocations as necessary.).
Diffstat (limited to 'src')
-rw-r--r-- | src/arena.c | 7 | ||||
-rw-r--r-- | src/huge.c | 31 |
2 files changed, 17 insertions(+), 21 deletions(-)
diff --git a/src/arena.c b/src/arena.c
index bc13d20..3041068 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -1245,16 +1245,11 @@ arena_purge_stashed(arena_t *arena,
 		if (rdelm == &chunkselm->rd) {
 			size_t size = extent_node_size_get(chunkselm);
-			void *addr, *chunk;
-			size_t offset;
 			bool unzeroed;

 			npages = size >> LG_PAGE;
-			addr = extent_node_addr_get(chunkselm);
-			chunk = CHUNK_ADDR2BASE(addr);
-			offset = CHUNK_ADDR2OFFSET(addr);
 			unzeroed = chunk_purge_wrapper(arena, chunk_purge,
-			    chunk, offset, size);
+			    extent_node_addr_get(chunkselm), 0, size);
 			extent_node_zeroed_set(chunkselm, !unzeroed);
 			chunkselm = qr_next(chunkselm, cc_link);
 		} else {
diff --git a/src/huge.c b/src/huge.c
--- a/src/huge.c
+++ b/src/huge.c
@@ -145,12 +145,11 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize,

 	/* Fill if necessary (shrinking). */
 	if (oldsize > usize) {
-		size_t sdiff = CHUNK_CEILING(usize) - usize;
-		zeroed = (sdiff != 0) ? !chunk_purge_wrapper(arena, chunk_purge,
-		    CHUNK_ADDR2BASE(ptr), CHUNK_ADDR2OFFSET(ptr), usize) : true;
+		size_t sdiff = oldsize - usize;
+		zeroed = !chunk_purge_wrapper(arena, chunk_purge, ptr, usize,
+		    sdiff);
 		if (config_fill && unlikely(opt_junk_free)) {
-			memset((void *)((uintptr_t)ptr + usize), 0x5a, oldsize -
-			    usize);
+			memset((void *)((uintptr_t)ptr + usize), 0x5a, sdiff);
 			zeroed = false;
 		}
 	} else
@@ -186,7 +185,6 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
 	extent_node_t *node;
 	arena_t *arena;
 	chunk_purge_t *chunk_purge;
-	size_t sdiff;
 	bool zeroed;

 	node = huge_node_get(ptr);
@@ -196,15 +194,18 @@ huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
 	chunk_purge = arena->chunk_purge;
 	malloc_mutex_unlock(&arena->lock);

-	sdiff = CHUNK_CEILING(usize) - usize;
-	zeroed = (sdiff != 0) ? !chunk_purge_wrapper(arena, chunk_purge,
-	    CHUNK_ADDR2BASE((uintptr_t)ptr + usize),
-	    CHUNK_ADDR2OFFSET((uintptr_t)ptr + usize), sdiff) : true;
-	if (config_fill && unlikely(opt_junk_free)) {
-		huge_dalloc_junk((void *)((uintptr_t)ptr + usize), oldsize -
-		    usize);
-		zeroed = false;
-	}
+	if (oldsize > usize) {
+		size_t sdiff = oldsize - usize;
+		zeroed = !chunk_purge_wrapper(arena, chunk_purge,
+		    CHUNK_ADDR2BASE((uintptr_t)ptr + usize),
+		    CHUNK_ADDR2OFFSET((uintptr_t)ptr + usize), sdiff);
+		if (config_fill && unlikely(opt_junk_free)) {
+			huge_dalloc_junk((void *)((uintptr_t)ptr + usize),
+			    sdiff);
+			zeroed = false;
+		}
+	} else
+		zeroed = true;

 	malloc_mutex_lock(&arena->huge_mtx);
 	/* Update the size of the huge allocation. */