From 6e62984ef6ca4312cf0a2e49ea2cc38feb94175b Mon Sep 17 00:00:00 2001
From: Jason Evans <jasone@canonware.com>
Date: Sun, 15 Dec 2013 21:49:40 -0800
Subject: Don't junk-fill reallocations unless usize changes.

Don't junk-fill reallocations for which the requested size is less than
the current usable size, but not enough smaller to cause a size class
change.  Unlike malloc()/calloc()/realloc(), the *allocx() functions
contractually treat the full usize as the allocation, so a caller can
ask for zeroed memory via mallocx() and a series of rallocx() calls that
all specify MALLOCX_ZERO, and be assured that all newly allocated bytes
will be zeroed and made available to the application without danger of
allocator mutation until the size class decreases enough to cause a
usize reduction.
---
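
Illustrative note (not part of the patch itself): below is a minimal
caller-side sketch of the contract described above, assuming the public
jemalloc *allocx() API (mallocx(), rallocx(), sallocx(), dallocx() and
the MALLOCX_ZERO flag).  With MALLOCX_ZERO the full usable size is
treated as the allocation, so bytes newly exposed by a reallocation must
read back as zero; the junk fill removed by this patch mutated bytes
within an unchanged usize, which that contract forbids.

  #include <assert.h>
  #include <jemalloc/jemalloc.h>

  int
  main(void)
  {
  	size_t i, old_usize, new_usize;
  	unsigned char *p;

  	/* Request 100 zeroed bytes; the whole usable size is zeroed. */
  	p = mallocx(100, MALLOCX_ZERO);
  	assert(p != NULL);
  	old_usize = sallocx(p, 0);

  	/* Grow; any bytes beyond the old usize must still be zero. */
  	p = rallocx(p, 4096, MALLOCX_ZERO);
  	assert(p != NULL);
  	new_usize = sallocx(p, 0);
  	for (i = old_usize; i < new_usize; i++)
  		assert(p[i] == 0);

  	dallocx(p, 0);
  	return (0);
  }
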
 include/jemalloc/internal/tcache.h |  1 +
 src/arena.c                        | 15 +++------------
 src/huge.c                         |  4 ----
 3 files changed, 4 insertions(+), 16 deletions(-)

diff --git a/include/jemalloc/internal/tcache.h b/include/jemalloc/internal/tcache.h
index d4eecde..c3d4b58 100644
--- a/include/jemalloc/internal/tcache.h
+++ b/include/jemalloc/internal/tcache.h
@@ -297,6 +297,7 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 	binind = SMALL_SIZE2BIN(size);
 	assert(binind < NBINS);
 	tbin = &tcache->tbins[binind];
+	size = arena_bin_info[binind].reg_size;
 	ret = tcache_alloc_easy(tbin);
 	if (ret == NULL) {
 		ret = tcache_alloc_small_hard(tcache, tbin, binind);
diff --git a/src/arena.c b/src/arena.c
index 4a46013..406cf5d 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -1938,10 +1938,6 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 	psize = PAGE_CEILING(size + extra);
 	if (psize == oldsize) {
 		/* Same size class. */
-		if (config_fill && opt_junk && size < oldsize) {
-			memset((void *)((uintptr_t)ptr + size), 0x5a, oldsize -
-			    size);
-		}
 		return (false);
 	} else {
 		arena_chunk_t *chunk;
@@ -1953,8 +1949,8 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
 		if (psize < oldsize) {
 			/* Fill before shrinking in order to avoid a race. */
 			if (config_fill && opt_junk) {
-				memset((void *)((uintptr_t)ptr + size), 0x5a,
-				    oldsize - size);
+				memset((void *)((uintptr_t)ptr + psize), 0x5a,
+				    oldsize - psize);
 			}
 			arena_ralloc_large_shrink(arena, chunk, ptr, oldsize,
 			    psize);
@@ -1988,13 +1984,8 @@ arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra,
 			if ((size + extra <= SMALL_MAXCLASS &&
 			    SMALL_SIZE2BIN(size + extra) ==
 			    SMALL_SIZE2BIN(oldsize)) || (size <= oldsize &&
-			    size + extra >= oldsize)) {
-				if (config_fill && opt_junk && size < oldsize) {
-					memset((void *)((uintptr_t)ptr + size),
-					    0x5a, oldsize - size);
-				}
+			    size + extra >= oldsize))
 				return (ptr);
-			}
 		} else {
 			assert(size <= arena_maxclass);
 			if (size + extra > SMALL_MAXCLASS) {
diff --git a/src/huge.c b/src/huge.c
index 33fab68..ea9a2ad 100644
--- a/src/huge.c
+++ b/src/huge.c
@@ -89,10 +89,6 @@ huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra)
 	    && CHUNK_CEILING(oldsize) >= CHUNK_CEILING(size)
 	    && CHUNK_CEILING(oldsize) <= CHUNK_CEILING(size+extra)) {
 		assert(CHUNK_CEILING(oldsize) == oldsize);
-		if (config_fill && opt_junk && size < oldsize) {
-			memset((void *)((uintptr_t)ptr + size), 0x5a,
-			    oldsize - size);
-		}
 		return (ptr);
 	}
 
-- 
cgit v0.12