summary · refs · log · tree · commit · diff · stats
path: root/include/jemalloc
diff options
context:
space:
mode:
author	Jason Evans <je@fb.com>	2015-02-13 23:28:56 (GMT)
committer	Jason Evans <je@fb.com>	2015-02-13 23:28:56 (GMT)
commit	41cfe03f39740fe61cf46d86982f66c24168de32 (patch)
tree	2b2db8229ea1a9581db947997eab74df36f89a83 /include/jemalloc
parent	feaaa3df0da9972b9c5016c55b886e54853cc855 (diff)
downloadjemalloc-41cfe03f39740fe61cf46d86982f66c24168de32.zip
jemalloc-41cfe03f39740fe61cf46d86982f66c24168de32.tar.gz
jemalloc-41cfe03f39740fe61cf46d86982f66c24168de32.tar.bz2
If MALLOCX_ARENA(a) is specified, use it during tcache fill.
Diffstat (limited to 'include/jemalloc')
-rw-r--r--	include/jemalloc/internal/arena.h	26
-rw-r--r--	include/jemalloc/internal/tcache.h	28
2 files changed, 27 insertions(+), 27 deletions(-)
diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index 4d88736..b195daf 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -985,28 +985,26 @@ arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
assert(size != 0);
assert(size <= arena_maxclass);
+ arena = arena_choose(tsd, arena);
+ if (unlikely(arena == NULL))
+ return (NULL);
+
if (likely(size <= SMALL_MAXCLASS)) {
- if (likely(tcache != NULL))
- return (tcache_alloc_small(tsd, tcache, size, zero));
- else {
- arena = arena_choose(tsd, arena);
- if (unlikely(arena == NULL))
- return (NULL);
+ if (likely(tcache != NULL)) {
+ return (tcache_alloc_small(tsd, arena, tcache, size,
+ zero));
+ } else
return (arena_malloc_small(arena, size, zero));
- }
} else if (likely(size <= arena_maxclass)) {
/*
* Initialize tcache after checking size in order to avoid
* infinite recursion during tcache initialization.
*/
- if (likely(tcache != NULL) && size <= tcache_maxclass)
- return (tcache_alloc_large(tsd, tcache, size, zero));
- else {
- arena = arena_choose(tsd, arena);
- if (unlikely(arena == NULL))
- return (NULL);
+ if (likely(tcache != NULL) && size <= tcache_maxclass) {
+ return (tcache_alloc_large(tsd, arena, tcache, size,
+ zero));
+ } else
return (arena_malloc_large(arena, size, zero));
- }
} else
return (huge_malloc(tsd, arena, size, zero, tcache));
}
diff --git a/include/jemalloc/internal/tcache.h b/include/jemalloc/internal/tcache.h
index 2a3952b..d2443b1 100644
--- a/include/jemalloc/internal/tcache.h
+++ b/include/jemalloc/internal/tcache.h
@@ -120,10 +120,10 @@ extern tcaches_t *tcaches;
size_t tcache_salloc(const void *ptr);
void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
-void *tcache_alloc_small_hard(tsd_t *tsd, tcache_t *tcache,
+void *tcache_alloc_small_hard(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
tcache_bin_t *tbin, index_t binind);
-void tcache_bin_flush_small(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
- unsigned rem, tcache_t *tcache);
+void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
+ index_t binind, unsigned rem);
void tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
unsigned rem, tcache_t *tcache);
void tcache_arena_associate(tcache_t *tcache, arena_t *arena);
@@ -151,10 +151,10 @@ bool tcache_enabled_get(void);
tcache_t *tcache_get(tsd_t *tsd, bool create);
void tcache_enabled_set(bool enabled);
void *tcache_alloc_easy(tcache_bin_t *tbin);
-void *tcache_alloc_small(tsd_t *tsd, tcache_t *tcache, size_t size,
- bool zero);
-void *tcache_alloc_large(tsd_t *tsd, tcache_t *tcache, size_t size,
- bool zero);
+void *tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
+ size_t size, bool zero);
+void *tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
+ size_t size, bool zero);
void tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr,
index_t binind);
void tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr,
@@ -258,7 +258,8 @@ tcache_alloc_easy(tcache_bin_t *tbin)
}
JEMALLOC_ALWAYS_INLINE void *
-tcache_alloc_small(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
+tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
+ bool zero)
{
void *ret;
index_t binind;
@@ -271,7 +272,7 @@ tcache_alloc_small(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
usize = index2size(binind);
ret = tcache_alloc_easy(tbin);
if (unlikely(ret == NULL)) {
- ret = tcache_alloc_small_hard(tsd, tcache, tbin, binind);
+ ret = tcache_alloc_small_hard(tsd, arena, tcache, tbin, binind);
if (ret == NULL)
return (NULL);
}
@@ -302,7 +303,8 @@ tcache_alloc_small(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
}
JEMALLOC_ALWAYS_INLINE void *
-tcache_alloc_large(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
+tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
+ bool zero)
{
void *ret;
index_t binind;
@@ -320,7 +322,7 @@ tcache_alloc_large(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
* Only allocate one large object at a time, because it's quite
* expensive to create one and not use it.
*/
- ret = arena_malloc_large(arena_choose(tsd, NULL), usize, zero);
+ ret = arena_malloc_large(arena, usize, zero);
if (ret == NULL)
return (NULL);
} else {
@@ -366,8 +368,8 @@ tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, index_t binind)
tbin = &tcache->tbins[binind];
tbin_info = &tcache_bin_info[binind];
if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
- tcache_bin_flush_small(tsd, tbin, binind,
- (tbin_info->ncached_max >> 1), tcache);
+ tcache_bin_flush_small(tsd, tcache, tbin, binind,
+ (tbin_info->ncached_max >> 1));
}
assert(tbin->ncached < tbin_info->ncached_max);
tbin->avail[tbin->ncached] = ptr;