diff options
author | Jason Evans <jasone@canonware.com> | 2017-04-21 00:21:37 (GMT) |
---|---|---|---|
committer | Jason Evans <jasone@canonware.com> | 2017-04-21 17:06:12 (GMT) |
commit | 4403c9ab441eabb6c55d93b99836f7126e46be75 (patch) | |
tree | a7bf4b383956b2cd02b4cb8bdb8012640d283130 /include/jemalloc | |
parent | 5aa46f027df42636d4aa1fb70d1078a6c5f96420 (diff) | |
download | jemalloc-4403c9ab441eabb6c55d93b99836f7126e46be75.zip jemalloc-4403c9ab441eabb6c55d93b99836f7126e46be75.tar.gz jemalloc-4403c9ab441eabb6c55d93b99836f7126e46be75.tar.bz2 |
Remove --disable-tcache.
Simplify configuration by removing the --disable-tcache option, but
replace the testing for that configuration with
--with-malloc-conf=tcache:false.
Fix the thread.arena and thread.tcache.flush mallctls to work correctly
if tcache is disabled.
This partially resolves #580.
Diffstat (limited to 'include/jemalloc')
7 files changed, 9 insertions, 42 deletions
diff --git a/include/jemalloc/internal/arena_inlines_a.h b/include/jemalloc/internal/arena_inlines_a.h index cf92342..2bd5ce7 100644 --- a/include/jemalloc/internal/arena_inlines_a.h +++ b/include/jemalloc/internal/arena_inlines_a.h @@ -58,7 +58,7 @@ percpu_arena_update(tsd_t *tsd, unsigned cpu) { /* Set new arena/tcache associations. */ arena_migrate(tsd, oldind, newind); tcache_t *tcache = tcache_get(tsd); - if (config_tcache && tcache) { + if (tcache != NULL) { tcache_arena_reassociate(tsd_tsdn(tsd), tcache, newarena); } diff --git a/include/jemalloc/internal/jemalloc_internal_defs.h.in b/include/jemalloc/internal/jemalloc_internal_defs.h.in index 28eb0b3..d3d7694 100644 --- a/include/jemalloc/internal/jemalloc_internal_defs.h.in +++ b/include/jemalloc/internal/jemalloc_internal_defs.h.in @@ -155,13 +155,6 @@ #undef JEMALLOC_PROF_GCC /* - * JEMALLOC_TCACHE enables a thread-specific caching layer for small objects. - * This makes it possible to allocate/deallocate objects without any locking - * when the cache is in the steady state. - */ -#undef JEMALLOC_TCACHE - -/* * JEMALLOC_DSS enables use of sbrk(2) to allocate extents from the data storage * segment (DSS). 
*/ diff --git a/include/jemalloc/internal/jemalloc_internal_inlines_a.h b/include/jemalloc/internal/jemalloc_internal_inlines_a.h index 38fa3c7..c28bd7c 100644 --- a/include/jemalloc/internal/jemalloc_internal_inlines_a.h +++ b/include/jemalloc/internal/jemalloc_internal_inlines_a.h @@ -323,7 +323,8 @@ malloc_getcpu(void) { JEMALLOC_ALWAYS_INLINE unsigned percpu_arena_choose(void) { unsigned arena_ind; - assert(have_percpu_arena && (percpu_arena_mode != percpu_arena_disabled)); + assert(have_percpu_arena && (percpu_arena_mode != + percpu_arena_disabled)); malloc_cpuid_t cpuid = malloc_getcpu(); assert(cpuid >= 0); @@ -420,19 +421,16 @@ tcache_large_bin_get(tcache_t *tcache, szind_t binind) { JEMALLOC_ALWAYS_INLINE bool tcache_available(tsd_t *tsd) { - cassert(config_tcache); - /* * Thread specific auto tcache might be unavailable if: 1) during tcache * initialization, or 2) disabled through thread.tcache.enabled mallctl * or config options. This check covers all cases. */ - if (likely(tsd_tcache_enabled_get(tsd) == true)) { - /* Associated arena == null implies tcache init in progress. */ - if (tsd_tcachep_get(tsd)->arena != NULL) { - assert(tcache_small_bin_get(tsd_tcachep_get(tsd), - 0)->avail != NULL); - } + if (likely(tsd_tcache_enabled_get(tsd))) { + /* Associated arena == NULL implies tcache init in progress. 
*/ + assert(tsd_tcachep_get(tsd)->arena == NULL || + tcache_small_bin_get(tsd_tcachep_get(tsd), 0)->avail != + NULL); return true; } @@ -441,9 +439,6 @@ tcache_available(tsd_t *tsd) { JEMALLOC_ALWAYS_INLINE tcache_t * tcache_get(tsd_t *tsd) { - if (!config_tcache) { - return NULL; - } if (!tcache_available(tsd)) { return NULL; } diff --git a/include/jemalloc/internal/jemalloc_internal_inlines_b.h b/include/jemalloc/internal/jemalloc_internal_inlines_b.h index ab54a59..2fd371c 100644 --- a/include/jemalloc/internal/jemalloc_internal_inlines_b.h +++ b/include/jemalloc/internal/jemalloc_internal_inlines_b.h @@ -24,7 +24,7 @@ arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) { if (unlikely(ret == NULL)) { ret = arena_choose_hard(tsd, internal); assert(ret); - if (config_tcache && tcache_available(tsd)) { + if (tcache_available(tsd)) { tcache_t *tcache = tcache_get(tsd); if (tcache->arena != NULL) { /* See comments in tcache_data_init().*/ diff --git a/include/jemalloc/internal/jemalloc_preamble.h.in b/include/jemalloc/internal/jemalloc_preamble.h.in index 7c796c6..0e2ce31 100644 --- a/include/jemalloc/internal/jemalloc_preamble.h.in +++ b/include/jemalloc/internal/jemalloc_preamble.h.in @@ -111,13 +111,6 @@ static const bool config_stats = false #endif ; -static const bool config_tcache = -#ifdef JEMALLOC_TCACHE - true -#else - false -#endif - ; static const bool config_tls = #ifdef JEMALLOC_TLS true diff --git a/include/jemalloc/internal/tcache_inlines.h b/include/jemalloc/internal/tcache_inlines.h index d425b82..67d35b5 100644 --- a/include/jemalloc/internal/tcache_inlines.h +++ b/include/jemalloc/internal/tcache_inlines.h @@ -6,7 +6,6 @@ #ifndef JEMALLOC_ENABLE_INLINE void tcache_event(tsd_t *tsd, tcache_t *tcache); -void tcache_flush(void); bool tcache_enabled_get(tsd_t *tsd); tcache_t *tcache_get(tsd_t *tsd); void tcache_enabled_set(tsd_t *tsd, bool enabled); @@ -25,15 +24,11 @@ tcache_t *tcaches_get(tsd_t *tsd, unsigned ind); #if 
(defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TCACHE_C_)) JEMALLOC_INLINE bool tcache_enabled_get(tsd_t *tsd) { - cassert(config_tcache); - return tsd_tcache_enabled_get(tsd); } JEMALLOC_INLINE void tcache_enabled_set(tsd_t *tsd, bool enabled) { - cassert(config_tcache); - bool was_enabled = tsd_tcache_enabled_get(tsd); if (!was_enabled && enabled) { diff --git a/include/jemalloc/internal/tcache_structs.h b/include/jemalloc/internal/tcache_structs.h index c43e59b..fe27f36 100644 --- a/include/jemalloc/internal/tcache_structs.h +++ b/include/jemalloc/internal/tcache_structs.h @@ -40,23 +40,14 @@ struct tcache_s { * element of tbins is initialized to point to the proper offset within * this array. */ -#ifdef JEMALLOC_TCACHE tcache_bin_t tbins_small[NBINS]; -#else - tcache_bin_t tbins_small[0]; -#endif /* Data accessed less often below. */ ql_elm(tcache_t) link; /* Used for aggregating stats. */ arena_t *arena; /* Associated arena. */ szind_t next_gc_bin; /* Next bin to GC. */ -#ifdef JEMALLOC_TCACHE /* For small bins, fill (ncached_max >> lg_fill_div). */ uint8_t lg_fill_div[NBINS]; tcache_bin_t tbins_large[NSIZES-NBINS]; -#else - uint8_t lg_fill_div[0]; - tcache_bin_t tbins_large[0]; -#endif }; /* Linkage for list of available (previously used) explicit tcache IDs. */ |