| author | Qi Wang <interwq@gwu.edu> | 2019-04-03 00:50:42 (GMT) |
|---|---|---|
| committer | Qi Wang <interwq@gwu.edu> | 2019-04-03 00:50:42 (GMT) |
| commit | b0b3e49a54ec29e32636f4577d9d5a896d67fd20 | |
| tree | e80fd5feaedd401e7e2c884e73f8c884f51b5a65 /include/jemalloc/internal/arena_inlines_b.h | |
| parent | 61efbda7098de6fe64c362d309824864308c36d4 | |
| parent | f7489dc8f1fac233b0cd4e40331de8b738b1f2e2 | |
| download | jemalloc-5.2.0.zip jemalloc-5.2.0.tar.gz jemalloc-5.2.0.tar.bz2 | |
Merge branch 'dev' (tag: 5.2.0)
Diffstat (limited to 'include/jemalloc/internal/arena_inlines_b.h')
| -rw-r--r-- | include/jemalloc/internal/arena_inlines_b.h | 107 |
1 file changed, 89 insertions, 18 deletions
```diff
diff --git a/include/jemalloc/internal/arena_inlines_b.h b/include/jemalloc/internal/arena_inlines_b.h
index 2b7e77e..614dedd 100644
--- a/include/jemalloc/internal/arena_inlines_b.h
+++ b/include/jemalloc/internal/arena_inlines_b.h
@@ -4,10 +4,36 @@
 #include "jemalloc/internal/jemalloc_internal_types.h"
 #include "jemalloc/internal/mutex.h"
 #include "jemalloc/internal/rtree.h"
-#include "jemalloc/internal/size_classes.h"
+#include "jemalloc/internal/sc.h"
 #include "jemalloc/internal/sz.h"
 #include "jemalloc/internal/ticker.h"
 
+JEMALLOC_ALWAYS_INLINE bool
+arena_has_default_hooks(arena_t *arena) {
+    return (extent_hooks_get(arena) == &extent_hooks_default);
+}
+
+JEMALLOC_ALWAYS_INLINE arena_t *
+arena_choose_maybe_huge(tsd_t *tsd, arena_t *arena, size_t size) {
+    if (arena != NULL) {
+        return arena;
+    }
+
+    /*
+     * For huge allocations, use the dedicated huge arena if both are true:
+     * 1) is using auto arena selection (i.e. arena == NULL), and 2) the
+     * thread is not assigned to a manual arena.
+     */
+    if (unlikely(size >= oversize_threshold)) {
+        arena_t *tsd_arena = tsd_arena_get(tsd);
+        if (tsd_arena == NULL || arena_is_auto(tsd_arena)) {
+            return arena_choose_huge(tsd);
+        }
+    }
+
+    return arena_choose(tsd, NULL);
+}
+
 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
 arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
     cassert(config_prof);
@@ -28,7 +54,7 @@ arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,
+arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
     alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
     cassert(config_prof);
     assert(ptr != NULL);
@@ -47,7 +73,7 @@ arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,
 }
 
 static inline void
-arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, UNUSED prof_tctx_t *tctx) {
+arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
     cassert(config_prof);
     assert(ptr != NULL);
 
@@ -57,6 +83,32 @@ arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, UNUSED prof_tctx_t *tctx) {
     large_prof_tctx_reset(tsdn, extent);
 }
 
+JEMALLOC_ALWAYS_INLINE nstime_t
+arena_prof_alloc_time_get(tsdn_t *tsdn, const void *ptr,
+    alloc_ctx_t *alloc_ctx) {
+    cassert(config_prof);
+    assert(ptr != NULL);
+
+    extent_t *extent = iealloc(tsdn, ptr);
+    /*
+     * Unlike arena_prof_prof_tctx_{get, set}, we only call this once we're
+     * sure we have a sampled allocation.
+     */
+    assert(!extent_slab_get(extent));
+    return large_prof_alloc_time_get(extent);
+}
+
+JEMALLOC_ALWAYS_INLINE void
+arena_prof_alloc_time_set(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx,
+    nstime_t t) {
+    cassert(config_prof);
+    assert(ptr != NULL);
+
+    extent_t *extent = iealloc(tsdn, ptr);
+    assert(!extent_slab_get(extent));
+    large_prof_alloc_time_set(extent, t);
+}
+
 JEMALLOC_ALWAYS_INLINE void
 arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks) {
     tsd_t *tsd;
@@ -83,14 +135,33 @@ arena_decay_tick(tsdn_t *tsdn, arena_t *arena) {
     arena_decay_ticks(tsdn, arena, 1);
 }
 
+/* Purge a single extent to retained / unmapped directly. */
+JEMALLOC_ALWAYS_INLINE void
+arena_decay_extent(tsdn_t *tsdn,arena_t *arena, extent_hooks_t **r_extent_hooks,
+    extent_t *extent) {
+    size_t extent_size = extent_size_get(extent);
+    extent_dalloc_wrapper(tsdn, arena,
+        r_extent_hooks, extent);
+    if (config_stats) {
+        /* Update stats accordingly. */
+        arena_stats_lock(tsdn, &arena->stats);
+        arena_stats_add_u64(tsdn, &arena->stats,
+            &arena->decay_dirty.stats->nmadvise, 1);
+        arena_stats_add_u64(tsdn, &arena->stats,
+            &arena->decay_dirty.stats->purged, extent_size >> LG_PAGE);
+        arena_stats_sub_zu(tsdn, &arena->stats, &arena->stats.mapped,
+            extent_size);
+        arena_stats_unlock(tsdn, &arena->stats);
+    }
+}
+
 JEMALLOC_ALWAYS_INLINE void *
 arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
     tcache_t *tcache, bool slow_path) {
     assert(!tsdn_null(tsdn) || tcache == NULL);
-    assert(size != 0);
 
     if (likely(tcache != NULL)) {
-        if (likely(size <= SMALL_MAXCLASS)) {
+        if (likely(size <= SC_SMALL_MAXCLASS)) {
             return tcache_alloc_small(tsdn_tsd(tsdn), arena,
                 tcache, size, ind, zero, slow_path);
         }
@@ -119,7 +190,7 @@ arena_salloc(tsdn_t *tsdn, const void *ptr) {
 
     szind_t szind = rtree_szind_read(tsdn, &extents_rtree, rtree_ctx,
         (uintptr_t)ptr, true);
-    assert(szind != NSIZES);
+    assert(szind != SC_NSIZES);
 
     return sz_index2size(szind);
 }
@@ -152,7 +223,7 @@ arena_vsalloc(tsdn_t *tsdn, const void *ptr) {
     /* Only slab members should be looked up via interior pointers. */
     assert(extent_addr_get(extent) == ptr || extent_slab_get(extent));
 
-    assert(szind != NSIZES);
+    assert(szind != SC_NSIZES);
 
     return sz_index2size(szind);
 }
@@ -173,7 +244,7 @@ arena_dalloc_no_tcache(tsdn_t *tsdn, void *ptr) {
         extent_t *extent = rtree_extent_read(tsdn, &extents_rtree,
             rtree_ctx, (uintptr_t)ptr, true);
         assert(szind == extent_szind_get(extent));
-        assert(szind < NSIZES);
+        assert(szind < SC_NSIZES);
         assert(slab == extent_slab_get(extent));
     }
 
@@ -203,7 +274,7 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
     if (alloc_ctx != NULL) {
         szind = alloc_ctx->szind;
         slab = alloc_ctx->slab;
-        assert(szind != NSIZES);
+        assert(szind != SC_NSIZES);
     } else {
         rtree_ctx = tsd_rtree_ctx(tsdn_tsd(tsdn));
         rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
@@ -215,7 +286,7 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
         extent_t *extent = rtree_extent_read(tsdn, &extents_rtree,
             rtree_ctx, (uintptr_t)ptr, true);
         assert(szind == extent_szind_get(extent));
-        assert(szind < NSIZES);
+        assert(szind < SC_NSIZES);
         assert(slab == extent_slab_get(extent));
     }
 
@@ -225,7 +296,7 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
             slow_path);
     } else {
         if (szind < nhbins) {
-            if (config_prof && unlikely(szind < NBINS)) {
+            if (config_prof && unlikely(szind < SC_NBINS)) {
                 arena_dalloc_promoted(tsdn, ptr, tcache,
                     slow_path);
             } else {
@@ -242,7 +313,7 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
 static inline void
 arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
     assert(ptr != NULL);
-    assert(size <= LARGE_MAXCLASS);
+    assert(size <= SC_LARGE_MAXCLASS);
 
     szind_t szind;
     bool slab;
@@ -252,7 +323,7 @@ arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
          * object, so base szind and slab on the given size.
          */
         szind = sz_size2index(size);
-        slab = (szind < NBINS);
+        slab = (szind < SC_NBINS);
     }
 
     if ((config_prof && opt_prof) || config_debug) {
@@ -264,7 +335,7 @@ arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
             (uintptr_t)ptr, true, &szind, &slab);
 
         assert(szind == sz_size2index(size));
-        assert((config_prof && opt_prof) || slab == (szind < NBINS));
+        assert((config_prof && opt_prof) || slab == (szind < SC_NBINS));
 
         if (config_debug) {
             extent_t *extent = rtree_extent_read(tsdn,
@@ -288,7 +359,7 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
     alloc_ctx_t *alloc_ctx, bool slow_path) {
     assert(!tsdn_null(tsdn) || tcache == NULL);
     assert(ptr != NULL);
-    assert(size <= LARGE_MAXCLASS);
+    assert(size <= SC_LARGE_MAXCLASS);
 
     if (unlikely(tcache == NULL)) {
         arena_sdalloc_no_tcache(tsdn, ptr, size);
@@ -297,7 +368,7 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
 
     szind_t szind;
     bool slab;
-    UNUSED alloc_ctx_t local_ctx;
+    alloc_ctx_t local_ctx;
     if (config_prof && opt_prof) {
         if (alloc_ctx == NULL) {
             /* Uncommon case and should be a static check. */
@@ -318,7 +389,7 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
          * object, so base szind and slab on the given size.
          */
         szind = sz_size2index(size);
-        slab = (szind < NBINS);
+        slab = (szind < SC_NBINS);
     }
 
     if (config_debug) {
@@ -337,7 +408,7 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
             slow_path);
     } else {
         if (szind < nhbins) {
-            if (config_prof && unlikely(szind < NBINS)) {
+            if (config_prof && unlikely(szind < SC_NBINS)) {
                 arena_dalloc_promoted(tsdn, ptr, tcache,
                     slow_path);
             } else {
```
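The most consequential addition above is `arena_choose_maybe_huge`: an explicitly requested arena always wins, but oversized requests from threads using automatic arena selection are routed to a dedicated huge arena. Below is a minimal standalone sketch of that routing decision; the toy `arena_t`, the two static arenas, and the helper name `choose_maybe_huge` are stand-ins for jemalloc's real thread-state machinery, and the 8 MiB threshold is illustrative (in jemalloc, `oversize_threshold` is a runtime option).

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Toy stand-ins for jemalloc's arenas (hypothetical; the real logic
 * lives in arena_choose_maybe_huge() in the diff above). */
typedef struct {
	const char *name;
	bool is_auto; /* models arena_is_auto() */
} arena_t;

static arena_t auto_arena = { "auto", true };
static arena_t huge_arena = { "huge", true };
static size_t oversize_threshold = (size_t)8 << 20; /* illustrative 8 MiB */

/*
 * Mirrors the branch structure of arena_choose_maybe_huge(): an explicit
 * arena short-circuits everything; oversized requests from threads not
 * pinned to a manual arena go to the dedicated huge arena.
 */
static arena_t *
choose_maybe_huge(arena_t *explicit_arena, arena_t *thread_arena, size_t size) {
	if (explicit_arena != NULL) {
		return explicit_arena;
	}
	if (size >= oversize_threshold &&
	    (thread_arena == NULL || thread_arena->is_auto)) {
		return &huge_arena;
	}
	return &auto_arena;
}

int
main(void) {
	printf("4 KiB  -> %s arena\n", choose_maybe_huge(NULL, NULL, 4096)->name);
	printf("16 MiB -> %s arena\n",
	    choose_maybe_huge(NULL, NULL, (size_t)16 << 20)->name);
	return 0;
}
```

Segregating huge allocations this way plausibly pairs with the new `arena_decay_extent` ("purge a single extent to retained / unmapped directly"), letting oversized extents be released promptly without disturbing the decay behavior of the arenas serving small and large classes.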
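An easy-to-miss detail in `arena_decay_extent` is the units of its stats updates: `purged` counts pages, so the extent's byte size is shifted right by `LG_PAGE`, while `mapped` is decremented in bytes. A quick sketch of that arithmetic, assuming 4 KiB pages (`LG_PAGE` = 12; jemalloc actually determines the page size at configure time):

```c
#include <stddef.h>
#include <stdio.h>

#define LG_PAGE 12 /* assumed 4 KiB pages */

int
main(void) {
	size_t extent_size = (size_t)2 << 20; /* purging a 2 MiB extent */

	/* Mirrors the counter updates in arena_decay_extent(). */
	printf("nmadvise += 1\n");
	printf("purged   += %zu pages\n", extent_size >> LG_PAGE); /* 512 */
	printf("mapped   -= %zu bytes\n", extent_size);
	return 0;
}
```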
