author     Jason Evans <jasone@canonware.com>    2017-03-14 00:48:26 (GMT)
committer  Jason Evans <jasone@canonware.com>    2017-03-23 01:33:32 (GMT)
commit     f50d6009fe945f17584e4a004d1aae60d07bedb5 (patch)
tree       1f3f5aedc1fc15c0ad17a76a764278b7c87866f9
parent     e8921cf2eb1d049b688e29e14187c26ca05193ee (diff)
Remove binind field from arena_slab_data_t.
binind is now redundant; the containing extent_t's szind field always
provides the same value.
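A brief note on that claim, as a minimal sketch (not part of the commit): for a slab-backed extent, the size class index stored in the extent_t is exactly the bin index, so the per-slab copy removed below carries no extra information. The identifiers are the jemalloc internals touched in the diff; the helper name slab_binind is hypothetical and used only for illustration.

/*
 * Hypothetical helper, for illustration only (not part of the commit).
 * For a slab extent, the extent's szind is the bin index, so reading
 * extent_szind_get() yields the same value the removed binind field held.
 */
static inline szind_t
slab_binind(const extent_t *slab) {
	assert(extent_slab_get(slab));
	/* Previously: extent_slab_data_get_const(slab)->binind. */
	return extent_szind_get(slab);
}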
-rw-r--r--  include/jemalloc/internal/arena_inlines_b.h  | 17
-rw-r--r--  include/jemalloc/internal/arena_structs_a.h  |  3
-rw-r--r--  src/arena.c                                   | 10
3 files changed, 8 insertions, 22 deletions
diff --git a/include/jemalloc/internal/arena_inlines_b.h b/include/jemalloc/internal/arena_inlines_b.h
index 0d4aff3..92c89a5 100644
--- a/include/jemalloc/internal/arena_inlines_b.h
+++ b/include/jemalloc/internal/arena_inlines_b.h
@@ -117,17 +117,9 @@ arena_aalloc(tsdn_t *tsdn, const void *ptr) {
 /* Return the size of the allocation pointed to by ptr. */
 JEMALLOC_ALWAYS_INLINE size_t
 arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
-	size_t ret;
-
 	assert(ptr != NULL);
 
-	if (likely(extent_slab_get(extent))) {
-		ret = index2size(extent_slab_data_get_const(extent)->binind);
-	} else {
-		ret = large_salloc(tsdn, extent);
-	}
-
-	return ret;
+	return index2size(extent_szind_get(extent));
 }
 
 JEMALLOC_ALWAYS_INLINE void
@@ -136,19 +128,17 @@ arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
+	szind_t szind = extent_szind_get(extent);
 	if (likely(extent_slab_get(extent))) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
-			szind_t binind = extent_slab_data_get(extent)->binind;
-			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, binind,
+			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
 			    slow_path);
 		} else {
 			arena_dalloc_small(tsdn, extent_arena_get(extent),
 			    extent, ptr);
 		}
 	} else {
-		szind_t szind = extent_szind_get(extent);
-
 		if (likely(tcache != NULL) && szind < nhbins) {
 			if (config_prof && unlikely(szind < NBINS)) {
 				arena_dalloc_promoted(tsdn, extent, ptr,
@@ -173,7 +163,6 @@ arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
 	if (likely(extent_slab_get(extent))) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
-			assert(szind == extent_slab_data_get(extent)->binind);
 			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
 			    slow_path);
 		} else {
diff --git a/include/jemalloc/internal/arena_structs_a.h b/include/jemalloc/internal/arena_structs_a.h
index ccb3b05..ed265b2 100644
--- a/include/jemalloc/internal/arena_structs_a.h
+++ b/include/jemalloc/internal/arena_structs_a.h
@@ -2,9 +2,6 @@
 #define JEMALLOC_INTERNAL_ARENA_STRUCTS_A_H
 
 struct arena_slab_data_s {
-	/* Index of bin this slab is associated with. */
-	szind_t		binind;
-
 	/* Number of free regions in slab. */
 	unsigned	nfree;
 
diff --git a/src/arena.c b/src/arena.c
index 2c432e6..968343c 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -384,7 +384,7 @@ arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) {
 JEMALLOC_INLINE_C void
 arena_slab_reg_dalloc(tsdn_t *tsdn, extent_t *slab,
     arena_slab_data_t *slab_data, void *ptr) {
-	szind_t binind = slab_data->binind;
+	szind_t binind = extent_szind_get(slab);
 	const arena_bin_info_t *bin_info = &arena_bin_info[binind];
 	size_t regind = arena_slab_regind(slab, binind, ptr);
 
@@ -1181,7 +1181,6 @@ arena_slab_alloc(tsdn_t *tsdn, arena_t *arena, szind_t binind,
 
 	/* Initialize slab internals. */
 	arena_slab_data_t *slab_data = extent_slab_data_get(slab);
-	slab_data->binind = binind;
 	slab_data->nfree = bin_info->nregs;
 	bitmap_init(slab_data->bitmap, &bin_info->bitmap_info);
 
@@ -1511,7 +1510,7 @@ arena_dissociate_bin_slab(extent_t *slab, arena_bin_t *bin) {
 	if (slab == bin->slabcur) {
 		bin->slabcur = NULL;
 	} else {
-		szind_t binind = extent_slab_data_get(slab)->binind;
+		szind_t binind = extent_szind_get(slab);
 		const arena_bin_info_t *bin_info = &arena_bin_info[binind];
 
 		/*
@@ -1573,7 +1572,7 @@ static void
 arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
     void *ptr, bool junked) {
 	arena_slab_data_t *slab_data = extent_slab_data_get(slab);
-	szind_t binind = slab_data->binind;
+	szind_t binind = extent_szind_get(slab);
 	arena_bin_t *bin = &arena->bins[binind];
 	const arena_bin_info_t *bin_info = &arena_bin_info[binind];
 
@@ -1604,7 +1603,8 @@ arena_dalloc_bin_junked_locked(tsdn_t *tsdn, arena_t *arena, extent_t *extent,
 
 static void
 arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr) {
-	arena_bin_t *bin = &arena->bins[extent_slab_data_get(extent)->binind];
+	szind_t binind = extent_szind_get(extent);
+	arena_bin_t *bin = &arena->bins[binind];
 
 	malloc_mutex_lock(tsdn, &bin->lock);
 	arena_dalloc_bin_locked_impl(tsdn, arena, extent, ptr, false);
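Net effect on the deallocation path, as a simplified sketch assembled from the hunks above (large-object and profiling branches omitted): a single extent_szind_get() read now serves both the small and large cases, where the small path previously needed a separate read of the slab's binind.

	/* Sketch of arena_dalloc after the change (small path only). */
	szind_t szind = extent_szind_get(extent);	/* one metadata read */
	if (likely(extent_slab_get(extent))) {
		/* Small allocation: szind doubles as the bin index. */
		if (likely(tcache != NULL)) {
			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
			    slow_path);
		} else {
			arena_dalloc_small(tsdn, extent_arena_get(extent),
			    extent, ptr);
		}
	}
	/* The large branch reuses the same szind; see the second hunk above. */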