author     Jason Evans <jasone@canonware.com>    2017-03-14 00:48:26 (GMT)
committer  Jason Evans <jasone@canonware.com>    2017-03-23 01:33:32 (GMT)
commit     f50d6009fe945f17584e4a004d1aae60d07bedb5 (patch)
tree       1f3f5aedc1fc15c0ad17a76a764278b7c87866f9 /include/jemalloc
parent     e8921cf2eb1d049b688e29e14187c26ca05193ee (diff)
Remove binind field from arena_slab_data_t.
binind is now redundant; the containing extent_t's szind field always provides the same value.
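
For illustration, a minimal self-contained sketch of the redundancy this change removes. The types below are hypothetical, simplified stand-ins (not the real jemalloc arena_slab_data_t / extent_t or their accessors): for a slab-backed extent, the extent's szind already encodes the bin's size-class index, so a separate binind copy in the slab data carries no extra information.

#include <assert.h>
#include <stddef.h>

typedef unsigned szind_t;

/* Hypothetical, simplified stand-ins for arena_slab_data_t and extent_t. */
typedef struct {
	szind_t binind;   /* the redundant field this commit removes */
	unsigned nfree;
} slab_data_t;

typedef struct {
	int slab;         /* nonzero if the extent backs a slab of small regions */
	szind_t szind;    /* size-class index of the extent's contents */
	slab_data_t slab_data;
} extent_t;

/* Toy size-class table; the real index2size() is likewise a table lookup. */
static const size_t size_classes[] = {8, 16, 32, 48, 64, 80, 96, 112, 128};

static size_t
index2size(szind_t ind) {
	return size_classes[ind];
}

int
main(void) {
	/* A slab extent: its bin index and its szind necessarily agree. */
	extent_t ext = {1, 3, {3, 42}};

	size_t before = index2size(ext.slab_data.binind); /* old lookup path */
	size_t after = index2size(ext.szind);             /* new lookup path */

	assert(before == after); /* both yield 48; binind added nothing */
	return 0;
}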
Diffstat (limited to 'include/jemalloc')
-rw-r--r--  include/jemalloc/internal/arena_inlines_b.h  | 17 +++--------------
-rw-r--r--  include/jemalloc/internal/arena_structs_a.h  |  3 ---
2 files changed, 3 insertions(+), 17 deletions(-)
diff --git a/include/jemalloc/internal/arena_inlines_b.h b/include/jemalloc/internal/arena_inlines_b.h
index 0d4aff3..92c89a5 100644
--- a/include/jemalloc/internal/arena_inlines_b.h
+++ b/include/jemalloc/internal/arena_inlines_b.h
@@ -117,17 +117,9 @@ arena_aalloc(tsdn_t *tsdn, const void *ptr) {
 /* Return the size of the allocation pointed to by ptr. */
 JEMALLOC_ALWAYS_INLINE size_t
 arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
-	size_t ret;
-
 	assert(ptr != NULL);
 
-	if (likely(extent_slab_get(extent))) {
-		ret = index2size(extent_slab_data_get_const(extent)->binind);
-	} else {
-		ret = large_salloc(tsdn, extent);
-	}
-
-	return ret;
+	return index2size(extent_szind_get(extent));
 }
 
 JEMALLOC_ALWAYS_INLINE void
@@ -136,19 +128,17 @@ arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
+	szind_t szind = extent_szind_get(extent);
 	if (likely(extent_slab_get(extent))) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
-			szind_t binind = extent_slab_data_get(extent)->binind;
-			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, binind,
+			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
 			    slow_path);
 		} else {
 			arena_dalloc_small(tsdn, extent_arena_get(extent),
 			    extent, ptr);
 		}
 	} else {
-		szind_t szind = extent_szind_get(extent);
-
 		if (likely(tcache != NULL) && szind < nhbins) {
 			if (config_prof && unlikely(szind < NBINS)) {
 				arena_dalloc_promoted(tsdn, extent, ptr,
@@ -173,7 +163,6 @@ arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
 	if (likely(extent_slab_get(extent))) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
-			assert(szind == extent_slab_data_get(extent)->binind);
 			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
 			    slow_path);
 		} else {
diff --git a/include/jemalloc/internal/arena_structs_a.h b/include/jemalloc/internal/arena_structs_a.h
index ccb3b05..ed265b2 100644
--- a/include/jemalloc/internal/arena_structs_a.h
+++ b/include/jemalloc/internal/arena_structs_a.h
@@ -2,9 +2,6 @@
 #define JEMALLOC_INTERNAL_ARENA_STRUCTS_A_H
 
 struct arena_slab_data_s {
-	/* Index of bin this slab is associated with. */
-	szind_t binind;
-
 	/* Number of free regions in slab. */
 	unsigned nfree;