summaryrefslogtreecommitdiffstats
path: root/include/jemalloc
diff options
context:
space:
mode:
authorQi Wang <interwq@gwu.edu>2017-03-27 18:48:39 (GMT)
committerQi Wang <interwq@gmail.com>2017-03-27 20:42:31 (GMT)
commitaf3d737a9aafae8b27a837edaf1f54c14d45a727 (patch)
treeff91e4be033b483eca844b319ec632040b415d75 /include/jemalloc
parentc6d1819e483ca226e27ca4cf81f7d850e4d4dc79 (diff)
downloadjemalloc-af3d737a9aafae8b27a837edaf1f54c14d45a727.zip
jemalloc-af3d737a9aafae8b27a837edaf1f54c14d45a727.tar.gz
jemalloc-af3d737a9aafae8b27a837edaf1f54c14d45a727.tar.bz2
Simplify rtree cache replacement policy.
To avoid memmove on free() fast path, simplify the cache replacement policy to only bubble up the cache hit element by 1.
Diffstat (limited to 'include/jemalloc')
-rw-r--r--include/jemalloc/internal/rtree_inlines.h25
1 file changed, 11 insertions, 14 deletions
diff --git a/include/jemalloc/internal/rtree_inlines.h b/include/jemalloc/internal/rtree_inlines.h
index 4b530df..3e619b3 100644
--- a/include/jemalloc/internal/rtree_inlines.h
+++ b/include/jemalloc/internal/rtree_inlines.h
@@ -325,30 +325,27 @@ rtree_leaf_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
if (likely(rtree_ctx->cache[i].leafkey == leafkey)) { \
rtree_leaf_elm_t *leaf = rtree_ctx->cache[i].leaf; \
if (likely(leaf != NULL)) { \
- /* Reorder. */ \
- memmove(&rtree_ctx->cache[1], \
- &rtree_ctx->cache[0], \
- sizeof(rtree_ctx_cache_elm_t) * i); \
- rtree_ctx->cache[0].leafkey = leafkey; \
- rtree_ctx->cache[0].leaf = leaf; \
- \
+ /* Bubble up by one. */ \
+ if (i > 0) { \
+ rtree_ctx->cache[i] = \
+ rtree_ctx->cache[i - 1]; \
+ rtree_ctx->cache[i - 1].leafkey = \
+ leafkey; \
+ rtree_ctx->cache[i - 1].leaf = leaf; \
+ } \
uintptr_t subkey = rtree_subkey(key, \
RTREE_HEIGHT-1); \
return &leaf[subkey]; \
} \
} \
} while (0)
- /* Check the MRU cache entry. */
+ /* Check the first cache entry. */
RTREE_CACHE_CHECK(0);
/*
* Search the remaining cache elements, and on success move the matching
- * element to the front. Unroll the first iteration to avoid calling
- * memmove() (the compiler typically optimizes it into raw moves).
+ * element up by one slot.
*/
- if (RTREE_CTX_NCACHE > 1) {
- RTREE_CACHE_CHECK(1);
- }
- for (unsigned i = 2; i < RTREE_CTX_NCACHE; i++) {
+ for (unsigned i = 1; i < RTREE_CTX_NCACHE; i++) {
RTREE_CACHE_CHECK(i);
}
#undef RTREE_CACHE_CHECK