summary | refs | log | tree | commit | diff | stats
path: root/include/jemalloc
diff options
context:
space:
mode:
author: Jason Evans <jasone@canonware.com> 2015-05-16 00:02:30 (GMT)
committer: Jason Evans <jasone@canonware.com> 2015-05-16 00:02:30 (GMT)
commit: fd5f9e43c35b39740e218fececbb70d929546bb0 (patch)
tree: 8e8f3c26fa6da935e317aab282d25e48da09d48c /include/jemalloc
parent: c451831264885b84f54a05e0894ad88bb30bd5df (diff)
download: jemalloc-fd5f9e43c35b39740e218fececbb70d929546bb0.zip
download: jemalloc-fd5f9e43c35b39740e218fececbb70d929546bb0.tar.gz
download: jemalloc-fd5f9e43c35b39740e218fececbb70d929546bb0.tar.bz2
Avoid atomic operations for dependent rtree reads.
Diffstat (limited to 'include/jemalloc')
-rw-r--r--  include/jemalloc/internal/chunk.h                |  6
-rw-r--r--  include/jemalloc/internal/jemalloc_internal.h.in |  2
-rw-r--r--  include/jemalloc/internal/rtree.h                | 31
3 files changed, 28 insertions, 11 deletions
diff --git a/include/jemalloc/internal/chunk.h b/include/jemalloc/internal/chunk.h
index 8093814..c253cdc 100644
--- a/include/jemalloc/internal/chunk.h
+++ b/include/jemalloc/internal/chunk.h
@@ -70,15 +70,15 @@ void chunk_postfork_child(void);
#ifdef JEMALLOC_H_INLINES
#ifndef JEMALLOC_ENABLE_INLINE
-extent_node_t *chunk_lookup(const void *chunk);
+extent_node_t *chunk_lookup(const void *chunk, bool dependent);
#endif
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_CHUNK_C_))
JEMALLOC_INLINE extent_node_t *
-chunk_lookup(const void *chunk)
+chunk_lookup(const void *ptr, bool dependent)
{
- return (rtree_get(&chunks_rtree, (uintptr_t)chunk));
+ return (rtree_get(&chunks_rtree, (uintptr_t)ptr, dependent));
}
#endif
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index 910ebf7..0268245 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -948,7 +948,7 @@ ivsalloc(const void *ptr, bool demote)
extent_node_t *node;
/* Return 0 if ptr is not within a chunk managed by jemalloc. */
- node = chunk_lookup(CHUNK_ADDR2BASE(ptr));
+ node = chunk_lookup(ptr, false);
if (node == NULL)
return (0);
/* Only arena chunks should be looked up via interior pointers. */
diff --git a/include/jemalloc/internal/rtree.h b/include/jemalloc/internal/rtree.h
index 7a8ebfd..28ae9d1 100644
--- a/include/jemalloc/internal/rtree.h
+++ b/include/jemalloc/internal/rtree.h
@@ -114,13 +114,14 @@ bool rtree_node_valid(rtree_node_elm_t *node);
rtree_node_elm_t *rtree_child_tryread(rtree_node_elm_t *elm);
rtree_node_elm_t *rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm,
unsigned level);
-extent_node_t *rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm);
+extent_node_t *rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm,
+ bool dependent);
void rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm,
const extent_node_t *val);
rtree_node_elm_t *rtree_subtree_tryread(rtree_t *rtree, unsigned level);
rtree_node_elm_t *rtree_subtree_read(rtree_t *rtree, unsigned level);
-extent_node_t *rtree_get(rtree_t *rtree, uintptr_t key);
+extent_node_t *rtree_get(rtree_t *rtree, uintptr_t key, bool dependent);
bool rtree_set(rtree_t *rtree, uintptr_t key, const extent_node_t *val);
#endif
@@ -179,10 +180,25 @@ rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm, unsigned level)
}
JEMALLOC_INLINE extent_node_t *
-rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm)
+rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm, bool dependent)
{
- return (atomic_read_p(&elm->pun));
+ if (dependent) {
+ /*
+ * Reading a val on behalf of a pointer to a valid allocation is
+ * guaranteed to be a clean read even without synchronization,
+ * because the rtree update became visible in memory before the
+ * pointer came into existence.
+ */
+ return (elm->val);
+ } else {
+ /*
+ * An arbitrary read, e.g. on behalf of ivsalloc(), may not be
+ * dependent on a previous rtree write, which means a stale read
+ * could result if synchronization were omitted here.
+ */
+ return (atomic_read_p(&elm->pun));
+ }
}
JEMALLOC_INLINE void
@@ -216,7 +232,7 @@ rtree_subtree_read(rtree_t *rtree, unsigned level)
}
JEMALLOC_INLINE extent_node_t *
-rtree_get(rtree_t *rtree, uintptr_t key)
+rtree_get(rtree_t *rtree, uintptr_t key, bool dependent)
{
uintptr_t subkey;
unsigned i, start_level;
@@ -226,7 +242,7 @@ rtree_get(rtree_t *rtree, uintptr_t key)
for (i = start_level, node = rtree_subtree_tryread(rtree, start_level);
/**/; i++, node = child) {
- if (unlikely(!rtree_node_valid(node)))
+ if (!dependent && unlikely(!rtree_node_valid(node)))
return (NULL);
subkey = rtree_subkey(rtree, key, i);
if (i == rtree->height - 1) {
@@ -234,7 +250,8 @@ rtree_get(rtree_t *rtree, uintptr_t key)
* node is a leaf, so it contains values rather than
* child pointers.
*/
- return (rtree_val_read(rtree, &node[subkey]));
+ return (rtree_val_read(rtree, &node[subkey],
+ dependent));
}
assert(i < rtree->height - 1);
child = rtree_child_tryread(&node[subkey]);