summaryrefslogtreecommitdiffstats
path: root/include/jemalloc/internal
diff options
context:
space:
mode:
authorJason Evans <je@fb.com>2014-11-27 19:22:36 (GMT)
committerJason Evans <je@fb.com>2015-01-24 07:34:43 (GMT)
commit4581b97809e7e545c38b996870a4e7284a620bc5 (patch)
tree9bd91eba3bf168fbaed53e81c729aaf742e27acb /include/jemalloc/internal
parentec98a44662a82aff30a54ed86bd9b24f36cfe67e (diff)
downloadjemalloc-4581b97809e7e545c38b996870a4e7284a620bc5.zip
jemalloc-4581b97809e7e545c38b996870a4e7284a620bc5.tar.gz
jemalloc-4581b97809e7e545c38b996870a4e7284a620bc5.tar.bz2
Implement metadata statistics.
There are three categories of metadata:
- Base allocations are used for bootstrap-sensitive internal allocator data structures.
- Arena chunk headers comprise pages which track the states of the non-metadata pages.
- Internal allocations differ from application-originated allocations in that they are for internal use, and that they are omitted from heap profiles.

The metadata statistics comprise the metadata categories as follows:
- stats.metadata: All metadata -- base + arena chunk headers + internal allocations.
- stats.arenas.<i>.metadata.mapped: Arena chunk headers.
- stats.arenas.<i>.metadata.allocated: Internal allocations. This is reported separately from the other metadata statistics because it overlaps with the allocated and active statistics, whereas the other metadata statistics do not.

Base allocations are not reported separately, though their magnitude can be computed by subtracting the arena-specific metadata.

This resolves #163.
Diffstat (limited to 'include/jemalloc/internal')
-rw-r--r--include/jemalloc/internal/arena.h34
-rw-r--r--include/jemalloc/internal/base.h1
-rw-r--r--include/jemalloc/internal/ctl.h1
-rw-r--r--include/jemalloc/internal/huge.h1
-rw-r--r--include/jemalloc/internal/jemalloc_internal.h.in156
-rw-r--r--include/jemalloc/internal/private_symbols.txt10
-rw-r--r--include/jemalloc/internal/stats.h7
7 files changed, 161 insertions, 49 deletions
diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index 1e19023..46367f6 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -437,6 +437,9 @@ void arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind,
size_t runind, index_t binind, size_t flags);
void arena_mapbits_unzeroed_set(arena_chunk_t *chunk, size_t pageind,
size_t unzeroed);
+void arena_metadata_allocated_add(arena_t *arena, size_t size);
+void arena_metadata_allocated_sub(arena_t *arena, size_t size);
+size_t arena_metadata_allocated_get(arena_t *arena);
bool arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes);
bool arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes);
bool arena_prof_accum(arena_t *arena, uint64_t accumbytes);
@@ -448,6 +451,7 @@ prof_tctx_t *arena_prof_tctx_get(const void *ptr);
void arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
void *arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
bool try_tcache);
+arena_t *arena_aalloc(const void *ptr);
size_t arena_salloc(const void *ptr, bool demote);
void arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr,
bool try_tcache);
@@ -699,6 +703,27 @@ arena_mapbits_unzeroed_set(arena_chunk_t *chunk, size_t pageind,
unzeroed);
}
+JEMALLOC_INLINE void
+arena_metadata_allocated_add(arena_t *arena, size_t size)
+{
+
+ atomic_add_z(&arena->stats.metadata_allocated, size);
+}
+
+JEMALLOC_INLINE void
+arena_metadata_allocated_sub(arena_t *arena, size_t size)
+{
+
+ atomic_sub_z(&arena->stats.metadata_allocated, size);
+}
+
+JEMALLOC_INLINE size_t
+arena_metadata_allocated_get(arena_t *arena)
+{
+
+ return (atomic_read_z(&arena->stats.metadata_allocated));
+}
+
JEMALLOC_INLINE bool
arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes)
{
@@ -952,6 +977,15 @@ arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
}
}
+JEMALLOC_ALWAYS_INLINE arena_t *
+arena_aalloc(const void *ptr)
+{
+ arena_chunk_t *chunk;
+
+ chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+ return (chunk->arena);
+}
+
/* Return the size of the allocation pointed to by ptr. */
JEMALLOC_ALWAYS_INLINE size_t
arena_salloc(const void *ptr, bool demote)
diff --git a/include/jemalloc/internal/base.h b/include/jemalloc/internal/base.h
index 3fb80b9..18b7a72 100644
--- a/include/jemalloc/internal/base.h
+++ b/include/jemalloc/internal/base.h
@@ -13,6 +13,7 @@ void *base_alloc(size_t size);
void *base_calloc(size_t number, size_t size);
extent_node_t *base_node_alloc(void);
void base_node_dalloc(extent_node_t *node);
+size_t base_allocated_get(void);
bool base_boot(void);
void base_prefork(void);
void base_postfork_parent(void);
diff --git a/include/jemalloc/internal/ctl.h b/include/jemalloc/internal/ctl.h
index a3e899e..65617bc 100644
--- a/include/jemalloc/internal/ctl.h
+++ b/include/jemalloc/internal/ctl.h
@@ -52,6 +52,7 @@ struct ctl_arena_stats_s {
struct ctl_stats_s {
size_t allocated;
size_t active;
+ size_t metadata;
size_t mapped;
struct {
size_t current; /* stats_chunks.curchunks */
diff --git a/include/jemalloc/internal/huge.h b/include/jemalloc/internal/huge.h
index 39d8aa5..decb024 100644
--- a/include/jemalloc/internal/huge.h
+++ b/include/jemalloc/internal/huge.h
@@ -23,6 +23,7 @@ typedef void (huge_dalloc_junk_t)(void *, size_t);
extern huge_dalloc_junk_t *huge_dalloc_junk;
#endif
void huge_dalloc(tsd_t *tsd, void *ptr, bool try_tcache);
+arena_t *huge_aalloc(const void *ptr);
size_t huge_salloc(const void *ptr);
prof_tctx_t *huge_prof_tctx_get(const void *ptr);
void huge_prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index 4107860..a477855 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -404,8 +404,9 @@ extern size_t const index2size_tab[NSIZES];
extern uint8_t const size2index_tab[];
arena_t *a0get(void);
-void *a0malloc(size_t size, bool zero);
+void *a0malloc(size_t size);
void a0dalloc(void *ptr);
+size_t a0allocated(void);
arena_t *arenas_extend(unsigned ind);
arena_t *arena_init(unsigned ind);
unsigned narenas_total_get(void);
@@ -776,21 +777,27 @@ arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
#include "jemalloc/internal/quarantine.h"
#ifndef JEMALLOC_ENABLE_INLINE
+arena_t *iaalloc(const void *ptr);
+size_t isalloc(const void *ptr, bool demote);
+void *iallocztm(tsd_t *tsd, size_t size, bool zero, bool try_tcache,
+ bool is_metadata, arena_t *arena);
void *imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena);
void *imalloc(tsd_t *tsd, size_t size);
void *icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena);
void *icalloc(tsd_t *tsd, size_t size);
+void *ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
+ bool try_tcache, bool is_metadata, arena_t *arena);
void *ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
bool try_tcache, arena_t *arena);
void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
-size_t isalloc(const void *ptr, bool demote);
size_t ivsalloc(const void *ptr, bool demote);
size_t u2rz(size_t usize);
size_t p2rz(const void *ptr);
+void idalloctm(tsd_t *tsd, void *ptr, bool try_tcache, bool is_metadata);
void idalloct(tsd_t *tsd, void *ptr, bool try_tcache);
-void isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
void idalloc(tsd_t *tsd, void *ptr);
void iqalloc(tsd_t *tsd, void *ptr, bool try_tcache);
+void isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
void isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
void *iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
size_t extra, size_t alignment, bool zero, bool try_tcache_alloc,
@@ -805,45 +812,97 @@ bool ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra,
#endif
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
+JEMALLOC_ALWAYS_INLINE arena_t *
+iaalloc(const void *ptr)
+{
+ arena_t *arena;
+ arena_chunk_t *chunk;
+
+ assert(ptr != NULL);
+
+ chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+ if (likely(chunk != ptr))
+ arena = arena_aalloc(ptr);
+ else
+ arena = huge_aalloc(ptr);
+
+ return (arena);
+}
+
+/*
+ * Typical usage:
+ * void *ptr = [...]
+ * size_t sz = isalloc(ptr, config_prof);
+ */
+JEMALLOC_ALWAYS_INLINE size_t
+isalloc(const void *ptr, bool demote)
+{
+ size_t ret;
+ arena_chunk_t *chunk;
+
+ assert(ptr != NULL);
+ /* Demotion only makes sense if config_prof is true. */
+ assert(config_prof || !demote);
+
+ chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+ if (likely(chunk != ptr))
+ ret = arena_salloc(ptr, demote);
+ else
+ ret = huge_salloc(ptr);
+
+ return (ret);
+}
+
JEMALLOC_ALWAYS_INLINE void *
-imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
+iallocztm(tsd_t *tsd, size_t size, bool zero, bool try_tcache, bool is_metadata,
+ arena_t *arena)
{
+ void *ret;
assert(size != 0);
if (likely(size <= arena_maxclass))
- return (arena_malloc(tsd, arena, size, false, try_tcache));
+ ret = arena_malloc(tsd, arena, size, zero, try_tcache);
else
- return (huge_malloc(tsd, arena, size, false, try_tcache));
+ ret = huge_malloc(tsd, arena, size, zero, try_tcache);
+ if (config_stats && is_metadata && likely(ret != NULL)) {
+ arena_metadata_allocated_add(iaalloc(ret), isalloc(ret,
+ config_prof));
+ }
+ return (ret);
+}
+
+JEMALLOC_ALWAYS_INLINE void *
+imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
+{
+
+ return (iallocztm(tsd, size, false, try_tcache, false, arena));
}
JEMALLOC_ALWAYS_INLINE void *
imalloc(tsd_t *tsd, size_t size)
{
- return (imalloct(tsd, size, true, NULL));
+ return (iallocztm(tsd, size, false, true, false, NULL));
}
JEMALLOC_ALWAYS_INLINE void *
icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
{
- if (likely(size <= arena_maxclass))
- return (arena_malloc(tsd, arena, size, true, try_tcache));
- else
- return (huge_malloc(tsd, arena, size, true, try_tcache));
+ return (iallocztm(tsd, size, true, try_tcache, false, arena));
}
JEMALLOC_ALWAYS_INLINE void *
icalloc(tsd_t *tsd, size_t size)
{
- return (icalloct(tsd, size, true, NULL));
+ return (iallocztm(tsd, size, true, true, false, NULL));
}
JEMALLOC_ALWAYS_INLINE void *
-ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, bool try_tcache,
- arena_t *arena)
+ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
+ bool try_tcache, bool is_metadata, arena_t *arena)
{
void *ret;
@@ -865,40 +924,28 @@ ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, bool try_tcache,
try_tcache);
}
}
-
assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
+ if (config_stats && is_metadata && likely(ret != NULL)) {
+ arena_metadata_allocated_add(iaalloc(ret), isalloc(ret,
+ config_prof));
+ }
return (ret);
}
JEMALLOC_ALWAYS_INLINE void *
-ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero)
+ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, bool try_tcache,
+ arena_t *arena)
{
- return (ipalloct(tsd, usize, alignment, zero, true, NULL));
+ return (ipallocztm(tsd, usize, alignment, zero, try_tcache, false,
+ arena));
}
-/*
- * Typical usage:
- * void *ptr = [...]
- * size_t sz = isalloc(ptr, config_prof);
- */
-JEMALLOC_ALWAYS_INLINE size_t
-isalloc(const void *ptr, bool demote)
+JEMALLOC_ALWAYS_INLINE void *
+ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero)
{
- size_t ret;
- arena_chunk_t *chunk;
-
- assert(ptr != NULL);
- /* Demotion only makes sense if config_prof is true. */
- assert(config_prof || !demote);
-
- chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
- if (likely(chunk != ptr))
- ret = arena_salloc(ptr, demote);
- else
- ret = huge_salloc(ptr);
- return (ret);
+ return (ipallocztm(tsd, usize, alignment, zero, true, false, NULL));
}
JEMALLOC_ALWAYS_INLINE size_t
@@ -935,11 +982,15 @@ p2rz(const void *ptr)
}
JEMALLOC_ALWAYS_INLINE void
-idalloct(tsd_t *tsd, void *ptr, bool try_tcache)
+idalloctm(tsd_t *tsd, void *ptr, bool try_tcache, bool is_metadata)
{
arena_chunk_t *chunk;
assert(ptr != NULL);
+ if (config_stats && is_metadata) {
+ arena_metadata_allocated_sub(iaalloc(ptr), isalloc(ptr,
+ config_prof));
+ }
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
if (likely(chunk != ptr))
@@ -949,24 +1000,17 @@ idalloct(tsd_t *tsd, void *ptr, bool try_tcache)
}
JEMALLOC_ALWAYS_INLINE void
-isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)
+idalloct(tsd_t *tsd, void *ptr, bool try_tcache)
{
- arena_chunk_t *chunk;
-
- assert(ptr != NULL);
- chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
- if (likely(chunk != ptr))
- arena_sdalloc(tsd, chunk, ptr, size, try_tcache);
- else
- huge_dalloc(tsd, ptr, try_tcache);
+ idalloctm(tsd, ptr, try_tcache, false);
}
JEMALLOC_ALWAYS_INLINE void
idalloc(tsd_t *tsd, void *ptr)
{
- idalloct(tsd, ptr, true);
+ idalloctm(tsd, ptr, true, false);
}
JEMALLOC_ALWAYS_INLINE void
@@ -976,7 +1020,21 @@ iqalloc(tsd_t *tsd, void *ptr, bool try_tcache)
if (config_fill && unlikely(opt_quarantine))
quarantine(tsd, ptr);
else
- idalloct(tsd, ptr, try_tcache);
+ idalloctm(tsd, ptr, try_tcache, false);
+}
+
+JEMALLOC_ALWAYS_INLINE void
+isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)
+{
+ arena_chunk_t *chunk;
+
+ assert(ptr != NULL);
+
+ chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+ if (likely(chunk != ptr))
+ arena_sdalloc(tsd, chunk, ptr, size, try_tcache);
+ else
+ huge_dalloc(tsd, ptr, try_tcache);
}
JEMALLOC_ALWAYS_INLINE void
diff --git a/include/jemalloc/internal/private_symbols.txt b/include/jemalloc/internal/private_symbols.txt
index 1aaf80b..dfa8755 100644
--- a/include/jemalloc/internal/private_symbols.txt
+++ b/include/jemalloc/internal/private_symbols.txt
@@ -1,6 +1,7 @@
a0dalloc
a0get
a0malloc
+arena_aalloc
arena_get
arena_get_hard
arena_alloc_junk_small
@@ -50,6 +51,9 @@ arena_mapbitsp_read
arena_mapbitsp_write
arena_maxclass
arena_maxrun
+arena_metadata_allocated_add
+arena_metadata_allocated_get
+arena_metadata_allocated_sub
arena_migrate
arena_miscelm_get
arena_miscelm_to_pageind
@@ -90,6 +94,7 @@ atomic_sub_uint32
atomic_sub_uint64
atomic_sub_z
base_alloc
+base_allocated_get
base_boot
base_calloc
base_node_alloc
@@ -205,6 +210,7 @@ hash_rotl_64
hash_x64_128
hash_x86_128
hash_x86_32
+huge_aalloc
huge_allocated
huge_boot
huge_dalloc
@@ -221,10 +227,13 @@ huge_prof_tctx_set
huge_ralloc
huge_ralloc_no_move
huge_salloc
+iaalloc
+iallocztm
icalloc
icalloct
idalloc
idalloct
+idalloctm
imalloc
imalloct
in_valgrind
@@ -234,6 +243,7 @@ index2size_lookup
index2size_tab
ipalloc
ipalloct
+ipallocztm
iqalloc
iralloc
iralloct
diff --git a/include/jemalloc/internal/stats.h b/include/jemalloc/internal/stats.h
index d8600ed..7cba77b 100644
--- a/include/jemalloc/internal/stats.h
+++ b/include/jemalloc/internal/stats.h
@@ -111,6 +111,13 @@ struct arena_stats_s {
uint64_t nmadvise;
uint64_t purged;
+ /*
+ * Number of bytes currently mapped purely for metadata purposes, and
+ * number of bytes currently allocated for internal metadata.
+ */
+ size_t metadata_mapped;
+ size_t metadata_allocated; /* Protected via atomic_*_z(). */
+
/* Per-size-category statistics. */
size_t allocated_large;
uint64_t nmalloc_large;