Diffstat (limited to 'include/jemalloc/internal/prof.h'):
 include/jemalloc/internal/prof.h | 58 ++++++++++++++++++++++++++++++++++++++++++++++++------------------
 1 file changed, 40 insertions(+), 18 deletions(-)
diff --git a/include/jemalloc/internal/prof.h b/include/jemalloc/internal/prof.h
index 2e22711..e5198c3 100644
--- a/include/jemalloc/internal/prof.h
+++ b/include/jemalloc/internal/prof.h
@@ -90,10 +90,11 @@ struct prof_tctx_s {
 	prof_tdata_t	*tdata;

 	/*
-	 * Copy of tdata->thr_uid, necessary because tdata may be defunct during
-	 * teardown.
+	 * Copy of tdata->thr_{uid,discrim}, necessary because tdata may be
+	 * defunct during teardown.
 	 */
 	uint64_t	thr_uid;
+	uint64_t	thr_discrim;

 	/* Profiling counters, protected by tdata->lock. */
 	prof_cnt_t	cnts;
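
The new thr_discrim field pairs with thr_uid, presumably so that two tdata incarnations sharing a thr_uid stay distinguishable; copying both into the tctx lets later code compare tctxs without ever dereferencing a possibly-defunct tdata. A minimal sketch of such a comparison, using only the copied fields (the comparator name is illustrative, not part of the patch):

static int
prof_tctx_comp_sketch(const prof_tctx_t *a, const prof_tctx_t *b)
{

	/* Order by (thr_uid, thr_discrim); never touches a->tdata/b->tdata. */
	if (a->thr_uid != b->thr_uid)
		return ((a->thr_uid < b->thr_uid) ? -1 : 1);
	if (a->thr_discrim != b->thr_discrim)
		return ((a->thr_discrim < b->thr_discrim) ? -1 : 1);
	return (0);
}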
@@ -330,14 +331,18 @@ bool prof_gdump_get_unlocked(void);
 prof_tdata_t	*prof_tdata_get(tsd_t *tsd, bool create);
 bool	prof_sample_accum_update(tsd_t *tsd, size_t usize, bool commit,
     prof_tdata_t **tdata_out);
-prof_tctx_t	*prof_alloc_prep(tsd_t *tsd, size_t usize, bool update);
+prof_tctx_t	*prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
+    bool update);
 prof_tctx_t	*prof_tctx_get(const void *ptr);
-void	prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
+void	prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
+void	prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
+    prof_tctx_t *tctx);
 void	prof_malloc_sample_object(const void *ptr, size_t usize,
     prof_tctx_t *tctx);
 void	prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx);
 void	prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
-    prof_tctx_t *tctx, bool updated, size_t old_usize, prof_tctx_t *old_tctx);
+    prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
+    size_t old_usize, prof_tctx_t *old_tctx);
 void	prof_free(tsd_t *tsd, const void *ptr, size_t usize);

 #endif
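
Two signature changes stand out. prof_alloc_prep() and prof_realloc() now receive prof_active from the caller, so the prof.active switch is read once per operation and both functions act on the same value even if it is toggled concurrently; prof_realloc() also gains old_ptr, which feeds the new prof_tctx_reset() path below. A hypothetical caller under those assumptions (reallocate() is an invented helper; error handling elided):

static void *
rallocx_prof_sketch(tsd_t *tsd, void *old_ptr, size_t usize)
{
	bool prof_active = prof_active_get_unlocked();	/* Read exactly once. */
	size_t old_usize = isalloc(old_ptr, true);
	prof_tctx_t *old_tctx = prof_tctx_get(old_ptr);
	prof_tctx_t *tctx = prof_alloc_prep(tsd, usize, prof_active, true);
	void *p = reallocate(old_ptr, usize);	/* Invented resizing helper. */

	/*
	 * update/updated travel together: prof_alloc_prep() committed the
	 * sample counter (update=true), so updated=true here keeps
	 * prof_realloc() from counting the same bytes twice.
	 */
	prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr,
	    old_usize, old_tctx);
	return (p);
}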
@@ -402,13 +407,24 @@ prof_tctx_get(const void *ptr)
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
+prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx)
 {

 	cassert(config_prof);
 	assert(ptr != NULL);

-	arena_prof_tctx_set(ptr, tctx);
+	arena_prof_tctx_set(ptr, usize, tctx);
+}
+
+JEMALLOC_ALWAYS_INLINE void
+prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
+    prof_tctx_t *old_tctx)
+{
+
+	cassert(config_prof);
+	assert(ptr != NULL);
+
+	arena_prof_tctx_reset(ptr, usize, old_ptr, old_tctx);
 }

 JEMALLOC_ALWAYS_INLINE bool
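
prof_tctx_set() unconditionally stores a tctx for ptr, whereas prof_tctx_reset() is specialized for the non-sampled realloc path: because it also sees old_ptr and old_tctx, the arena layer can recognize when the metadata write would be a no-op. The arena side is not part of this header; the sketch below captures only the apparent intent, assuming metadata for an unmoved, previously-unsampled object already reads "not sampled":

static void
prof_tctx_reset_sketch(const void *ptr, size_t usize, const void *old_ptr,
    prof_tctx_t *old_tctx)
{

	if (ptr == old_ptr && (uintptr_t)old_tctx <= (uintptr_t)1U) {
		/* Unmoved and previously unsampled: nothing to rewrite. */
		return;
	}
	prof_tctx_set(ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
}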
@@ -442,7 +458,7 @@ prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
 }

 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_alloc_prep(tsd_t *tsd, size_t usize, bool update)
+prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update)
 {
 	prof_tctx_t *ret;
 	prof_tdata_t *tdata;
@@ -450,8 +466,8 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool update)

 	assert(usize == s2u(usize));

-	if (!prof_active_get_unlocked() || likely(prof_sample_accum_update(tsd,
-	    usize, update, &tdata)))
+	if (!prof_active || likely(prof_sample_accum_update(tsd, usize, update,
+	    &tdata)))
 		ret = (prof_tctx_t *)(uintptr_t)1U;
 	else {
 		bt_init(&bt, tdata->vec);
@@ -473,22 +489,24 @@ prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx)
 	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
 		prof_malloc_sample_object(ptr, usize, tctx);
 	else
-		prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
+		prof_tctx_set(ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
 }

 JEMALLOC_ALWAYS_INLINE void
 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
-    bool updated, size_t old_usize, prof_tctx_t *old_tctx)
+    bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
+    prof_tctx_t *old_tctx)
 {
+	bool sampled, old_sampled;

 	cassert(config_prof);
 	assert(ptr != NULL || (uintptr_t)tctx <= (uintptr_t)1U);

-	if (!updated && ptr != NULL) {
+	if (prof_active && !updated && ptr != NULL) {
 		assert(usize == isalloc(ptr, true));
 		if (prof_sample_accum_update(tsd, usize, true, NULL)) {
 			/*
-			 * Don't sample.  The usize passed to PROF_ALLOC_PREP()
+			 * Don't sample.  The usize passed to prof_alloc_prep()
 			 * was larger than what actually got allocated, so a
 			 * backtrace was captured for this allocation, even
 			 * though its actual usize was insufficient to cross the
@@ -498,12 +516,16 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
 		}
 	}

-	if (unlikely((uintptr_t)old_tctx > (uintptr_t)1U))
-		prof_free_sampled_object(tsd, old_usize, old_tctx);
-	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
+	sampled = ((uintptr_t)tctx > (uintptr_t)1U);
+	old_sampled = ((uintptr_t)old_tctx > (uintptr_t)1U);
+
+	if (unlikely(sampled))
 		prof_malloc_sample_object(ptr, usize, tctx);
 	else
-		prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
+		prof_tctx_reset(ptr, usize, old_ptr, old_tctx);
+
+	if (unlikely(old_sampled))
+		prof_free_sampled_object(tsd, old_usize, old_tctx);
 }

 JEMALLOC_ALWAYS_INLINE void
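
Throughout these inlines, tctx doubles as a tagged value: anything at or below (uintptr_t)1U is a sentinel meaning there is no sampled object to track, and only larger values point at a live prof_tctx_t; the new sampled/old_sampled locals simply name that test. The reordering in prof_realloc() also installs the new allocation's tctx before the old sampled object is released, presumably so there is no window in which ptr's profiling metadata is stale while the old counters have already been torn down. A reading aid for the encoding (the helper is invented, not API):

static bool
prof_tctx_is_sampled_sketch(const prof_tctx_t *tctx)
{

	/* Sentinels NULL and (prof_tctx_t *)1U both mean "not sampled". */
	return ((uintptr_t)tctx > (uintptr_t)1U);
}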