Diffstat (limited to 'include/jemalloc/internal/prof.h')
-rw-r--r--  include/jemalloc/internal/prof.h  122
1 file changed, 54 insertions, 68 deletions
diff --git a/include/jemalloc/internal/prof.h b/include/jemalloc/internal/prof.h
index f943873..e9064ba 100644
--- a/include/jemalloc/internal/prof.h
+++ b/include/jemalloc/internal/prof.h
@@ -227,9 +227,60 @@ bool prof_boot2(void);
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES
+#define PROF_ALLOC_PREP(nignore, size, ret) do { \
+ prof_tdata_t *prof_tdata; \
+ prof_bt_t bt; \
+ \
+ assert(size == s2u(size)); \
+ \
+ prof_tdata = PROF_TCACHE_GET(); \
+ if (prof_tdata == NULL) { \
+ prof_tdata = prof_tdata_init(); \
+ if (prof_tdata == NULL) { \
+ ret = NULL; \
+ break; \
+ } \
+ } \
+ \
+ if (opt_prof_active == false) { \
+ /* Sampling is currently inactive, so avoid sampling. */\
+ ret = (prof_thr_cnt_t *)(uintptr_t)1U; \
+ } else if (opt_lg_prof_sample == 0) { \
+ /* Don't bother with sampling logic, since sampling */\
+ /* interval is 1. */\
+ bt_init(&bt, prof_tdata->vec); \
+ prof_backtrace(&bt, nignore, prof_bt_max); \
+ ret = prof_lookup(&bt); \
+ } else { \
+ if (prof_tdata->threshold == 0) { \
+ /* Initialize. Seed the prng differently for */\
+ /* each thread. */\
+ prof_tdata->prn_state = \
+ (uint64_t)(uintptr_t)&size; \
+ prof_sample_threshold_update(prof_tdata); \
+ } \
+ \
+ /* Determine whether to capture a backtrace based on */\
+ /* whether size is enough for prof_accum to reach */\
+ /* prof_tdata->threshold. However, delay updating */\
+ /* these variables until prof_{m,re}alloc(), because */\
+ /* we don't know for sure that the allocation will */\
+ /* succeed. */\
+ /* */\
+ /* Use subtraction rather than addition to avoid */\
+ /* potential integer overflow. */\
+ if (size >= prof_tdata->threshold - \
+ prof_tdata->accum) { \
+ bt_init(&bt, prof_tdata->vec); \
+ prof_backtrace(&bt, nignore, prof_bt_max); \
+ ret = prof_lookup(&bt); \
+ } else \
+ ret = (prof_thr_cnt_t *)(uintptr_t)1U; \
+ } \
+} while (0)
+
#ifndef JEMALLOC_ENABLE_INLINE
void prof_sample_threshold_update(prof_tdata_t *prof_tdata);
-prof_thr_cnt_t *prof_alloc_prep(size_t size);
prof_ctx_t *prof_ctx_get(const void *ptr);
void prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
bool prof_sample_accum_update(size_t size);
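Since PROF_ALLOC_PREP() is a do/while (0) statement macro rather than a function, its result comes back through the ret out-parameter, and the break taken when prof_tdata_init() fails exits the macro body, not any enclosing loop. A minimal sketch of a call site follows; only PROF_ALLOC_PREP(), prof_malloc(), and the prof_* types come from this header, while profiled_malloc() and the plain malloc() stand-in are hypothetical:

	/*
	 * Hypothetical caller, assuming the jemalloc-internal headers are in
	 * scope and that usize already satisfies usize == s2u(usize), as the
	 * macro's assertion requires.
	 */
	static void *
	profiled_malloc(size_t usize)
	{
		prof_thr_cnt_t *cnt;
		void *p;

		/* nignore == 1: skip one stack frame in the backtrace. */
		PROF_ALLOC_PREP(1, usize, cnt);
		if (cnt == NULL)
			return (NULL);	/* Per-thread prof data init failed. */

		p = malloc(usize);	/* Stand-in for the real allocation path. */
		if (p == NULL)
			return (NULL);

		/* Commit the sampling decision only after the allocation
		 * is known to have succeeded. */
		prof_malloc(p, usize, cnt);
		return (p);
	}

The do/while (0) wrapper is what lets the macro behave as a single statement (safe even under an unbraced if/else) while still giving break a way to bail out early.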
@@ -272,71 +323,6 @@ prof_sample_threshold_update(prof_tdata_t *prof_tdata)
+ (uint64_t)1U;
}
-JEMALLOC_INLINE prof_thr_cnt_t *
-prof_alloc_prep(size_t size)
-{
-#ifdef JEMALLOC_ENABLE_INLINE
- /* This function does not have its own stack frame, because it is inlined. */
-# define NIGNORE 1
-#else
-# define NIGNORE 2
-#endif
- prof_thr_cnt_t *ret;
- prof_tdata_t *prof_tdata;
- prof_bt_t bt;
-
- assert(size == s2u(size));
-
- prof_tdata = PROF_TCACHE_GET();
- if (prof_tdata == NULL) {
- prof_tdata = prof_tdata_init();
- if (prof_tdata == NULL)
- return (NULL);
- }
-
- if (opt_prof_active == false) {
- /* Sampling is currently inactive, so avoid sampling. */
- ret = (prof_thr_cnt_t *)(uintptr_t)1U;
- } else if (opt_lg_prof_sample == 0) {
- /*
- * Don't bother with sampling logic, since sampling interval is
- * 1.
- */
- bt_init(&bt, prof_tdata->vec);
- prof_backtrace(&bt, NIGNORE, prof_bt_max);
- ret = prof_lookup(&bt);
- } else {
- if (prof_tdata->threshold == 0) {
- /*
- * Initialize. Seed the prng differently for each
- * thread.
- */
- prof_tdata->prn_state = (uint64_t)(uintptr_t)&size;
- prof_sample_threshold_update(prof_tdata);
- }
-
- /*
- * Determine whether to capture a backtrace based on whether
- * size is enough for prof_accum to reach
- * prof_tdata->threshold. However, delay updating these
- * variables until prof_{m,re}alloc(), because we don't know
- * for sure that the allocation will succeed.
- *
- * Use subtraction rather than addition to avoid potential
- * integer overflow.
- */
- if (size >= prof_tdata->threshold - prof_tdata->accum) {
- bt_init(&bt, prof_tdata->vec);
- prof_backtrace(&bt, NIGNORE, prof_bt_max);
- ret = prof_lookup(&bt);
- } else
- ret = (prof_thr_cnt_t *)(uintptr_t)1U;
- }
-
- return (ret);
-#undef NIGNORE
-}
-
JEMALLOC_INLINE prof_ctx_t *
prof_ctx_get(const void *ptr)
{
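The "use subtraction rather than addition" comment is worth a concrete illustration. Between updates the code keeps accum strictly below threshold, so threshold - accum can never underflow, whereas the naive accum + size >= threshold form can wrap around for a very large size and spuriously skip a sample. A self-contained demonstration with made-up values:

	#include <stdint.h>
	#include <stdio.h>

	int
	main(void)
	{
		/* Invariant maintained by the sampling code: accum < threshold. */
		uint64_t threshold = UINT64_C(1) << 13;
		uint64_t accum = 100;
		uint64_t size = UINT64_MAX - 50;	/* Absurdly large request. */

		/* Overflow-safe form used by PROF_ALLOC_PREP(): prints "sample". */
		printf("subtraction: %s\n",
		    (size >= threshold - accum) ? "sample" : "skip");

		/* Naive form: accum + size wraps around to 49, so this prints
		 * "skip" even though size alone dwarfs the threshold. */
		printf("addition:    %s\n",
		    (accum + size >= threshold) ? "sample" : "skip");

		return (0);
	}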
@@ -415,7 +401,7 @@ prof_malloc(const void *ptr, size_t size, prof_thr_cnt_t *cnt)
* always possible to tell in advance how large an
* object's usable size will be, so there should never
* be a difference between the size passed to
- * prof_alloc_prep() and prof_malloc().
+ * PROF_ALLOC_PREP() and prof_malloc().
*/
assert((uintptr_t)cnt == (uintptr_t)1U);
}
@@ -459,7 +445,7 @@ prof_realloc(const void *ptr, size_t size, prof_thr_cnt_t *cnt,
if (prof_sample_accum_update(size)) {
/*
* Don't sample. The size passed to
- * prof_alloc_prep() was larger than what
+ * PROF_ALLOC_PREP() was larger than what
* actually got allocated, so a backtrace was
* captured for this allocation, even though
* its actual size was insufficient to cross
* the sample threshold.
*/
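The deferred update these comments describe works roughly as follows: PROF_ALLOC_PREP() only reads threshold and accum, and prof_sample_accum_update() later folds the actually-allocated size into accum, returning true when the threshold was not really crossed, i.e. the backtrace was captured speculatively and should be discarded. A sketch of that protocol, reusing the field and function names from this header; the body is illustrative, not the actual prof_sample_accum_update():

	/*
	 * Illustrative sketch: returns true when size fails to push accum
	 * across threshold, meaning the allocation should not be sampled.
	 */
	static bool
	sample_accum_update_sketch(prof_tdata_t *prof_tdata, size_t size)
	{
		/* Same overflow-safe comparison as PROF_ALLOC_PREP(). */
		if (size < prof_tdata->threshold - prof_tdata->accum) {
			/* Threshold not reached: just accumulate. */
			prof_tdata->accum += size;
			return (true);
		}

		/* Threshold crossed: reset the accumulator and draw a fresh
		 * pseudo-random threshold for the next sample period. */
		prof_tdata->accum = 0;
		prof_sample_threshold_update(prof_tdata);
		return (false);
	}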