-rw-r--r--  include/jemalloc/internal/private_namespace.h    1
-rw-r--r--  include/jemalloc/internal/prof.h               122
-rw-r--r--  src/jemalloc.c                                   49
3 files changed, 90 insertions(+), 82 deletions(-)
diff --git a/include/jemalloc/internal/private_namespace.h b/include/jemalloc/internal/private_namespace.h
index 2cfb171..d4f5f96 100644
--- a/include/jemalloc/internal/private_namespace.h
+++ b/include/jemalloc/internal/private_namespace.h
@@ -145,7 +145,6 @@
#define malloc_write JEMALLOC_N(malloc_write)
#define mb_write JEMALLOC_N(mb_write)
#define pow2_ceil JEMALLOC_N(pow2_ceil)
-#define prof_alloc_prep JEMALLOC_N(prof_alloc_prep)
#define prof_backtrace JEMALLOC_N(prof_backtrace)
#define prof_boot0 JEMALLOC_N(prof_boot0)
#define prof_boot1 JEMALLOC_N(prof_boot1)
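An aside on what this header does: each entry token-pastes a configure-chosen prefix onto a private symbol name via JEMALLOC_N(), so that statically linked private symbols cannot collide with application symbols. The prof_alloc_prep entry is removed because the rest of this patch turns that function into a macro, which needs no linker-visible name. A minimal sketch of the technique, assuming an illustrative je_ prefix and helper macros that are not jemalloc's actual definitions:

#include <stdio.h>

/* Two-level concatenation so the prefix macro expands before pasting.
 * The je_ prefix is an assumption; jemalloc substitutes the real one
 * at configure time. */
#define PREFIX je_
#define CONCAT_(a, b) a##b
#define CONCAT(a, b) CONCAT_(a, b)
#define JEMALLOC_N(n) CONCAT(PREFIX, n)

#define prof_backtrace JEMALLOC_N(prof_backtrace)

/* After preprocessing, this defines and calls je_prof_backtrace(). */
static void prof_backtrace(void) { printf("je_prof_backtrace\n"); }

int main(void) { prof_backtrace(); return (0); }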
diff --git a/include/jemalloc/internal/prof.h b/include/jemalloc/internal/prof.h
index f943873..e9064ba 100644
--- a/include/jemalloc/internal/prof.h
+++ b/include/jemalloc/internal/prof.h
@@ -227,9 +227,60 @@ bool prof_boot2(void);
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES
+#define PROF_ALLOC_PREP(nignore, size, ret) do { \
+ prof_tdata_t *prof_tdata; \
+ prof_bt_t bt; \
+ \
+ assert(size == s2u(size)); \
+ \
+ prof_tdata = PROF_TCACHE_GET(); \
+ if (prof_tdata == NULL) { \
+ prof_tdata = prof_tdata_init(); \
+ if (prof_tdata == NULL) { \
+ ret = NULL; \
+ break; \
+ } \
+ } \
+ \
+ if (opt_prof_active == false) { \
+ /* Sampling is currently inactive, so avoid sampling. */\
+ ret = (prof_thr_cnt_t *)(uintptr_t)1U; \
+ } else if (opt_lg_prof_sample == 0) { \
+ /* Don't bother with sampling logic, since sampling */\
+ /* interval is 1. */\
+ bt_init(&bt, prof_tdata->vec); \
+ prof_backtrace(&bt, nignore, prof_bt_max); \
+ ret = prof_lookup(&bt); \
+ } else { \
+ if (prof_tdata->threshold == 0) { \
+ /* Initialize. Seed the prng differently for */\
+ /* each thread. */\
+ prof_tdata->prn_state = \
+ (uint64_t)(uintptr_t)&size; \
+ prof_sample_threshold_update(prof_tdata); \
+ } \
+ \
+ /* Determine whether to capture a backtrace based on */\
+ /* whether size is enough for prof_accum to reach */\
+ /* prof_tdata->threshold. However, delay updating */\
+ /* these variables until prof_{m,re}alloc(), because */\
+ /* we don't know for sure that the allocation will */\
+ /* succeed. */\
+ /* */\
+ /* Use subtraction rather than addition to avoid */\
+ /* potential integer overflow. */\
+ if (size >= prof_tdata->threshold - \
+ prof_tdata->accum) { \
+ bt_init(&bt, prof_tdata->vec); \
+ prof_backtrace(&bt, nignore, prof_bt_max); \
+ ret = prof_lookup(&bt); \
+ } else \
+ ret = (prof_thr_cnt_t *)(uintptr_t)1U; \
+ } \
+} while (0)
+
#ifndef JEMALLOC_ENABLE_INLINE
void prof_sample_threshold_update(prof_tdata_t *prof_tdata);
-prof_thr_cnt_t *prof_alloc_prep(size_t size);
prof_ctx_t *prof_ctx_get(const void *ptr);
void prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
bool prof_sample_accum_update(size_t size);
@@ -272,71 +323,6 @@ prof_sample_threshold_update(prof_tdata_t *prof_tdata)
+ (uint64_t)1U;
}
-JEMALLOC_INLINE prof_thr_cnt_t *
-prof_alloc_prep(size_t size)
-{
-#ifdef JEMALLOC_ENABLE_INLINE
- /* This function does not have its own stack frame, because it is inlined. */
-# define NIGNORE 1
-#else
-# define NIGNORE 2
-#endif
- prof_thr_cnt_t *ret;
- prof_tdata_t *prof_tdata;
- prof_bt_t bt;
-
- assert(size == s2u(size));
-
- prof_tdata = PROF_TCACHE_GET();
- if (prof_tdata == NULL) {
- prof_tdata = prof_tdata_init();
- if (prof_tdata == NULL)
- return (NULL);
- }
-
- if (opt_prof_active == false) {
- /* Sampling is currently inactive, so avoid sampling. */
- ret = (prof_thr_cnt_t *)(uintptr_t)1U;
- } else if (opt_lg_prof_sample == 0) {
- /*
- * Don't bother with sampling logic, since sampling interval is
- * 1.
- */
- bt_init(&bt, prof_tdata->vec);
- prof_backtrace(&bt, NIGNORE, prof_bt_max);
- ret = prof_lookup(&bt);
- } else {
- if (prof_tdata->threshold == 0) {
- /*
- * Initialize. Seed the prng differently for each
- * thread.
- */
- prof_tdata->prn_state = (uint64_t)(uintptr_t)&size;
- prof_sample_threshold_update(prof_tdata);
- }
-
- /*
- * Determine whether to capture a backtrace based on whether
- * size is enough for prof_accum to reach
- * prof_tdata->threshold. However, delay updating these
- * variables until prof_{m,re}alloc(), because we don't know
- * for sure that the allocation will succeed.
- *
- * Use subtraction rather than addition to avoid potential
- * integer overflow.
- */
- if (size >= prof_tdata->threshold - prof_tdata->accum) {
- bt_init(&bt, prof_tdata->vec);
- prof_backtrace(&bt, NIGNORE, prof_bt_max);
- ret = prof_lookup(&bt);
- } else
- ret = (prof_thr_cnt_t *)(uintptr_t)1U;
- }
-
- return (ret);
-#undef NIGNORE
-}
-
JEMALLOC_INLINE prof_ctx_t *
prof_ctx_get(const void *ptr)
{
@@ -415,7 +401,7 @@ prof_malloc(const void *ptr, size_t size, prof_thr_cnt_t *cnt)
* always possible to tell in advance how large an
* object's usable size will be, so there should never
* be a difference between the size passed to
- * prof_alloc_prep() and prof_malloc().
+ * PROF_ALLOC_PREP() and prof_malloc().
*/
assert((uintptr_t)cnt == (uintptr_t)1U);
}
@@ -459,7 +445,7 @@ prof_realloc(const void *ptr, size_t size, prof_thr_cnt_t *cnt,
if (prof_sample_accum_update(size)) {
/*
* Don't sample. The size passed to
- * prof_alloc_prep() was larger than what
+ * PROF_ALLOC_PREP() was larger than what
* actually got allocated, so a backtrace was
* captured for this allocation, even though
* its actual size was insufficient to cross
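The core of the new PROF_ALLOC_PREP() macro is the bytes-based sampling decision carried over verbatim from prof_alloc_prep(): each thread accumulates allocated bytes in prof_tdata->accum and captures a backtrace once the total would reach prof_tdata->threshold. A standalone sketch of just that comparison, with the thread data reduced to the two fields involved (names mirror the patch; this is an illustration, not jemalloc code):

#include <stdbool.h>
#include <stdint.h>

typedef struct {
	uint64_t accum;     /* Bytes allocated since the last sample. */
	uint64_t threshold; /* Bytes that must accumulate to trigger one. */
} tdata_sketch_t;

static bool
should_sample(const tdata_sketch_t *tdata, uint64_t size)
{
	/*
	 * Logically (tdata->accum + size >= tdata->threshold), but the
	 * addition could wrap around UINT64_MAX for a huge size.  Since
	 * accum stays below threshold between samples, the subtraction
	 * cannot underflow, so this form is overflow-safe.
	 */
	return (size >= tdata->threshold - tdata->accum);
}

Note that the macro only decides; accum and threshold are updated later, in prof_malloc()/prof_realloc(), because the allocation may still fail.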
diff --git a/src/jemalloc.c b/src/jemalloc.c
index 4d10e90..14a0c7c 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -84,6 +84,7 @@ static void malloc_conf_error(const char *msg, const char *k, size_t klen,
const char *v, size_t vlen);
static void malloc_conf_init(void);
static bool malloc_init_hard(void);
+static int imemalign(void **memptr, size_t alignment, size_t size);
/******************************************************************************/
/* malloc_message() setup. */
@@ -939,7 +940,8 @@ JEMALLOC_P(malloc)(size_t size)
#ifdef JEMALLOC_PROF
if (opt_prof) {
usize = s2u(size);
- if ((cnt = prof_alloc_prep(usize)) == NULL) {
+ PROF_ALLOC_PREP(1, usize, cnt);
+ if (cnt == NULL) {
ret = NULL;
goto OOM;
}
@@ -988,9 +990,15 @@ RETURN:
}
JEMALLOC_ATTR(nonnull(1))
-JEMALLOC_ATTR(visibility("default"))
-int
-JEMALLOC_P(posix_memalign)(void **memptr, size_t alignment, size_t size)
+#ifdef JEMALLOC_PROF
+/*
+ * Avoid any uncertainty as to how many backtrace frames to ignore in
+ * PROF_ALLOC_PREP().
+ */
+JEMALLOC_ATTR(noinline)
+#endif
+static int
+imemalign(void **memptr, size_t alignment, size_t size)
{
int ret;
size_t usize
@@ -1057,7 +1065,8 @@ JEMALLOC_P(posix_memalign)(void **memptr, size_t alignment, size_t size)
#ifdef JEMALLOC_PROF
if (opt_prof) {
- if ((cnt = prof_alloc_prep(usize)) == NULL) {
+ PROF_ALLOC_PREP(2, usize, cnt);
+ if (cnt == NULL) {
result = NULL;
ret = EINVAL;
} else {
@@ -1110,6 +1119,15 @@ RETURN:
return (ret);
}
+JEMALLOC_ATTR(nonnull(1))
+JEMALLOC_ATTR(visibility("default"))
+int
+JEMALLOC_P(posix_memalign)(void **memptr, size_t alignment, size_t size)
+{
+
+ return imemalign(memptr, alignment, size);
+}
+
JEMALLOC_ATTR(malloc)
JEMALLOC_ATTR(visibility("default"))
void *
@@ -1165,7 +1183,8 @@ JEMALLOC_P(calloc)(size_t num, size_t size)
#ifdef JEMALLOC_PROF
if (opt_prof) {
usize = s2u(num_size);
- if ((cnt = prof_alloc_prep(usize)) == NULL) {
+ PROF_ALLOC_PREP(1, usize, cnt);
+ if (cnt == NULL) {
ret = NULL;
goto RETURN;
}
@@ -1278,7 +1297,8 @@ JEMALLOC_P(realloc)(void *ptr, size_t size)
if (opt_prof) {
usize = s2u(size);
old_ctx = prof_ctx_get(ptr);
- if ((cnt = prof_alloc_prep(usize)) == NULL) {
+ PROF_ALLOC_PREP(1, usize, cnt);
+ if (cnt == NULL) {
ret = NULL;
goto OOM;
}
@@ -1327,7 +1347,8 @@ OOM:
#ifdef JEMALLOC_PROF
if (opt_prof) {
usize = s2u(size);
- if ((cnt = prof_alloc_prep(usize)) == NULL)
+ PROF_ALLOC_PREP(1, usize, cnt);
+ if (cnt == NULL)
ret = NULL;
else {
if (prof_promote && (uintptr_t)cnt !=
@@ -1432,7 +1453,7 @@ JEMALLOC_P(memalign)(size_t alignment, size_t size)
#ifdef JEMALLOC_CC_SILENCE
int result =
#endif
- JEMALLOC_P(posix_memalign)(&ret, alignment, size);
+ imemalign(&ret, alignment, size);
#ifdef JEMALLOC_CC_SILENCE
if (result != 0)
return (NULL);
@@ -1451,7 +1472,7 @@ JEMALLOC_P(valloc)(size_t size)
#ifdef JEMALLOC_CC_SILENCE
int result =
#endif
- JEMALLOC_P(posix_memalign)(&ret, PAGE_SIZE, size);
+ imemalign(&ret, PAGE_SIZE, size);
#ifdef JEMALLOC_CC_SILENCE
if (result != 0)
return (NULL);
@@ -1573,7 +1594,8 @@ JEMALLOC_P(allocm)(void **ptr, size_t *rsize, size_t size, int flags)
#ifdef JEMALLOC_PROF
if (opt_prof) {
- if ((cnt = prof_alloc_prep(usize)) == NULL)
+ PROF_ALLOC_PREP(1, usize, cnt);
+ if (cnt == NULL)
goto OOM;
if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
small_maxclass) {
@@ -1660,7 +1682,7 @@ JEMALLOC_P(rallocm)(void **ptr, size_t *rsize, size_t size, size_t extra,
/*
* usize isn't knowable before iralloc() returns when extra is
* non-zero. Therefore, compute its maximum possible value and
- * use that in prof_alloc_prep() to decide whether to capture a
+ * use that in PROF_ALLOC_PREP() to decide whether to capture a
* backtrace. prof_realloc() will use the actual usize to
* decide whether to sample.
*/
@@ -1668,7 +1690,8 @@ JEMALLOC_P(rallocm)(void **ptr, size_t *rsize, size_t size, size_t extra,
sa2u(size+extra, alignment, NULL);
old_size = isalloc(p);
old_ctx = prof_ctx_get(p);
- if ((cnt = prof_alloc_prep(max_usize)) == NULL)
+ PROF_ALLOC_PREP(1, max_usize, cnt);
+ if (cnt == NULL)
goto OOM;
/*
* Use minimum usize to determine whether promotion may happen.
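What motivates both halves of this patch is the nignore argument that PROF_ALLOC_PREP() passes to prof_backtrace(): it counts the innermost stack frames that belong to the allocator and must be skipped, so the profile attributes each allocation to application code. An inline function may or may not contribute a frame, hence the old NIGNORE 1/2 dance inside prof_alloc_prep(); a macro contributes no frame, and a noinline function contributes exactly one, which makes PROF_ALLOC_PREP(1, ...) correct in the public entry points and PROF_ALLOC_PREP(2, ...) correct inside imemalign(). A standalone sketch of the same frame-skipping idea using glibc's backtrace(); the function names here are invented for illustration:

#include <execinfo.h>

/* Skip two innermost frames: capture_bt() itself plus its noinline
 * caller, mirroring PROF_ALLOC_PREP(2, ...) in imemalign(). */
#define NIGNORE 2

__attribute__((noinline)) void
capture_bt(int nignore)
{
	void *vec[32];
	int n = backtrace(vec, 32);

	/* vec[0] is capture_bt, vec[1] is alloc_entry; the first frame
	 * written to stdout should be the application caller, main(). */
	if (n > nignore)
		backtrace_symbols_fd(&vec[nignore], n - nignore, 1);
}

/* Stands in for imemalign(): without the noinline attribute the
 * compiler could inline this, it would contribute no frame, and
 * NIGNORE would be off by one -- exactly the uncertainty this patch
 * removes. */
__attribute__((noinline)) void
alloc_entry(void)
{
	capture_bt(NIGNORE);
}

int
main(void)
{
	alloc_entry();
	return (0);
}

Link with -rdynamic if you want backtrace_symbols_fd() to print symbol names rather than raw addresses.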