summary refs log tree commit diff stats
path: root/jemalloc
diff options
context:
space:
mode:
author	Jason Evans <je@facebook.com>	2010-04-01 01:43:24 (GMT)
committer	Jason Evans <je@facebook.com>	2010-04-01 01:43:24 (GMT)
commitf18c98200145de70779a1b3286e7829b0268231e (patch)
tree95fa6b8e63861d0d34efd0ca4bc5008fe9cc6512 /jemalloc
parenta02fc08ec9dd8479a6430155b6a433da09f6ff10 (diff)
downloadjemalloc-f18c98200145de70779a1b3286e7829b0268231e.zip
jemalloc-f18c98200145de70779a1b3286e7829b0268231e.tar.gz
jemalloc-f18c98200145de70779a1b3286e7829b0268231e.tar.bz2
Add sampling activation/deactivation control.
Add the E/e options to control whether the application starts with sampling active/inactive (secondary control to F/f). Add the prof.active mallctl so that the application can activate/deactivate sampling on the fly.
Diffstat (limited to 'jemalloc')
-rw-r--r--	jemalloc/doc/jemalloc.3.in	21
-rw-r--r--	jemalloc/include/jemalloc/internal/prof.h	7
-rw-r--r--	jemalloc/src/ctl.c	29
-rw-r--r--	jemalloc/src/jemalloc.c	6
-rw-r--r--	jemalloc/src/prof.c	6
5 files changed, 68 insertions, 1 deletion
diff --git a/jemalloc/doc/jemalloc.3.in b/jemalloc/doc/jemalloc.3.in
index 13e616a..7e80916 100644
--- a/jemalloc/doc/jemalloc.3.in
+++ b/jemalloc/doc/jemalloc.3.in
@@ -341,6 +341,16 @@ physical memory becomes scarce and the pages remain unused.
The default minimum ratio is 32:1;
.Ev JEMALLOC_OPTIONS=6D
will disable dirty page purging.
+@roff_prof@.It E
+@roff_prof@Activate/deactivate profiling.
+@roff_prof@This is a secondary control mechanism that makes it possible to
+@roff_prof@start the application with profiling enabled (see the
+@roff_prof@.Dq F
+@roff_prof@option) but inactive, then toggle profiling at any time during
+@roff_prof@program execution with the
+@roff_prof@.Dq prof.active
+@roff_prof@mallctl.
+@roff_prof@This option is enabled by default.
@roff_prof@.It F
@roff_prof@Profile memory allocation activity, and use an
@roff_prof@.Xr atexit 3
@@ -356,6 +366,9 @@ will disable dirty page purging.
@roff_prof@.Dq B
@roff_prof@option for backtrace depth control.
@roff_prof@See the
+@roff_prof@.Dq E
+@roff_prof@option for on-the-fly activation/deactivation.
+@roff_prof@See the
@roff_prof@.Dq S
@roff_prof@option for probabilistic sampling control.
@roff_prof@See the
@@ -993,6 +1006,14 @@ Total number of large size classes.
Maximum size supported by this large size class.
.Ed
.\"-----------------------------------------------------------------------------
+@roff_prof@.It Sy "prof.active (bool) rw"
+@roff_prof@.Bd -ragged -offset indent -compact
+@roff_prof@Control whether sampling is currently active.
+@roff_prof@See the
+@roff_prof@.Dq E
+@roff_prof@option for additional information.
+@roff_prof@.Ed
+.\"-----------------------------------------------------------------------------
@roff_prof@.It Sy "prof.dump (const char *) -w"
@roff_prof@.Bd -ragged -offset indent -compact
@roff_prof@Dump a memory profile to the specified file, or if NULL is specified,
diff --git a/jemalloc/include/jemalloc/internal/prof.h b/jemalloc/include/jemalloc/internal/prof.h
index 2a0e539..6e71552 100644
--- a/jemalloc/include/jemalloc/internal/prof.h
+++ b/jemalloc/include/jemalloc/internal/prof.h
@@ -119,6 +119,13 @@ struct prof_ctx_s {
#ifdef JEMALLOC_H_EXTERNS
extern bool opt_prof;
+/*
+ * Even if opt_prof is true, sampling can be temporarily disabled by setting
+ * opt_prof_active to false. No locking is used when updating opt_prof_active,
+ * so there are no guarantees regarding how long it will take for all threads
+ * to notice state changes.
+ */
+extern bool opt_prof_active;
extern size_t opt_lg_prof_bt_max; /* Maximum backtrace depth. */
extern size_t opt_lg_prof_sample; /* Mean bytes between samples. */
extern ssize_t opt_lg_prof_interval; /* lg(prof_interval). */
diff --git a/jemalloc/src/ctl.c b/jemalloc/src/ctl.c
index f628c13..ffb732d 100644
--- a/jemalloc/src/ctl.c
+++ b/jemalloc/src/ctl.c
@@ -75,6 +75,7 @@ CTL_PROTO(opt_lg_tcache_gc_sweep)
#endif
#ifdef JEMALLOC_PROF
CTL_PROTO(opt_prof)
+CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_lg_prof_bt_max)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
@@ -125,6 +126,7 @@ CTL_PROTO(arenas_nhbins)
#endif
CTL_PROTO(arenas_nlruns)
#ifdef JEMALLOC_PROF
+CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_interval)
#endif
@@ -246,6 +248,7 @@ static const ctl_node_t opt_node[] = {
#endif
#ifdef JEMALLOC_PROF
{NAME("prof"), CTL(opt_prof)},
+ {NAME("prof_active"), CTL(opt_prof_active)},
{NAME("lg_prof_bt_max"), CTL(opt_lg_prof_bt_max)},
{NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
{NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
@@ -323,6 +326,7 @@ static const ctl_node_t arenas_node[] = {
#ifdef JEMALLOC_PROF
static const ctl_node_t prof_node[] = {
+ {NAME("active"), CTL(prof_active)},
{NAME("dump"), CTL(prof_dump)},
{NAME("interval"), CTL(prof_interval)}
};
@@ -1151,6 +1155,7 @@ CTL_RO_GEN(opt_lg_tcache_gc_sweep, opt_lg_tcache_gc_sweep, ssize_t)
#endif
#ifdef JEMALLOC_PROF
CTL_RO_GEN(opt_prof, opt_prof, bool)
+CTL_RO_GEN(opt_prof_active, opt_prof_active, bool)
CTL_RO_GEN(opt_lg_prof_bt_max, opt_lg_prof_bt_max, size_t)
CTL_RO_GEN(opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_GEN(opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
@@ -1248,6 +1253,30 @@ CTL_RO_GEN(arenas_nlruns, nlclasses, size_t)
#ifdef JEMALLOC_PROF
static int
+prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
+ void *newp, size_t newlen)
+{
+ int ret;
+ bool oldval;
+
+ oldval = opt_prof_active;
+ if (newp != NULL) {
+ /*
+ * The memory barriers will tend to make opt_prof_active
+ * propagate faster on systems with weak memory ordering.
+ */
+ mb_write();
+ WRITE(opt_prof_active, bool);
+ mb_write();
+ }
+ READ(oldval, bool);
+
+ ret = 0;
+RETURN:
+ return (ret);
+}
+
+static int
prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
void *newp, size_t newlen)
{
diff --git a/jemalloc/src/jemalloc.c b/jemalloc/src/jemalloc.c
index d3c7cca..e01de0d 100644
--- a/jemalloc/src/jemalloc.c
+++ b/jemalloc/src/jemalloc.c
@@ -460,6 +460,12 @@ MALLOC_OUT:
opt_lg_dirty_mult--;
break;
#ifdef JEMALLOC_PROF
+ case 'e':
+ opt_prof_active = false;
+ break;
+ case 'E':
+ opt_prof_active = true;
+ break;
case 'f':
opt_prof = false;
break;
diff --git a/jemalloc/src/prof.c b/jemalloc/src/prof.c
index 97db422..4cdebb9 100644
--- a/jemalloc/src/prof.c
+++ b/jemalloc/src/prof.c
@@ -18,6 +18,7 @@
/* Data. */
bool opt_prof = false;
+bool opt_prof_active = true;
size_t opt_lg_prof_bt_max = LG_PROF_BT_MAX_DEFAULT;
size_t opt_lg_prof_sample = LG_PROF_SAMPLE_DEFAULT;
ssize_t opt_lg_prof_interval = LG_PROF_INTERVAL_DEFAULT;
@@ -537,7 +538,10 @@ prof_alloc_prep(size_t size)
void *vec[prof_bt_max];
prof_bt_t bt;
- if (opt_lg_prof_sample == 0) {
+ if (opt_prof_active == false) {
+ /* Sampling is currently inactive, so avoid sampling. */
+ ret = (prof_thr_cnt_t *)(uintptr_t)1U;
+ } else if (opt_lg_prof_sample == 0) {
/*
* Don't bother with sampling logic, since sampling interval is
* 1.