summary refs log tree commit diff stats
diff options
context:
space:
mode:
author Jason Evans <jasone@canonware.com> 2014-01-21 22:59:40 (GMT)
committer Jason Evans <jasone@canonware.com> 2014-01-21 22:59:40 (GMT)
commit 898960247a8b2e6534738b7a3a244855f379faf9 (patch)
tree 015c344cb5cabbf7362707f5600d37563425c2ed
parent e2206edebcce662e3979c68ba182d0bcf38141ba (diff)
download jemalloc-898960247a8b2e6534738b7a3a244855f379faf9.zip
download jemalloc-898960247a8b2e6534738b7a3a244855f379faf9.tar.gz
download jemalloc-898960247a8b2e6534738b7a3a244855f379faf9.tar.bz2
Subvert tail call optimization in backtrace test.
Re-structure alloc_[01](), which are mutually tail-recursive functions, to do (unnecessary) work post-recursion so that the compiler cannot perform tail call optimization, thus preserving intentionally unique call paths in captured backtraces.
-rw-r--r--test/unit/prof_accum.c28
1 files changed, 17 insertions, 11 deletions
diff --git a/test/unit/prof_accum.c b/test/unit/prof_accum.c
index b5f1c8d..cf3f287 100644
--- a/test/unit/prof_accum.c
+++ b/test/unit/prof_accum.c
@@ -28,18 +28,24 @@ static void *alloc_##n(unsigned bits);
static void * \
alloc_##n(unsigned bits) \
{ \
+ void *p; \
\
- if (bits == 0) { \
- void *p = mallocx(1, 0); \
- assert_ptr_not_null(p, "Unexpected mallocx() failure"); \
- return (p); \
- } \
- \
- switch (bits & 0x1U) { \
- case 0: return (alloc_0(bits >> 1)); \
- case 1: return (alloc_1(bits >> 1)); \
- default: not_reached(); \
+ if (bits == 0) \
+ p = mallocx(1, 0); \
+ else { \
+ switch (bits & 0x1U) { \
+ case 0: \
+ p = alloc_0(bits >> 1); \
+ break; \
+ case 1: \
+ p = alloc_1(bits >> 1); \
+ break; \
+ default: not_reached(); \
+ } \
} \
+ /* Intentionally sabotage tail call optimization. */ \
+ assert_ptr_not_null(p, "Unexpected mallocx() failure"); \
+ return (p); \
}
alloc_n_proto(0)
alloc_n_proto(1)
@@ -74,7 +80,7 @@ thd_start(void *varg)
i+1 == NALLOCS_PER_THREAD) {
bt_count = prof_bt_count();
assert_zu_le(bt_count_prev+(i-i_prev), bt_count,
- "Expected larger bactrace count increase");
+ "Expected larger backtrace count increase");
i_prev = i;
bt_count_prev = bt_count;
}