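/*
 * Verify that backtrace counts accumulate across repeated heap profile
 * dumps: multiple threads allocate from many distinct backtraces while
 * periodically calling mallctl("prof.dump") and checking that
 * prof_bt_count() keeps growing.
 */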
#include "test/jemalloc_test.h"
#define NTHREADS 4
#define NALLOCS_PER_THREAD 50
#define DUMP_INTERVAL 1
#define BT_COUNT_CHECK_INTERVAL 5
#ifdef JEMALLOC_PROF
const char *malloc_conf =
"prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0";
#endif
/* Redirect heap profile dumps to /dev/null rather than creating files. */
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}
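/*
 * alloc_n_proto()/alloc_n_gen() expand into the mutually recursive
 * functions alloc_0() and alloc_1(), which recurse on the bits of their
 * argument before calling mallocx(), so each distinct bit pattern
 * produces a distinct backtrace for the resulting allocation.
 */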
#define	alloc_n_proto(n)						\
static void	*alloc_##n(unsigned bits);

#define	alloc_n_gen(n)							\
static void *								\
alloc_##n(unsigned bits)						\
{									\
	void *p;							\
									\
	if (bits == 0)							\
		p = mallocx(1, 0);					\
	else {								\
		switch (bits & 0x1U) {					\
		case 0:							\
			p = alloc_0(bits >> 1);				\
			break;						\
		case 1:							\
			p = alloc_1(bits >> 1);				\
			break;						\
		default: not_reached();					\
		}							\
	}								\
	/* Intentionally sabotage tail call optimization. */		\
	assert_ptr_not_null(p, "Unexpected mallocx() failure");	\
	return (p);							\
}

alloc_n_proto(0)
alloc_n_proto(1)
alloc_n_gen(0)
alloc_n_gen(1)
static void *
alloc_from_permuted_backtrace(unsigned thd_ind, unsigned iteration)
{

	return (alloc_0(thd_ind*NALLOCS_PER_THREAD + iteration));
}
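/*
 * Each thread allocates and immediately frees NALLOCS_PER_THREAD objects,
 * each from a unique backtrace, triggers a heap profile dump every
 * DUMP_INTERVAL iterations, and periodically asserts that the global
 * backtrace count has grown by at least the number of new unique
 * backtraces since the previous check.
 */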
static void *
thd_start(void *varg)
{
	unsigned thd_ind = *(unsigned *)varg;
	size_t bt_count_prev, bt_count;
	unsigned i_prev, i;

	i_prev = 0;
	bt_count_prev = 0;
	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		void *p = alloc_from_permuted_backtrace(thd_ind, i);
		dallocx(p, 0);
		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}

		if (i % BT_COUNT_CHECK_INTERVAL == 0 ||
		    i+1 == NALLOCS_PER_THREAD) {
			bt_count = prof_bt_count();
			assert_zu_le(bt_count_prev+(i-i_prev), bt_count,
			    "Expected larger backtrace count increase");
			i_prev = i;
			bt_count_prev = bt_count;
		}
	}

	return (NULL);
}
TEST_BEGIN(test_idump)
{
	bool active;
	thd_t thds[NTHREADS];
	unsigned thd_args[NTHREADS];
	unsigned i;

	test_skip_if(!config_prof);

	/* Activate profiling, which malloc_conf left inactive. */
	active = true;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while activating profiling");

	prof_dump_open = prof_dump_open_intercept;

	for (i = 0; i < NTHREADS; i++) {
		thd_args[i] = i;
		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
	}
	for (i = 0; i < NTHREADS; i++)
		thd_join(thds[i], NULL);
}
TEST_END
int
main(void)
{

	return (test(
	    test_idump));
}