#include "test/jemalloc_test.h"

#ifdef JEMALLOC_PROF
const char *malloc_conf =
    "prof:true,prof_active:false,lg_prof_sample:0";
#endif

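/*
 * Intercept profile dump file creation so that dumps triggered by these
 * tests are written to /dev/null rather than cluttering the filesystem.
 */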
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}

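/* Enable or disable profiling via the "prof.active" mallctl. */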
static void
set_prof_active(bool active)
{

	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure");
}

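/* Read the current lg sample rate via the "prof.lg_sample" mallctl. */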
static size_t
get_lg_prof_sample(void)
{
	size_t lg_prof_sample;
	size_t sz = sizeof(size_t);

	assert_d_eq(mallctl("prof.lg_sample", &lg_prof_sample, &sz, NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	return (lg_prof_sample);
}

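/*
 * Reset profile data while installing a new sample rate, then verify that
 * the new rate took effect.
 */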
static void
do_prof_reset(size_t lg_prof_sample)
{
	assert_d_eq(mallctl("prof.reset", NULL, NULL,
	    &lg_prof_sample, sizeof(size_t)), 0,
	    "Unexpected mallctl failure while resetting profile data");
	assert_zu_eq(lg_prof_sample, get_lg_prof_sample(),
	    "Expected profile sample rate change");
}

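/*
 * Verify that a plain prof.reset preserves the sample rate, and that a reset
 * that supplies a rate actually changes it.
 */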
TEST_BEGIN(test_prof_reset_basic)
{
	size_t lg_prof_sample_orig, lg_prof_sample, lg_prof_sample_next;
	size_t sz;
	unsigned i;

	test_skip_if(!config_prof);

	sz = sizeof(size_t);
	assert_d_eq(mallctl("opt.lg_prof_sample", &lg_prof_sample_orig, &sz,
	    NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	assert_zu_eq(lg_prof_sample_orig, 0,
	    "Unexpected profiling sample rate");
	lg_prof_sample = get_lg_prof_sample();
	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
	    "\"prof.lg_sample\"");

	/* Test simple resets. */
	for (i = 0; i < 2; i++) {
		assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
		    "Unexpected mallctl failure while resetting profile data");
		lg_prof_sample = get_lg_prof_sample();
		assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
		    "Unexpected profile sample rate change");
	}

	/* Test resets with prof.lg_sample changes. */
	lg_prof_sample_next = 1;
	for (i = 0; i < 2; i++) {
		do_prof_reset(lg_prof_sample_next);
		lg_prof_sample = get_lg_prof_sample();
		assert_zu_eq(lg_prof_sample, lg_prof_sample_next,
		    "Expected profile sample rate change");
		lg_prof_sample_next = lg_prof_sample_orig;
	}

	/* Make sure the test code restored prof.lg_sample. */
	lg_prof_sample = get_lg_prof_sample();
	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
	    "\"prof.lg_sample\"");
}
TEST_END

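/*
 * State shared with prof_dump_header_intercept(): whether the intercept ran,
 * and a copy of the aggregate counters it was handed.
 */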
bool prof_dump_header_intercepted = false;
prof_cnt_t cnt_all_copy = {0, 0, 0, 0};
static bool
prof_dump_header_intercept(bool propagate_err, const prof_cnt_t *cnt_all)
{

	prof_dump_header_intercepted = true;
	memcpy(&cnt_all_copy, cnt_all, sizeof(prof_cnt_t));

	return (false);
}

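/*
 * Verify that prof.reset discards accumulated counts: after resetting, a
 * dump should report zero current objects even though the backtrace for the
 * still-live allocation remains cached.
 */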
TEST_BEGIN(test_prof_reset_cleanup)
{
	void *p;
	prof_dump_header_t *prof_dump_header_orig;

	test_skip_if(!config_prof);

	set_prof_active(true);

	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	prof_dump_header_orig = prof_dump_header;
	prof_dump_header = prof_dump_header_intercept;
	assert_false(prof_dump_header_intercepted, "Unexpected intercept");

	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_true(prof_dump_header_intercepted, "Expected intercept");
	assert_u64_eq(cnt_all_copy.curobjs, 1, "Expected 1 allocation");

	assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
	    "Unexpected error while resetting heap profile data");
	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_u64_eq(cnt_all_copy.curobjs, 0, "Expected 0 allocations");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	prof_dump_header = prof_dump_header_orig;

	dallocx(p, 0);
	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");

	set_prof_active(false);
}
TEST_END

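/*
 * Stress test: multiple threads allocate and free through per-thread ring
 * buffers while periodically resetting and dumping, racing prof.reset
 * against concurrent sampled allocations.
 */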
#define	NTHREADS		4
#define	NALLOCS_PER_THREAD	(1U << 13)
#define	OBJ_RING_BUF_COUNT	1531
#define	RESET_INTERVAL		(1U << 10)
#define	DUMP_INTERVAL		3677
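/* Worker body for test_prof_reset below. */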
static void *
thd_start(void *varg)
{
	unsigned thd_ind = *(unsigned *)varg;
	unsigned i;
	void *objs[OBJ_RING_BUF_COUNT];

	memset(objs, 0, sizeof(objs));

	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		if (i % RESET_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0),
			    0, "Unexpected error while resetting heap profile "
			    "data");
		}

		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}

		{
			void **pp = &objs[i % OBJ_RING_BUF_COUNT];
			if (*pp != NULL) {
				dallocx(*pp, 0);
				*pp = NULL;
			}
			*pp = btalloc(1, thd_ind*NALLOCS_PER_THREAD + i);
			assert_ptr_not_null(*pp,
			    "Unexpected btalloc() failure");
		}
	}

	/* Clean up any remaining objects. */
	for (i = 0; i < OBJ_RING_BUF_COUNT; i++) {
		void **pp = &objs[i % OBJ_RING_BUF_COUNT];
		if (*pp != NULL) {
			dallocx(*pp, 0);
			*pp = NULL;
		}
	}

	return (NULL);
}

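/*
 * Check that the multi-threaded run leaves no extra backtraces or tdata
 * structures behind, then restore the original sample rate.
 */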
TEST_BEGIN(test_prof_reset)
{
	size_t lg_prof_sample_orig;
	thd_t thds[NTHREADS];
	unsigned thd_args[NTHREADS];
	unsigned i;
	size_t bt_count, tdata_count;

	test_skip_if(!config_prof);

	bt_count = prof_bt_count();
	assert_zu_eq(bt_count, 0,
	    "Unexpected pre-existing backtraces");
	tdata_count = prof_tdata_count();

	lg_prof_sample_orig = get_lg_prof_sample();
	do_prof_reset(5);

	set_prof_active(true);

	for (i = 0; i < NTHREADS; i++) {
		thd_args[i] = i;
		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
	}
	for (i = 0; i < NTHREADS; i++)
		thd_join(thds[i], NULL);

	assert_zu_eq(prof_bt_count(), bt_count,
	    "Unexpected backtrace count change");
	assert_zu_eq(prof_tdata_count(), tdata_count,
	    "Unexpected remaining tdata structures");

	set_prof_active(false);

	do_prof_reset(lg_prof_sample_orig);
}
TEST_END
#undef NTHREADS
#undef NALLOCS_PER_THREAD
#undef OBJ_RING_BUF_COUNT
#undef RESET_INTERVAL
#undef DUMP_INTERVAL

/* Test sampling at the same allocation site across resets. */
#define	NITER 10
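/*
 * Because do_prof_reset(0) forces lg_prof_sample to 0, every allocation
 * below is sampled; the xallocx() calls exercise in-place resizing of
 * sampled allocations across resets, and the final dallocx() calls depend on
 * the sampling metadata remaining consistent.
 */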
TEST_BEGIN(test_xallocx)
{
	size_t lg_prof_sample_orig;
	unsigned i;
	void *ptrs[NITER];

	test_skip_if(!config_prof);

	lg_prof_sample_orig = get_lg_prof_sample();
	set_prof_active(true);

	/* Reset profiling. */
	do_prof_reset(0);

	for (i = 0; i < NITER; i++) {
		void *p;
		size_t sz, nsz;

		/* Reset profiling. */
		do_prof_reset(0);

		/* Allocate small object (which will be promoted). */
		p = ptrs[i] = mallocx(1, 0);
		assert_ptr_not_null(p, "Unexpected mallocx() failure");

		/* Reset profiling. */
		do_prof_reset(0);

		/* Perform successful xallocx(). */
		sz = sallocx(p, 0);
		assert_zu_eq(xallocx(p, sz, 0, 0), sz,
		    "Unexpected xallocx() failure");

		/* Perform unsuccessful xallocx(). */
		nsz = nallocx(sz+1, 0);
		assert_zu_eq(xallocx(p, nsz, 0, 0), sz,
		    "Unexpected xallocx() success");
	}

	for (i = 0; i < NITER; i++) {
		/* Free the objects allocated above. */
		dallocx(ptrs[i], 0);
	}

	set_prof_active(false);
	do_prof_reset(lg_prof_sample_orig);
}
TEST_END
#undef NITER

int
main(void)
{

	/* Intercept dumping prior to running any tests. */
	prof_dump_open = prof_dump_open_intercept;

	return (test(
	    test_prof_reset_basic,
	    test_prof_reset_cleanup,
	    test_prof_reset,
	    test_xallocx));
}