/* src/ctl.c */
      1 #define	JEMALLOC_CTL_C_
      2 #include "jemalloc/internal/jemalloc_internal.h"
      3 
      4 /******************************************************************************/
      5 /* Data. */
      6 
      7 /*
      8  * ctl_mtx protects the following:
      9  * - ctl_stats.*
     10  */
static malloc_mutex_t	ctl_mtx;	/* Protects ctl_stats.* (see above). */
static bool		ctl_initialized;	/* Set once ctl_init() has completed. */
static uint64_t		ctl_epoch;	/* Bumped by each ctl_refresh(). */
static ctl_stats_t	ctl_stats;	/* Cached stats; rebuilt by ctl_refresh(). */
     15 
     16 /******************************************************************************/
     17 /* Helpers for named and indexed nodes. */
     18 
     19 JEMALLOC_INLINE_C const ctl_named_node_t *
     20 ctl_named_node(const ctl_node_t *node)
     21 {
     22 
     23 	return ((node->named) ? (const ctl_named_node_t *)node : NULL);
     24 }
     25 
     26 JEMALLOC_INLINE_C const ctl_named_node_t *
     27 ctl_named_children(const ctl_named_node_t *node, size_t index)
     28 {
     29 	const ctl_named_node_t *children = ctl_named_node(node->children);
     30 
     31 	return (children ? &children[index] : NULL);
     32 }
     33 
     34 JEMALLOC_INLINE_C const ctl_indexed_node_t *
     35 ctl_indexed_node(const ctl_node_t *node)
     36 {
     37 
     38 	return (!node->named ? (const ctl_indexed_node_t *)node : NULL);
     39 }
     40 
     41 /******************************************************************************/
     42 /* Function prototypes for non-inline static functions. */
     43 
/*
 * CTL_PROTO() emits the prototype for a leaf node's read/write handler
 * (n##_ctl); INDEX_PROTO() emits the prototype for the lookup function
 * (n##_index) of an indexed (numeric) node such as arena.<i>.
 */
#define	CTL_PROTO(n)							\
static int	n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,	\
    void *oldp, size_t *oldlenp, void *newp, size_t newlen);

#define	INDEX_PROTO(n)							\
static const ctl_named_node_t	*n##_index(tsdn_t *tsdn,		\
    const size_t *mib, size_t miblen, size_t i);

/* Helpers for maintaining ctl_stats; definitions appear below. */
static bool	ctl_arena_init(ctl_arena_stats_t *astats);
static void	ctl_arena_clear(ctl_arena_stats_t *astats);
static void	ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_stats_t *cstats,
    arena_t *arena);
static void	ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
    ctl_arena_stats_t *astats);
static void	ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, unsigned i);
static bool	ctl_grow(tsdn_t *tsdn);
static void	ctl_refresh(tsdn_t *tsdn);
static bool	ctl_init(tsdn_t *tsdn);
static int	ctl_lookup(tsdn_t *tsdn, const char *name,
    ctl_node_t const **nodesp, size_t *mibp, size_t *depthp);
     64 
/* Top-level scalars. */
CTL_PROTO(version)
CTL_PROTO(epoch)
/* thread.* — per-thread state. */
CTL_PROTO(thread_tcache_enabled)
CTL_PROTO(thread_tcache_flush)
CTL_PROTO(thread_prof_name)
CTL_PROTO(thread_prof_active)
CTL_PROTO(thread_arena)
CTL_PROTO(thread_allocated)
CTL_PROTO(thread_allocatedp)
CTL_PROTO(thread_deallocated)
CTL_PROTO(thread_deallocatedp)
/* config.* — compile-time configuration flags. */
CTL_PROTO(config_cache_oblivious)
CTL_PROTO(config_debug)
CTL_PROTO(config_fill)
CTL_PROTO(config_lazy_lock)
CTL_PROTO(config_malloc_conf)
CTL_PROTO(config_munmap)
CTL_PROTO(config_prof)
CTL_PROTO(config_prof_libgcc)
CTL_PROTO(config_prof_libunwind)
CTL_PROTO(config_stats)
CTL_PROTO(config_tcache)
CTL_PROTO(config_tls)
CTL_PROTO(config_utrace)
CTL_PROTO(config_valgrind)
CTL_PROTO(config_xmalloc)
/* opt.* — run-time option values. */
CTL_PROTO(opt_abort)
CTL_PROTO(opt_dss)
CTL_PROTO(opt_lg_chunk)
CTL_PROTO(opt_narenas)
CTL_PROTO(opt_purge)
CTL_PROTO(opt_lg_dirty_mult)
CTL_PROTO(opt_decay_time)
CTL_PROTO(opt_stats_print)
CTL_PROTO(opt_junk)
CTL_PROTO(opt_zero)
CTL_PROTO(opt_quarantine)
CTL_PROTO(opt_redzone)
CTL_PROTO(opt_utrace)
CTL_PROTO(opt_xmalloc)
CTL_PROTO(opt_tcache)
CTL_PROTO(opt_lg_tcache_max)
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_prefix)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_prof_thread_active_init)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
CTL_PROTO(opt_prof_gdump)
CTL_PROTO(opt_prof_final)
CTL_PROTO(opt_prof_leak)
CTL_PROTO(opt_prof_accum)
/* tcache.* — explicit thread cache management. */
CTL_PROTO(tcache_create)
CTL_PROTO(tcache_flush)
CTL_PROTO(tcache_destroy)
/* arena.<i>.* — per-arena control (indexed). */
static void	arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all);
CTL_PROTO(arena_i_purge)
CTL_PROTO(arena_i_decay)
CTL_PROTO(arena_i_reset)
CTL_PROTO(arena_i_dss)
CTL_PROTO(arena_i_lg_dirty_mult)
CTL_PROTO(arena_i_decay_time)
CTL_PROTO(arena_i_chunk_hooks)
INDEX_PROTO(arena_i)
/* arenas.* — global arena/size-class introspection. */
CTL_PROTO(arenas_bin_i_size)
CTL_PROTO(arenas_bin_i_nregs)
CTL_PROTO(arenas_bin_i_run_size)
INDEX_PROTO(arenas_bin_i)
CTL_PROTO(arenas_lrun_i_size)
INDEX_PROTO(arenas_lrun_i)
CTL_PROTO(arenas_hchunk_i_size)
INDEX_PROTO(arenas_hchunk_i)
CTL_PROTO(arenas_narenas)
CTL_PROTO(arenas_initialized)
CTL_PROTO(arenas_lg_dirty_mult)
CTL_PROTO(arenas_decay_time)
CTL_PROTO(arenas_quantum)
CTL_PROTO(arenas_page)
CTL_PROTO(arenas_tcache_max)
CTL_PROTO(arenas_nbins)
CTL_PROTO(arenas_nhbins)
CTL_PROTO(arenas_nlruns)
CTL_PROTO(arenas_nhchunks)
CTL_PROTO(arenas_extend)
/* prof.* — heap profiling control. */
CTL_PROTO(prof_thread_active_init)
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_gdump)
CTL_PROTO(prof_reset)
CTL_PROTO(prof_interval)
CTL_PROTO(lg_prof_sample)
/* stats.* — statistics queries. */
CTL_PROTO(stats_arenas_i_small_allocated)
CTL_PROTO(stats_arenas_i_small_nmalloc)
CTL_PROTO(stats_arenas_i_small_ndalloc)
CTL_PROTO(stats_arenas_i_small_nrequests)
CTL_PROTO(stats_arenas_i_large_allocated)
CTL_PROTO(stats_arenas_i_large_nmalloc)
CTL_PROTO(stats_arenas_i_large_ndalloc)
CTL_PROTO(stats_arenas_i_large_nrequests)
CTL_PROTO(stats_arenas_i_huge_allocated)
CTL_PROTO(stats_arenas_i_huge_nmalloc)
CTL_PROTO(stats_arenas_i_huge_ndalloc)
CTL_PROTO(stats_arenas_i_huge_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
CTL_PROTO(stats_arenas_i_bins_j_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_curregs)
CTL_PROTO(stats_arenas_i_bins_j_nfills)
CTL_PROTO(stats_arenas_i_bins_j_nflushes)
CTL_PROTO(stats_arenas_i_bins_j_nruns)
CTL_PROTO(stats_arenas_i_bins_j_nreruns)
CTL_PROTO(stats_arenas_i_bins_j_curruns)
INDEX_PROTO(stats_arenas_i_bins_j)
CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
CTL_PROTO(stats_arenas_i_lruns_j_curruns)
INDEX_PROTO(stats_arenas_i_lruns_j)
CTL_PROTO(stats_arenas_i_hchunks_j_nmalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_ndalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_nrequests)
CTL_PROTO(stats_arenas_i_hchunks_j_curhchunks)
INDEX_PROTO(stats_arenas_i_hchunks_j)
CTL_PROTO(stats_arenas_i_nthreads)
CTL_PROTO(stats_arenas_i_dss)
CTL_PROTO(stats_arenas_i_lg_dirty_mult)
CTL_PROTO(stats_arenas_i_decay_time)
CTL_PROTO(stats_arenas_i_pactive)
CTL_PROTO(stats_arenas_i_pdirty)
CTL_PROTO(stats_arenas_i_mapped)
CTL_PROTO(stats_arenas_i_retained)
CTL_PROTO(stats_arenas_i_npurge)
CTL_PROTO(stats_arenas_i_nmadvise)
CTL_PROTO(stats_arenas_i_purged)
CTL_PROTO(stats_arenas_i_metadata_mapped)
CTL_PROTO(stats_arenas_i_metadata_allocated)
INDEX_PROTO(stats_arenas_i)
CTL_PROTO(stats_cactive)
CTL_PROTO(stats_allocated)
CTL_PROTO(stats_active)
CTL_PROTO(stats_metadata)
CTL_PROTO(stats_resident)
CTL_PROTO(stats_mapped)
CTL_PROTO(stats_retained)
    209 
    210 /******************************************************************************/
    211 /* mallctl tree. */
    212 
/* Maximum tree depth. */
#define	CTL_MAX_DEPTH	6

/*
 * Table-construction helpers.  Node fields are (header, name-or-index-fn)
 * followed by (nchildren, children, ctl handler):
 * - NAME(n): named-node header ({true} sets the discriminator checked by
 *   ctl_named_node()) plus the node's name string.
 * - CHILD(t, c): child count and pointer for subtree c, whose elements are
 *   ctl_##t##_node_t ("named" or "indexed"); the handler slot is NULL.
 * - CTL(c): leaf node — no children, handled by c##_ctl.
 */
#define	NAME(n)	{true},	n
#define	CHILD(t, c)							\
	sizeof(c##_node) / sizeof(ctl_##t##_node_t),			\
	(ctl_node_t *)c##_node,						\
	NULL
#define	CTL(c)	0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define	INDEX(i)	{false},	i##_index
    228 
/*
 * Static mallctl tree.  Element order within each table defines the MIB
 * indices, so entries must not be reordered.
 */
static const ctl_named_node_t	thread_tcache_node[] = {
	{NAME("enabled"),	CTL(thread_tcache_enabled)},
	{NAME("flush"),		CTL(thread_tcache_flush)}
};

static const ctl_named_node_t	thread_prof_node[] = {
	{NAME("name"),		CTL(thread_prof_name)},
	{NAME("active"),	CTL(thread_prof_active)}
};

static const ctl_named_node_t	thread_node[] = {
	{NAME("arena"),		CTL(thread_arena)},
	{NAME("allocated"),	CTL(thread_allocated)},
	{NAME("allocatedp"),	CTL(thread_allocatedp)},
	{NAME("deallocated"),	CTL(thread_deallocated)},
	{NAME("deallocatedp"),	CTL(thread_deallocatedp)},
	{NAME("tcache"),	CHILD(named, thread_tcache)},
	{NAME("prof"),		CHILD(named, thread_prof)}
};

static const ctl_named_node_t	config_node[] = {
	{NAME("cache_oblivious"), CTL(config_cache_oblivious)},
	{NAME("debug"),		CTL(config_debug)},
	{NAME("fill"),		CTL(config_fill)},
	{NAME("lazy_lock"),	CTL(config_lazy_lock)},
	{NAME("malloc_conf"),	CTL(config_malloc_conf)},
	{NAME("munmap"),	CTL(config_munmap)},
	{NAME("prof"),		CTL(config_prof)},
	{NAME("prof_libgcc"),	CTL(config_prof_libgcc)},
	{NAME("prof_libunwind"), CTL(config_prof_libunwind)},
	{NAME("stats"),		CTL(config_stats)},
	{NAME("tcache"),	CTL(config_tcache)},
	{NAME("tls"),		CTL(config_tls)},
	{NAME("utrace"),	CTL(config_utrace)},
	{NAME("valgrind"),	CTL(config_valgrind)},
	{NAME("xmalloc"),	CTL(config_xmalloc)}
};

static const ctl_named_node_t opt_node[] = {
	{NAME("abort"),		CTL(opt_abort)},
	{NAME("dss"),		CTL(opt_dss)},
	{NAME("lg_chunk"),	CTL(opt_lg_chunk)},
	{NAME("narenas"),	CTL(opt_narenas)},
	{NAME("purge"),		CTL(opt_purge)},
	{NAME("lg_dirty_mult"),	CTL(opt_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(opt_decay_time)},
	{NAME("stats_print"),	CTL(opt_stats_print)},
	{NAME("junk"),		CTL(opt_junk)},
	{NAME("zero"),		CTL(opt_zero)},
	{NAME("quarantine"),	CTL(opt_quarantine)},
	{NAME("redzone"),	CTL(opt_redzone)},
	{NAME("utrace"),	CTL(opt_utrace)},
	{NAME("xmalloc"),	CTL(opt_xmalloc)},
	{NAME("tcache"),	CTL(opt_tcache)},
	{NAME("lg_tcache_max"),	CTL(opt_lg_tcache_max)},
	{NAME("prof"),		CTL(opt_prof)},
	{NAME("prof_prefix"),	CTL(opt_prof_prefix)},
	{NAME("prof_active"),	CTL(opt_prof_active)},
	{NAME("prof_thread_active_init"), CTL(opt_prof_thread_active_init)},
	{NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
	{NAME("prof_gdump"),	CTL(opt_prof_gdump)},
	{NAME("prof_final"),	CTL(opt_prof_final)},
	{NAME("prof_leak"),	CTL(opt_prof_leak)},
	{NAME("prof_accum"),	CTL(opt_prof_accum)}
};

static const ctl_named_node_t	tcache_node[] = {
	{NAME("create"),	CTL(tcache_create)},
	{NAME("flush"),		CTL(tcache_flush)},
	{NAME("destroy"),	CTL(tcache_destroy)}
};

static const ctl_named_node_t arena_i_node[] = {
	{NAME("purge"),		CTL(arena_i_purge)},
	{NAME("decay"),		CTL(arena_i_decay)},
	{NAME("reset"),		CTL(arena_i_reset)},
	{NAME("dss"),		CTL(arena_i_dss)},
	{NAME("lg_dirty_mult"),	CTL(arena_i_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(arena_i_decay_time)},
	{NAME("chunk_hooks"),	CTL(arena_i_chunk_hooks)}
};
/*
 * super_* tables wrap an indexed subtree's element in a single anonymous
 * named node, giving index resolution a named node to hand back.
 */
static const ctl_named_node_t super_arena_i_node[] = {
	{NAME(""),		CHILD(named, arena_i)}
};

static const ctl_indexed_node_t arena_node[] = {
	{INDEX(arena_i)}
};

static const ctl_named_node_t arenas_bin_i_node[] = {
	{NAME("size"),		CTL(arenas_bin_i_size)},
	{NAME("nregs"),		CTL(arenas_bin_i_nregs)},
	{NAME("run_size"),	CTL(arenas_bin_i_run_size)}
};
static const ctl_named_node_t super_arenas_bin_i_node[] = {
	{NAME(""),		CHILD(named, arenas_bin_i)}
};

static const ctl_indexed_node_t arenas_bin_node[] = {
	{INDEX(arenas_bin_i)}
};

static const ctl_named_node_t arenas_lrun_i_node[] = {
	{NAME("size"),		CTL(arenas_lrun_i_size)}
};
static const ctl_named_node_t super_arenas_lrun_i_node[] = {
	{NAME(""),		CHILD(named, arenas_lrun_i)}
};

static const ctl_indexed_node_t arenas_lrun_node[] = {
	{INDEX(arenas_lrun_i)}
};

static const ctl_named_node_t arenas_hchunk_i_node[] = {
	{NAME("size"),		CTL(arenas_hchunk_i_size)}
};
static const ctl_named_node_t super_arenas_hchunk_i_node[] = {
	{NAME(""),		CHILD(named, arenas_hchunk_i)}
};

static const ctl_indexed_node_t arenas_hchunk_node[] = {
	{INDEX(arenas_hchunk_i)}
};

static const ctl_named_node_t arenas_node[] = {
	{NAME("narenas"),	CTL(arenas_narenas)},
	{NAME("initialized"),	CTL(arenas_initialized)},
	{NAME("lg_dirty_mult"),	CTL(arenas_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(arenas_decay_time)},
	{NAME("quantum"),	CTL(arenas_quantum)},
	{NAME("page"),		CTL(arenas_page)},
	{NAME("tcache_max"),	CTL(arenas_tcache_max)},
	{NAME("nbins"),		CTL(arenas_nbins)},
	{NAME("nhbins"),	CTL(arenas_nhbins)},
	{NAME("bin"),		CHILD(indexed, arenas_bin)},
	{NAME("nlruns"),	CTL(arenas_nlruns)},
	{NAME("lrun"),		CHILD(indexed, arenas_lrun)},
	{NAME("nhchunks"),	CTL(arenas_nhchunks)},
	{NAME("hchunk"),	CHILD(indexed, arenas_hchunk)},
	{NAME("extend"),	CTL(arenas_extend)}
};

static const ctl_named_node_t	prof_node[] = {
	{NAME("thread_active_init"), CTL(prof_thread_active_init)},
	{NAME("active"),	CTL(prof_active)},
	{NAME("dump"),		CTL(prof_dump)},
	{NAME("gdump"),		CTL(prof_gdump)},
	{NAME("reset"),		CTL(prof_reset)},
	{NAME("interval"),	CTL(prof_interval)},
	{NAME("lg_sample"),	CTL(lg_prof_sample)}
};

static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
	{NAME("mapped"),	CTL(stats_arenas_i_metadata_mapped)},
	{NAME("allocated"),	CTL(stats_arenas_i_metadata_allocated)}
};

static const ctl_named_node_t stats_arenas_i_small_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_small_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_small_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_small_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_small_nrequests)}
};

static const ctl_named_node_t stats_arenas_i_large_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_large_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_large_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_large_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_large_nrequests)}
};

static const ctl_named_node_t stats_arenas_i_huge_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_huge_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_huge_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_huge_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_huge_nrequests)}
};

static const ctl_named_node_t stats_arenas_i_bins_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_bins_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_bins_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_bins_j_nrequests)},
	{NAME("curregs"),	CTL(stats_arenas_i_bins_j_curregs)},
	{NAME("nfills"),	CTL(stats_arenas_i_bins_j_nfills)},
	{NAME("nflushes"),	CTL(stats_arenas_i_bins_j_nflushes)},
	{NAME("nruns"),		CTL(stats_arenas_i_bins_j_nruns)},
	{NAME("nreruns"),	CTL(stats_arenas_i_bins_j_nreruns)},
	{NAME("curruns"),	CTL(stats_arenas_i_bins_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_bins_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_bins_j)}
};

static const ctl_indexed_node_t stats_arenas_i_bins_node[] = {
	{INDEX(stats_arenas_i_bins_j)}
};

static const ctl_named_node_t stats_arenas_i_lruns_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_lruns_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_lruns_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_lruns_j_nrequests)},
	{NAME("curruns"),	CTL(stats_arenas_i_lruns_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_lruns_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_lruns_j)}
};

static const ctl_indexed_node_t stats_arenas_i_lruns_node[] = {
	{INDEX(stats_arenas_i_lruns_j)}
};

static const ctl_named_node_t stats_arenas_i_hchunks_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_hchunks_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_hchunks_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_hchunks_j_nrequests)},
	{NAME("curhchunks"),	CTL(stats_arenas_i_hchunks_j_curhchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_hchunks_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_hchunks_j)}
};

static const ctl_indexed_node_t stats_arenas_i_hchunks_node[] = {
	{INDEX(stats_arenas_i_hchunks_j)}
};

static const ctl_named_node_t stats_arenas_i_node[] = {
	{NAME("nthreads"),	CTL(stats_arenas_i_nthreads)},
	{NAME("dss"),		CTL(stats_arenas_i_dss)},
	{NAME("lg_dirty_mult"),	CTL(stats_arenas_i_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(stats_arenas_i_decay_time)},
	{NAME("pactive"),	CTL(stats_arenas_i_pactive)},
	{NAME("pdirty"),	CTL(stats_arenas_i_pdirty)},
	{NAME("mapped"),	CTL(stats_arenas_i_mapped)},
	{NAME("retained"),	CTL(stats_arenas_i_retained)},
	{NAME("npurge"),	CTL(stats_arenas_i_npurge)},
	{NAME("nmadvise"),	CTL(stats_arenas_i_nmadvise)},
	{NAME("purged"),	CTL(stats_arenas_i_purged)},
	{NAME("metadata"),	CHILD(named, stats_arenas_i_metadata)},
	{NAME("small"),		CHILD(named, stats_arenas_i_small)},
	{NAME("large"),		CHILD(named, stats_arenas_i_large)},
	{NAME("huge"),		CHILD(named, stats_arenas_i_huge)},
	{NAME("bins"),		CHILD(indexed, stats_arenas_i_bins)},
	{NAME("lruns"),		CHILD(indexed, stats_arenas_i_lruns)},
	{NAME("hchunks"),	CHILD(indexed, stats_arenas_i_hchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i)}
};

static const ctl_indexed_node_t stats_arenas_node[] = {
	{INDEX(stats_arenas_i)}
};

static const ctl_named_node_t stats_node[] = {
	{NAME("cactive"),	CTL(stats_cactive)},
	{NAME("allocated"),	CTL(stats_allocated)},
	{NAME("active"),	CTL(stats_active)},
	{NAME("metadata"),	CTL(stats_metadata)},
	{NAME("resident"),	CTL(stats_resident)},
	{NAME("mapped"),	CTL(stats_mapped)},
	{NAME("retained"),	CTL(stats_retained)},
	{NAME("arenas"),	CHILD(indexed, stats_arenas)}
};

static const ctl_named_node_t	root_node[] = {
	{NAME("version"),	CTL(version)},
	{NAME("epoch"),		CTL(epoch)},
	{NAME("thread"),	CHILD(named, thread)},
	{NAME("config"),	CHILD(named, config)},
	{NAME("opt"),		CHILD(named, opt)},
	{NAME("tcache"),	CHILD(named, tcache)},
	{NAME("arena"),		CHILD(indexed, arena)},
	{NAME("arenas"),	CHILD(named, arenas)},
	{NAME("prof"),		CHILD(named, prof)},
	{NAME("stats"),		CHILD(named, stats)}
};
/* Anonymous root wrapper; tree traversal starts here (see ctl_lookup()). */
static const ctl_named_node_t super_root_node[] = {
	{NAME(""),		CHILD(named, root)}
};

#undef NAME
#undef CHILD
#undef CTL
#undef INDEX
    514 
    515 /******************************************************************************/
    516 
    517 static bool
    518 ctl_arena_init(ctl_arena_stats_t *astats)
    519 {
    520 
    521 	if (astats->lstats == NULL) {
    522 		astats->lstats = (malloc_large_stats_t *)a0malloc(nlclasses *
    523 		    sizeof(malloc_large_stats_t));
    524 		if (astats->lstats == NULL)
    525 			return (true);
    526 	}
    527 
    528 	if (astats->hstats == NULL) {
    529 		astats->hstats = (malloc_huge_stats_t *)a0malloc(nhclasses *
    530 		    sizeof(malloc_huge_stats_t));
    531 		if (astats->hstats == NULL)
    532 			return (true);
    533 	}
    534 
    535 	return (false);
    536 }
    537 
    538 static void
    539 ctl_arena_clear(ctl_arena_stats_t *astats)
    540 {
    541 
    542 	astats->nthreads = 0;
    543 	astats->dss = dss_prec_names[dss_prec_limit];
    544 	astats->lg_dirty_mult = -1;
    545 	astats->decay_time = -1;
    546 	astats->pactive = 0;
    547 	astats->pdirty = 0;
    548 	if (config_stats) {
    549 		memset(&astats->astats, 0, sizeof(arena_stats_t));
    550 		astats->allocated_small = 0;
    551 		astats->nmalloc_small = 0;
    552 		astats->ndalloc_small = 0;
    553 		astats->nrequests_small = 0;
    554 		memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t));
    555 		memset(astats->lstats, 0, nlclasses *
    556 		    sizeof(malloc_large_stats_t));
    557 		memset(astats->hstats, 0, nhclasses *
    558 		    sizeof(malloc_huge_stats_t));
    559 	}
    560 }
    561 
    562 static void
    563 ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_stats_t *cstats, arena_t *arena)
    564 {
    565 	unsigned i;
    566 
    567 	if (config_stats) {
    568 		arena_stats_merge(tsdn, arena, &cstats->nthreads, &cstats->dss,
    569 		    &cstats->lg_dirty_mult, &cstats->decay_time,
    570 		    &cstats->pactive, &cstats->pdirty, &cstats->astats,
    571 		    cstats->bstats, cstats->lstats, cstats->hstats);
    572 
    573 		for (i = 0; i < NBINS; i++) {
    574 			cstats->allocated_small += cstats->bstats[i].curregs *
    575 			    index2size(i);
    576 			cstats->nmalloc_small += cstats->bstats[i].nmalloc;
    577 			cstats->ndalloc_small += cstats->bstats[i].ndalloc;
    578 			cstats->nrequests_small += cstats->bstats[i].nrequests;
    579 		}
    580 	} else {
    581 		arena_basic_stats_merge(tsdn, arena, &cstats->nthreads,
    582 		    &cstats->dss, &cstats->lg_dirty_mult, &cstats->decay_time,
    583 		    &cstats->pactive, &cstats->pdirty);
    584 	}
    585 }
    586 
/*
 * Accumulate astats into sstats, field by field.  Used to fold each arena's
 * snapshot into the summary element at ctl_stats.arenas[ctl_stats.narenas].
 */
static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	/* Basic fields are merged unconditionally. */
	sstats->nthreads += astats->nthreads;
	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	if (config_stats) {
		/* Arena-wide counters. */
		sstats->astats.mapped += astats->astats.mapped;
		sstats->astats.retained += astats->astats.retained;
		sstats->astats.npurge += astats->astats.npurge;
		sstats->astats.nmadvise += astats->astats.nmadvise;
		sstats->astats.purged += astats->astats.purged;

		sstats->astats.metadata_mapped +=
		    astats->astats.metadata_mapped;
		sstats->astats.metadata_allocated +=
		    astats->astats.metadata_allocated;

		/* Small-class aggregates (derived in ctl_arena_stats_amerge()). */
		sstats->allocated_small += astats->allocated_small;
		sstats->nmalloc_small += astats->nmalloc_small;
		sstats->ndalloc_small += astats->ndalloc_small;
		sstats->nrequests_small += astats->nrequests_small;

		/* Large-class aggregates. */
		sstats->astats.allocated_large +=
		    astats->astats.allocated_large;
		sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
		sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
		sstats->astats.nrequests_large +=
		    astats->astats.nrequests_large;

		/* Huge-class aggregates. */
		sstats->astats.allocated_huge += astats->astats.allocated_huge;
		sstats->astats.nmalloc_huge += astats->astats.nmalloc_huge;
		sstats->astats.ndalloc_huge += astats->astats.ndalloc_huge;

		/* Per-bin (small) stats. */
		for (i = 0; i < NBINS; i++) {
			sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
			sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
			sstats->bstats[i].nrequests +=
			    astats->bstats[i].nrequests;
			sstats->bstats[i].curregs += astats->bstats[i].curregs;
			if (config_tcache) {
				sstats->bstats[i].nfills +=
				    astats->bstats[i].nfills;
				sstats->bstats[i].nflushes +=
				    astats->bstats[i].nflushes;
			}
			sstats->bstats[i].nruns += astats->bstats[i].nruns;
			sstats->bstats[i].reruns += astats->bstats[i].reruns;
			sstats->bstats[i].curruns += astats->bstats[i].curruns;
		}

		/* Per-size-class large stats. */
		for (i = 0; i < nlclasses; i++) {
			sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
			sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
			sstats->lstats[i].nrequests +=
			    astats->lstats[i].nrequests;
			sstats->lstats[i].curruns += astats->lstats[i].curruns;
		}

		/* Per-size-class huge stats. */
		for (i = 0; i < nhclasses; i++) {
			sstats->hstats[i].nmalloc += astats->hstats[i].nmalloc;
			sstats->hstats[i].ndalloc += astats->hstats[i].ndalloc;
			sstats->hstats[i].curhchunks +=
			    astats->hstats[i].curhchunks;
		}
	}
}
    657 
    658 static void
    659 ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, unsigned i)
    660 {
    661 	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
    662 	ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];
    663 
    664 	ctl_arena_clear(astats);
    665 	ctl_arena_stats_amerge(tsdn, astats, arena);
    666 	/* Merge into sum stats as well. */
    667 	ctl_arena_stats_smerge(sstats, astats);
    668 }
    669 
/*
 * Create one new arena and extend ctl_stats.arenas to match.  Returns true
 * on failure.  Mutates ctl_stats, so the caller must hold ctl_mtx (which
 * protects ctl_stats.*).
 */
static bool
ctl_grow(tsdn_t *tsdn)
{
	ctl_arena_stats_t *astats;

	/* Initialize new arena. */
	if (arena_init(tsdn, ctl_stats.narenas) == NULL)
		return (true);

	/*
	 * Allocate extended arena stats: one element per arena (including
	 * the new one) plus the trailing summary element.
	 * NOTE(review): if this or the init below fails, the arena created
	 * above stays live while ctl_stats is left unchanged — presumably
	 * acceptable since the caller reports failure; confirm.
	 */
	astats = (ctl_arena_stats_t *)a0malloc((ctl_stats.narenas + 2) *
	    sizeof(ctl_arena_stats_t));
	if (astats == NULL)
		return (true);

	/* Initialize the new astats element. */
	memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
	    sizeof(ctl_arena_stats_t));
	memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
	if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
		a0dalloc(astats);
		return (true);
	}
	/*
	 * Swap merged stats to their new location: the old summary element
	 * (index narenas) moves to the new end (index narenas + 1), and the
	 * newly initialized element becomes the new arena's slot.
	 */
	{
		ctl_arena_stats_t tstats;
		memcpy(&tstats, &astats[ctl_stats.narenas],
		    sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas],
		    &astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas + 1], &tstats,
		    sizeof(ctl_arena_stats_t));
	}
	/* Publish the new array, then the new count. */
	a0dalloc(ctl_stats.arenas);
	ctl_stats.arenas = astats;
	ctl_stats.narenas++;

	return (false);
}
    709 
    710 static void
    711 ctl_refresh(tsdn_t *tsdn)
    712 {
    713 	unsigned i;
    714 	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);
    715 
    716 	/*
    717 	 * Clear sum stats, since they will be merged into by
    718 	 * ctl_arena_refresh().
    719 	 */
    720 	ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]);
    721 
    722 	for (i = 0; i < ctl_stats.narenas; i++)
    723 		tarenas[i] = arena_get(tsdn, i, false);
    724 
    725 	for (i = 0; i < ctl_stats.narenas; i++) {
    726 		bool initialized = (tarenas[i] != NULL);
    727 
    728 		ctl_stats.arenas[i].initialized = initialized;
    729 		if (initialized)
    730 			ctl_arena_refresh(tsdn, tarenas[i], i);
    731 	}
    732 
    733 	if (config_stats) {
    734 		size_t base_allocated, base_resident, base_mapped;
    735 		base_stats_get(tsdn, &base_allocated, &base_resident,
    736 		    &base_mapped);
    737 		ctl_stats.allocated =
    738 		    ctl_stats.arenas[ctl_stats.narenas].allocated_small +
    739 		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large +
    740 		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_huge;
    741 		ctl_stats.active =
    742 		    (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE);
    743 		ctl_stats.metadata = base_allocated +
    744 		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
    745 		    ctl_stats.arenas[ctl_stats.narenas].astats
    746 		    .metadata_allocated;
    747 		ctl_stats.resident = base_resident +
    748 		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
    749 		    ((ctl_stats.arenas[ctl_stats.narenas].pactive +
    750 		    ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE);
    751 		ctl_stats.mapped = base_mapped +
    752 		    ctl_stats.arenas[ctl_stats.narenas].astats.mapped;
    753 		ctl_stats.retained =
    754 		    ctl_stats.arenas[ctl_stats.narenas].astats.retained;
    755 	}
    756 
    757 	ctl_epoch++;
    758 }
    759 
/*
 * One-shot initialization of ctl_stats (idempotent; guarded by ctl_mtx and
 * the ctl_initialized flag).  Returns true on failure.
 */
static bool
ctl_init(tsdn_t *tsdn)
{
	bool ret;

	malloc_mutex_lock(tsdn, &ctl_mtx);
	if (!ctl_initialized) {
		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		ctl_stats.narenas = narenas_total_get();
		ctl_stats.arenas = (ctl_arena_stats_t *)a0malloc(
		    (ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto label_return;
		}
		/* Zero so that ctl_arena_init() sees NULL lstats/hstats. */
		memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
		if (config_stats) {
			unsigned i;
			for (i = 0; i <= ctl_stats.narenas; i++) {
				if (ctl_arena_init(&ctl_stats.arenas[i])) {
					/*
					 * Unwind: release the arrays of every
					 * element initialized so far, then
					 * the elements array itself.
					 */
					unsigned j;
					for (j = 0; j < i; j++) {
						a0dalloc(
						    ctl_stats.arenas[j].lstats);
						a0dalloc(
						    ctl_stats.arenas[j].hstats);
					}
					a0dalloc(ctl_stats.arenas);
					ctl_stats.arenas = NULL;
					ret = true;
					goto label_return;
				}
			}
		}
		/* The summary element is always considered initialized. */
		ctl_stats.arenas[ctl_stats.narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh(tsdn);
		ctl_initialized = true;
	}

	ret = false;
label_return:
	malloc_mutex_unlock(tsdn, &ctl_mtx);
	return (ret);
}
    816 
/*
 * Translate the dot-separated node name (e.g. "arenas.narenas") into a
 * MIB, walking the ctl tree from the root.  On entry *depthp is the
 * capacity of mibp (and nodesp); on success it is updated to the number
 * of components consumed.  If nodesp is non-NULL, the node visited at
 * each level is recorded there.  Returns 0, or ENOENT if the name does
 * not resolve to a terminal node.
 */
static int
ctl_lookup(tsdn_t *tsdn, const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_named_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		/* Empty path component ("", ".foo", "foo..bar"). */
		ret = ENOENT;
		goto label_return;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			const ctl_named_node_t *pnode = node;

			/* Children are named. */
			for (j = 0; j < node->nchildren; j++) {
				const ctl_named_node_t *child =
				    ctl_named_children(node, j);
				if (strlen(child->name) == elen &&
				    strncmp(elm, child->name, elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] =
						    (const ctl_node_t *)node;
					mibp[i] = j;
					break;
				}
			}
			/* node unchanged means no child matched. */
			if (node == pnode) {
				ret = ENOENT;
				goto label_return;
			}
		} else {
			uintmax_t index;
			const ctl_indexed_node_t *inode;

			/* Children are indexed: parse component as decimal. */
			index = malloc_strtoumax(elm, NULL, 10);
			if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
				ret = ENOENT;
				goto label_return;
			}

			inode = ctl_indexed_node(node->children);
			node = inode->index(tsdn, mibp, *depthp, (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}

			if (nodesp != NULL)
				nodesp[i] = (const ctl_node_t *)node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			/* Terminal node. */
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto label_return;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}

		/* Update elm. */
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto label_return;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
label_return:
	return (ret);
}
    913 
    914 int
    915 ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp,
    916     void *newp, size_t newlen)
    917 {
    918 	int ret;
    919 	size_t depth;
    920 	ctl_node_t const *nodes[CTL_MAX_DEPTH];
    921 	size_t mib[CTL_MAX_DEPTH];
    922 	const ctl_named_node_t *node;
    923 
    924 	if (!ctl_initialized && ctl_init(tsd_tsdn(tsd))) {
    925 		ret = EAGAIN;
    926 		goto label_return;
    927 	}
    928 
    929 	depth = CTL_MAX_DEPTH;
    930 	ret = ctl_lookup(tsd_tsdn(tsd), name, nodes, mib, &depth);
    931 	if (ret != 0)
    932 		goto label_return;
    933 
    934 	node = ctl_named_node(nodes[depth-1]);
    935 	if (node != NULL && node->ctl)
    936 		ret = node->ctl(tsd, mib, depth, oldp, oldlenp, newp, newlen);
    937 	else {
    938 		/* The name refers to a partial path through the ctl tree. */
    939 		ret = ENOENT;
    940 	}
    941 
    942 label_return:
    943 	return(ret);
    944 }
    945 
    946 int
    947 ctl_nametomib(tsdn_t *tsdn, const char *name, size_t *mibp, size_t *miblenp)
    948 {
    949 	int ret;
    950 
    951 	if (!ctl_initialized && ctl_init(tsdn)) {
    952 		ret = EAGAIN;
    953 		goto label_return;
    954 	}
    955 
    956 	ret = ctl_lookup(tsdn, name, NULL, mibp, miblenp);
    957 label_return:
    958 	return(ret);
    959 }
    960 
    961 int
    962 ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    963     size_t *oldlenp, void *newp, size_t newlen)
    964 {
    965 	int ret;
    966 	const ctl_named_node_t *node;
    967 	size_t i;
    968 
    969 	if (!ctl_initialized && ctl_init(tsd_tsdn(tsd))) {
    970 		ret = EAGAIN;
    971 		goto label_return;
    972 	}
    973 
    974 	/* Iterate down the tree. */
    975 	node = super_root_node;
    976 	for (i = 0; i < miblen; i++) {
    977 		assert(node);
    978 		assert(node->nchildren > 0);
    979 		if (ctl_named_node(node->children) != NULL) {
    980 			/* Children are named. */
    981 			if (node->nchildren <= (unsigned)mib[i]) {
    982 				ret = ENOENT;
    983 				goto label_return;
    984 			}
    985 			node = ctl_named_children(node, mib[i]);
    986 		} else {
    987 			const ctl_indexed_node_t *inode;
    988 
    989 			/* Indexed element. */
    990 			inode = ctl_indexed_node(node->children);
    991 			node = inode->index(tsd_tsdn(tsd), mib, miblen, mib[i]);
    992 			if (node == NULL) {
    993 				ret = ENOENT;
    994 				goto label_return;
    995 			}
    996 		}
    997 	}
    998 
    999 	/* Call the ctl function. */
   1000 	if (node && node->ctl)
   1001 		ret = node->ctl(tsd, mib, miblen, oldp, oldlenp, newp, newlen);
   1002 	else {
   1003 		/* Partial MIB. */
   1004 		ret = ENOENT;
   1005 	}
   1006 
   1007 label_return:
   1008 	return(ret);
   1009 }
   1010 
   1011 bool
   1012 ctl_boot(void)
   1013 {
   1014 
   1015 	if (malloc_mutex_init(&ctl_mtx, "ctl", WITNESS_RANK_CTL))
   1016 		return (true);
   1017 
   1018 	ctl_initialized = false;
   1019 
   1020 	return (false);
   1021 }
   1022 
/* Acquire ctl_mtx ahead of fork() so the child inherits a sane lock. */
void
ctl_prefork(tsdn_t *tsdn)
{

	malloc_mutex_prefork(tsdn, &ctl_mtx);
}
   1029 
/* Release ctl_mtx in the parent after fork(). */
void
ctl_postfork_parent(tsdn_t *tsdn)
{

	malloc_mutex_postfork_parent(tsdn, &ctl_mtx);
}
   1036 
/* Restore ctl_mtx to a usable state in the child after fork(). */
void
ctl_postfork_child(tsdn_t *tsdn)
{

	malloc_mutex_postfork_child(tsdn, &ctl_mtx);
}
   1043 
   1044 /******************************************************************************/
   1045 /* *_ctl() functions. */
   1046 
/*
 * Helper macros for the *_ctl() handlers.  Each expects an `int ret' local
 * and a `label_return' label in the expanding function, plus the standard
 * oldp/oldlenp/newp/newlen parameters.
 */

/* Fail with EPERM if the caller supplied new data (attempted a write). */
#define	READONLY()	do {						\
	if (newp != NULL || newlen != 0) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
   1053 
/* Fail with EPERM if the caller requested old data (attempted a read). */
#define	WRITEONLY()	do {						\
	if (oldp != NULL || oldlenp != NULL) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
   1060 
/* Fail with EPERM if the caller attempts a read and a write in one call. */
#define	READ_XOR_WRITE()	do {					\
	if ((oldp != NULL && oldlenp != NULL) && (newp != NULL ||	\
	    newlen != 0)) {						\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
   1068 
/*
 * Copy value v (of type t) out through oldp.  On a size mismatch, copy as
 * many bytes as both sides allow, then fail with EINVAL.
 */
#define	READ(v, t)	do {						\
	if (oldp != NULL && oldlenp != NULL) {				\
		if (*oldlenp != sizeof(t)) {				\
			size_t	copylen = (sizeof(t) <= *oldlenp)	\
			    ? sizeof(t) : *oldlenp;			\
			memcpy(oldp, (void *)&(v), copylen);		\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		*(t *)oldp = (v);					\
	}								\
} while (0)
   1081 
/* Copy the new value from newp into v; fail with EINVAL on size mismatch. */
#define	WRITE(v, t)	do {						\
	if (newp != NULL) {						\
		if (newlen != sizeof(t)) {				\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		(v) = *(t *)newp;					\
	}								\
} while (0)
   1091 
   1092 /*
   1093  * There's a lot of code duplication in the following macros due to limitations
   1094  * in how nested cpp macros are expanded.
   1095  */
   1096 #define	CTL_RO_CLGEN(c, l, n, v, t)					\
   1097 static int								\
   1098 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
   1099     size_t *oldlenp, void *newp, size_t newlen)				\
   1100 {									\
   1101 	int ret;							\
   1102 	t oldval;							\
   1103 									\
   1104 	if (!(c))							\
   1105 		return (ENOENT);					\
   1106 	if (l)								\
   1107 		malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);		\
   1108 	READONLY();							\
   1109 	oldval = (v);							\
   1110 	READ(oldval, t);						\
   1111 									\
   1112 	ret = 0;							\
   1113 label_return:								\
   1114 	if (l)								\
   1115 		malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);		\
   1116 	return (ret);							\
   1117 }
   1118 
/*
 * Generate a read-only handler gated on compile-time condition c; always
 * holds ctl_mtx while reading v.
 */
#define	CTL_RO_CGEN(c, n, v, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);			\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);			\
	return (ret);							\
}
   1139 
/* Generate an unconditional read-only handler that holds ctl_mtx. */
#define	CTL_RO_GEN(n, v, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);			\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);			\
	return (ret);							\
}
   1158 
   1159 /*
   1160  * ctl_mtx is not acquired, under the assumption that no pertinent data will
   1161  * mutate during the call.
   1162  */
   1163 #define	CTL_RO_NL_CGEN(c, n, v, t)					\
   1164 static int								\
   1165 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
   1166     size_t *oldlenp, void *newp, size_t newlen)				\
   1167 {									\
   1168 	int ret;							\
   1169 	t oldval;							\
   1170 									\
   1171 	if (!(c))							\
   1172 		return (ENOENT);					\
   1173 	READONLY();							\
   1174 	oldval = (v);							\
   1175 	READ(oldval, t);						\
   1176 									\
   1177 	ret = 0;							\
   1178 label_return:								\
   1179 	return (ret);							\
   1180 }
   1181 
/* Generate a lockless, unconditional read-only handler. */
#define	CTL_RO_NL_GEN(n, v, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
   1198 
/*
 * Generate a lockless read-only handler gated on condition c whose value
 * comes from the tsd accessor m(tsd).
 */
#define	CTL_TSD_RO_NL_CGEN(c, n, m, t)					\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	oldval = (m(tsd));						\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
   1217 
/* Generate a handler for a config.* node: reports compile-time constant n. */
#define	CTL_RO_CONFIG_GEN(n, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = n;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
   1234 
   1235 /******************************************************************************/
   1236 
/* "version": jemalloc version string (read-only, lockless). */
CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
   1238 
/*
 * "epoch": writing any value refreshes the cached ctl_stats snapshot and
 * bumps the epoch counter; reading returns the current epoch.
 */
static int
epoch_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	UNUSED uint64_t newval;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	/* The written value is validated for size but otherwise ignored. */
	WRITE(newval, uint64_t);
	if (newp != NULL)
		ctl_refresh(tsd_tsdn(tsd));
	READ(ctl_epoch, uint64_t);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1257 
   1258 /******************************************************************************/
   1259 
/* "config.*": compile-time configuration flags (read-only). */
CTL_RO_CONFIG_GEN(config_cache_oblivious, bool)
CTL_RO_CONFIG_GEN(config_debug, bool)
CTL_RO_CONFIG_GEN(config_fill, bool)
CTL_RO_CONFIG_GEN(config_lazy_lock, bool)
CTL_RO_CONFIG_GEN(config_malloc_conf, const char *)
CTL_RO_CONFIG_GEN(config_munmap, bool)
CTL_RO_CONFIG_GEN(config_prof, bool)
CTL_RO_CONFIG_GEN(config_prof_libgcc, bool)
CTL_RO_CONFIG_GEN(config_prof_libunwind, bool)
CTL_RO_CONFIG_GEN(config_stats, bool)
CTL_RO_CONFIG_GEN(config_tcache, bool)
CTL_RO_CONFIG_GEN(config_tls, bool)
CTL_RO_CONFIG_GEN(config_utrace, bool)
CTL_RO_CONFIG_GEN(config_valgrind, bool)
CTL_RO_CONFIG_GEN(config_xmalloc, bool)
   1275 
   1276 /******************************************************************************/
   1277 
/* "opt.*": run-time option values, fixed after option processing. */
CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, unsigned)
CTL_RO_NL_GEN(opt_purge, purge_mode_names[opt_purge], const char *)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_decay_time, opt_decay_time, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_NL_CGEN(config_prof, opt_prof_active, opt_prof_active, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_thread_active_init,
    opt_prof_thread_active_init, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
   1305 
   1306 /******************************************************************************/
   1307 
/*
 * "thread.arena": read the calling thread's arena index, or migrate the
 * thread (and its tcache, if enabled) to another arena by writing a new
 * index.  EFAULT if the new index is out of range; EAGAIN if the target
 * arena cannot be initialized.
 */
static int
thread_arena_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	arena_t *oldarena;
	unsigned newind, oldind;

	oldarena = arena_choose(tsd, NULL);
	if (oldarena == NULL)
		return (EAGAIN);

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	newind = oldind = oldarena->ind;
	WRITE(newind, unsigned);
	/* The pre-write index is what a combined read/write call reports. */
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *newarena;

		if (newind >= ctl_stats.narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto label_return;
		}

		/* Initialize arena if necessary. */
		newarena = arena_get(tsd_tsdn(tsd), newind, true);
		if (newarena == NULL) {
			ret = EAGAIN;
			goto label_return;
		}
		/* Set new arena/tcache associations. */
		arena_migrate(tsd, oldind, newind);
		if (config_tcache) {
			tcache_t *tcache = tsd_tcache_get(tsd);
			if (tcache != NULL) {
				tcache_arena_reassociate(tsd_tsdn(tsd), tcache,
				    oldarena, newarena);
			}
		}
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1355 
/* "thread.allocated*"/"thread.deallocated*": per-thread byte counters. */
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocated, tsd_thread_allocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocatedp, tsd_thread_allocatedp_get,
    uint64_t *)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocated, tsd_thread_deallocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
    tsd_thread_deallocatedp_get, uint64_t *)
   1364 
/*
 * "thread.tcache.enabled": read or toggle the calling thread's tcache.
 * The old value is captured before any write, so a combined read/write
 * call reports the previous setting.
 */
static int
thread_tcache_enabled_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_tcache)
		return (ENOENT);

	oldval = tcache_enabled_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		tcache_enabled_set(*(bool *)newp);
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
   1389 
/*
 * "thread.tcache.flush": flush the calling thread's tcache.  Accepts
 * neither old nor new data.
 */
static int
thread_tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_tcache)
		return (ENOENT);

	READONLY();
	WRITEONLY();

	tcache_flush();

	ret = 0;
label_return:
	return (ret);
}
   1408 
/*
 * "thread.prof.name": read or set the calling thread's profiling name.
 * Read and write are mutually exclusive within one call.
 */
static int
thread_prof_name_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_prof)
		return (ENOENT);

	READ_XOR_WRITE();

	if (newp != NULL) {
		/* The new value is passed as a pointer to a C string. */
		if (newlen != sizeof(const char *)) {
			ret = EINVAL;
			goto label_return;
		}

		if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
		    0)
			goto label_return;
	} else {
		const char *oldname = prof_thread_name_get(tsd);
		READ(oldname, const char *);
	}

	ret = 0;
label_return:
	return (ret);
}
   1438 
/*
 * "thread.prof.active": read or toggle per-thread profiling sampling.
 * The old value is captured before any write.
 */
static int
thread_prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	oldval = prof_thread_active_get(tsd);
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		if (prof_thread_active_set(tsd, *(bool *)newp)) {
			ret = EAGAIN;
			goto label_return;
		}
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
   1466 
   1467 /******************************************************************************/
   1468 
   1469 static int
   1470 tcache_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
   1471     size_t *oldlenp, void *newp, size_t newlen)
   1472 {
   1473 	int ret;
   1474 	unsigned tcache_ind;
   1475 
   1476 	if (!config_tcache)
   1477 		return (ENOENT);
   1478 
   1479 	READONLY();
   1480 	if (tcaches_create(tsd, &tcache_ind)) {
   1481 		ret = EFAULT;
   1482 		goto label_return;
   1483 	}
   1484 	READ(tcache_ind, unsigned);
   1485 
   1486 	ret = 0;
   1487 label_return:
   1488 	return ret;
   1489 }
   1490 
/*
 * "tcache.flush": flush the explicit tcache whose index is supplied via
 * newp.  Write-only; EFAULT if no index was written.
 */
static int
tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	WRITEONLY();
	/* UINT_MAX sentinel detects a missing write. */
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_flush(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
   1514 
/*
 * "tcache.destroy": destroy the explicit tcache whose index is supplied
 * via newp.  Write-only; EFAULT if no index was written.
 */
static int
tcache_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	WRITEONLY();
	/* UINT_MAX sentinel detects a missing write. */
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_destroy(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
   1538 
   1539 /******************************************************************************/
   1540 
/*
 * Purge dirty pages of one arena, or of every arena when arena_ind equals
 * ctl_stats.narenas.  `all' selects full purging (true) versus decay-based
 * purging (false).  ctl_mtx is held only long enough to snapshot the arena
 * pointers; the actual purge work runs unlocked.
 */
static void
arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
{

	malloc_mutex_lock(tsdn, &ctl_mtx);
	{
		unsigned narenas = ctl_stats.narenas;

		if (arena_ind == narenas) {
			unsigned i;
			VARIABLE_ARRAY(arena_t *, tarenas, narenas);

			for (i = 0; i < narenas; i++)
				tarenas[i] = arena_get(tsdn, i, false);

			/*
			 * No further need to hold ctl_mtx, since narenas and
			 * tarenas contain everything needed below.
			 */
			malloc_mutex_unlock(tsdn, &ctl_mtx);

			for (i = 0; i < narenas; i++) {
				if (tarenas[i] != NULL)
					arena_purge(tsdn, tarenas[i], all);
			}
		} else {
			arena_t *tarena;

			assert(arena_ind < narenas);

			tarena = arena_get(tsdn, arena_ind, false);

			/* No further need to hold ctl_mtx. */
			malloc_mutex_unlock(tsdn, &ctl_mtx);

			if (tarena != NULL)
				arena_purge(tsdn, tarena, all);
		}
	}
}
   1581 
/*
 * "arena.<i>.purge": fully purge arena mib[1] (index narenas means all
 * arenas).  Accepts neither old nor new data.
 */
static int
arena_i_purge_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge(tsd_tsdn(tsd), (unsigned)mib[1], true);

	ret = 0;
label_return:
	return (ret);
}
   1596 
/*
 * "arena.<i>.decay": trigger decay-based purging for arena mib[1] (index
 * narenas means all arenas).  Accepts neither old nor new data.
 */
static int
arena_i_decay_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge(tsd_tsdn(tsd), (unsigned)mib[1], false);

	ret = 0;
label_return:
	return (ret);
}
   1611 
/*
 * "arena.<i>.reset": discard all allocations belonging to arena mib[1].
 * Unsupported (EFAULT) when Valgrind or quarantine may still reference
 * freed regions.  Accepts neither old nor new data.
 */
static int
arena_i_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind;
	arena_t *arena;

	READONLY();
	WRITEONLY();

	if ((config_valgrind && unlikely(in_valgrind)) || (config_fill &&
	    unlikely(opt_quarantine))) {
		ret = EFAULT;
		goto label_return;
	}

	arena_ind = (unsigned)mib[1];
	if (config_debug) {
		malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
		assert(arena_ind < ctl_stats.narenas);
		malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	}
	/* Only arenas beyond the automatic ones may be reset. */
	assert(arena_ind >= opt_narenas);

	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);

	arena_reset(tsd, arena);

	ret = 0;
label_return:
	return (ret);
}
   1645 
/*
 * "arena.<i>.dss": read or set the dss (sbrk) precedence for arena mib[1];
 * index narenas addresses the default applied to new arenas.  EINVAL for
 * an unrecognized precedence name; EFAULT if the setting is rejected.
 */
static int
arena_i_dss_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	const char *dss = NULL;
	unsigned arena_ind = (unsigned)mib[1];
	dss_prec_t dss_prec_old = dss_prec_limit;
	dss_prec_t dss_prec = dss_prec_limit;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	WRITE(dss, const char *);
	if (dss != NULL) {
		int i;
		bool match = false;

		/* Map the precedence name onto its enum value. */
		for (i = 0; i < dss_prec_limit; i++) {
			if (strcmp(dss_prec_names[i], dss) == 0) {
				dss_prec = i;
				match = true;
				break;
			}
		}

		if (!match) {
			ret = EINVAL;
			goto label_return;
		}
	}

	if (arena_ind < ctl_stats.narenas) {
		/* Per-arena setting. */
		arena_t *arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
		if (arena == NULL || (dss_prec != dss_prec_limit &&
		    arena_dss_prec_set(tsd_tsdn(tsd), arena, dss_prec))) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = arena_dss_prec_get(tsd_tsdn(tsd), arena);
	} else {
		/* Default setting for arenas created in the future. */
		if (dss_prec != dss_prec_limit &&
		    chunk_dss_prec_set(dss_prec)) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = chunk_dss_prec_get();
	}

	/* Report the previous precedence by name. */
	dss = dss_prec_names[dss_prec_old];
	READ(dss, const char *);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1701 
   1702 static int
   1703 arena_i_lg_dirty_mult_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
   1704     void *oldp, size_t *oldlenp, void *newp, size_t newlen)
   1705 {
   1706 	int ret;
   1707 	unsigned arena_ind = (unsigned)mib[1];
   1708 	arena_t *arena;
   1709 
   1710 	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
   1711 	if (arena == NULL) {
   1712 		ret = EFAULT;
   1713 		goto label_return;
   1714 	}
   1715 
   1716 	if (oldp != NULL && oldlenp != NULL) {
   1717 		size_t oldval = arena_lg_dirty_mult_get(tsd_tsdn(tsd), arena);
   1718 		READ(oldval, ssize_t);
   1719 	}
   1720 	if (newp != NULL) {
   1721 		if (newlen != sizeof(ssize_t)) {
   1722 			ret = EINVAL;
   1723 			goto label_return;
   1724 		}
   1725 		if (arena_lg_dirty_mult_set(tsd_tsdn(tsd), arena,
   1726 		    *(ssize_t *)newp)) {
   1727 			ret = EFAULT;
   1728 			goto label_return;
   1729 		}
   1730 	}
   1731 
   1732 	ret = 0;
   1733 label_return:
   1734 	return (ret);
   1735 }
   1736 
   1737 static int
   1738 arena_i_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
   1739     size_t *oldlenp, void *newp, size_t newlen)
   1740 {
   1741 	int ret;
   1742 	unsigned arena_ind = (unsigned)mib[1];
   1743 	arena_t *arena;
   1744 
   1745 	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
   1746 	if (arena == NULL) {
   1747 		ret = EFAULT;
   1748 		goto label_return;
   1749 	}
   1750 
   1751 	if (oldp != NULL && oldlenp != NULL) {
   1752 		size_t oldval = arena_decay_time_get(tsd_tsdn(tsd), arena);
   1753 		READ(oldval, ssize_t);
   1754 	}
   1755 	if (newp != NULL) {
   1756 		if (newlen != sizeof(ssize_t)) {
   1757 			ret = EINVAL;
   1758 			goto label_return;
   1759 		}
   1760 		if (arena_decay_time_set(tsd_tsdn(tsd), arena,
   1761 		    *(ssize_t *)newp)) {
   1762 			ret = EFAULT;
   1763 			goto label_return;
   1764 		}
   1765 	}
   1766 
   1767 	ret = 0;
   1768 label_return:
   1769 	return (ret);
   1770 }
   1771 
/*
 * "arena.<i>.chunk_hooks": read, or atomically replace and read back, the
 * chunk management hooks of arena mib[1].  EFAULT if the arena is absent.
 */
static int
arena_i_chunk_hooks_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind = (unsigned)mib[1];
	arena_t *arena;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	if (arena_ind < narenas_total_get() && (arena =
	    arena_get(tsd_tsdn(tsd), arena_ind, false)) != NULL) {
		if (newp != NULL) {
			/* Swap in the new hooks and report the old ones. */
			chunk_hooks_t old_chunk_hooks, new_chunk_hooks;
			WRITE(new_chunk_hooks, chunk_hooks_t);
			old_chunk_hooks = chunk_hooks_set(tsd_tsdn(tsd), arena,
			    &new_chunk_hooks);
			READ(old_chunk_hooks, chunk_hooks_t);
		} else {
			chunk_hooks_t old_chunk_hooks =
			    chunk_hooks_get(tsd_tsdn(tsd), arena);
			READ(old_chunk_hooks, chunk_hooks_t);
		}
	} else {
		ret = EFAULT;
		goto label_return;
	}
	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1803 
   1804 static const ctl_named_node_t *
   1805 arena_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
   1806 {
   1807 	const ctl_named_node_t *ret;
   1808 
   1809 	malloc_mutex_lock(tsdn, &ctl_mtx);
   1810 	if (i > ctl_stats.narenas) {
   1811 		ret = NULL;
   1812 		goto label_return;
   1813 	}
   1814 
   1815 	ret = super_arena_i_node;
   1816 label_return:
   1817 	malloc_mutex_unlock(tsdn, &ctl_mtx);
   1818 	return (ret);
   1819 }
   1820 
   1821 /******************************************************************************/
   1822 
/*
 * "arenas.narenas": report the number of arenas (read-only).
 * NOTE(review): oldlenp is dereferenced without a NULL check after
 * READONLY(); the mallctl contract presumably requires it for reads —
 * confirm against callers.
 */
static int
arenas_narenas_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (*oldlenp != sizeof(unsigned)) {
		ret = EINVAL;
		goto label_return;
	}
	narenas = ctl_stats.narenas;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1844 
/*
 * Ctl handler for arenas.initialized: fill oldp with one bool per arena
 * indicating whether ctl has observed it as initialized (read-only).
 */
static int
arenas_initialized_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
		/*
		 * Size mismatch: still copy out as many entries as fit, but
		 * report EINVAL so the caller knows the buffer was not the
		 * expected length.
		 */
		ret = EINVAL;
		nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
		    ? (unsigned)(*oldlenp / sizeof(bool)) : ctl_stats.narenas;
	} else {
		ret = 0;
		nread = ctl_stats.narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1870 
   1871 static int
   1872 arenas_lg_dirty_mult_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
   1873     void *oldp, size_t *oldlenp, void *newp, size_t newlen)
   1874 {
   1875 	int ret;
   1876 
   1877 	if (oldp != NULL && oldlenp != NULL) {
   1878 		size_t oldval = arena_lg_dirty_mult_default_get();
   1879 		READ(oldval, ssize_t);
   1880 	}
   1881 	if (newp != NULL) {
   1882 		if (newlen != sizeof(ssize_t)) {
   1883 			ret = EINVAL;
   1884 			goto label_return;
   1885 		}
   1886 		if (arena_lg_dirty_mult_default_set(*(ssize_t *)newp)) {
   1887 			ret = EFAULT;
   1888 			goto label_return;
   1889 		}
   1890 	}
   1891 
   1892 	ret = 0;
   1893 label_return:
   1894 	return (ret);
   1895 }
   1896 
   1897 static int
   1898 arenas_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
   1899     size_t *oldlenp, void *newp, size_t newlen)
   1900 {
   1901 	int ret;
   1902 
   1903 	if (oldp != NULL && oldlenp != NULL) {
   1904 		size_t oldval = arena_decay_time_default_get();
   1905 		READ(oldval, ssize_t);
   1906 	}
   1907 	if (newp != NULL) {
   1908 		if (newlen != sizeof(ssize_t)) {
   1909 			ret = EINVAL;
   1910 			goto label_return;
   1911 		}
   1912 		if (arena_decay_time_default_set(*(ssize_t *)newp)) {
   1913 			ret = EFAULT;
   1914 			goto label_return;
   1915 		}
   1916 	}
   1917 
   1918 	ret = 0;
   1919 label_return:
   1920 	return (ret);
   1921 }
   1922 
/* Read-only, no-lock queries of global arena parameters. */
CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
/* Per-bin metadata, indexed by mib[2] (validated by arenas_bin_i_index). */
CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
   1931 static const ctl_named_node_t *
   1932 arenas_bin_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
   1933 {
   1934 
   1935 	if (i > NBINS)
   1936 		return (NULL);
   1937 	return (super_arenas_bin_i_node);
   1938 }
   1939 
/* Number of large size classes, and per-class size (offset by NBINS). */
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, unsigned)
CTL_RO_NL_GEN(arenas_lrun_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
   1942 static const ctl_named_node_t *
   1943 arenas_lrun_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
   1944 {
   1945 
   1946 	if (i > nlclasses)
   1947 		return (NULL);
   1948 	return (super_arenas_lrun_i_node);
   1949 }
   1950 
/* Number of huge size classes, and per-class size. */
CTL_RO_NL_GEN(arenas_nhchunks, nhclasses, unsigned)
CTL_RO_NL_GEN(arenas_hchunk_i_size, index2size(NBINS+nlclasses+(szind_t)mib[2]),
    size_t)
   1954 static const ctl_named_node_t *
   1955 arenas_hchunk_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
   1956 {
   1957 
   1958 	if (i > nhclasses)
   1959 		return (NULL);
   1960 	return (super_arenas_hchunk_i_node);
   1961 }
   1962 
/*
 * Ctl handler for arenas.extend: create a new arena and report its index.
 * Read-only from the caller's perspective, but has the side effect of
 * growing the arena set.
 */
static int
arenas_extend_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (ctl_grow(tsd_tsdn(tsd))) {
		ret = EAGAIN;
		goto label_return;
	}
	/* ctl_grow() bumped narenas; the new arena is the last index. */
	narenas = ctl_stats.narenas - 1;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
   1984 
   1985 /******************************************************************************/
   1986 
/*
 * Ctl handler for prof.thread_active_init: read and/or write the initial
 * profiling-active state inherited by newly created threads.  Returns
 * ENOENT when profiling support is compiled out.
 */
static int
prof_thread_active_init_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		/*
		 * The setter's return value (presumably the previous state)
		 * is what READ() reports back to the caller.
		 */
		oldval = prof_thread_active_init_set(tsd_tsdn(tsd),
		    *(bool *)newp);
	} else
		oldval = prof_thread_active_init_get(tsd_tsdn(tsd));
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
   2012 
/*
 * Ctl handler for prof.active: read and/or write the global profiling
 * activity flag.  Returns ENOENT when profiling support is compiled out.
 */
static int
prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		/*
		 * The setter's return value (presumably the previous state)
		 * is what READ() reports back to the caller.
		 */
		oldval = prof_active_set(tsd_tsdn(tsd), *(bool *)newp);
	} else
		oldval = prof_active_get(tsd_tsdn(tsd));
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
   2037 
/*
 * Ctl handler for prof.dump (write-only): trigger a manual memory profile
 * dump, optionally to a caller-supplied filename.  filename stays NULL when
 * the caller does not write one -- presumably prof_mdump() then chooses a
 * default name; confirm against its definition.
 */
static int
prof_dump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	const char *filename = NULL;

	if (!config_prof)
		return (ENOENT);

	/* WRITEONLY()/WRITE() may jump to label_return on bad arguments. */
	WRITEONLY();
	WRITE(filename, const char *);

	if (prof_mdump(tsd, filename)) {
		ret = EFAULT;
		goto label_return;
	}

	ret = 0;
label_return:
	return (ret);
}
   2060 
/*
 * Ctl handler for prof.gdump: read and/or write the flag controlling
 * profile dumps at chunk-growth events.  Returns ENOENT when profiling
 * support is compiled out.
 */
static int
prof_gdump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		/*
		 * The setter's return value (presumably the previous state)
		 * is what READ() reports back to the caller.
		 */
		oldval = prof_gdump_set(tsd_tsdn(tsd), *(bool *)newp);
	} else
		oldval = prof_gdump_get(tsd_tsdn(tsd));
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
   2085 
/*
 * Ctl handler for prof.reset (write-only): reset profiling state and
 * optionally change the sample rate (lg_sample); when no new value is
 * written, the current lg_prof_sample is retained.
 */
static int
prof_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	size_t lg_sample = lg_prof_sample;

	if (!config_prof)
		return (ENOENT);

	WRITEONLY();
	WRITE(lg_sample, size_t);
	/* Clamp to the largest representable lg (bits in uint64_t, minus 1). */
	if (lg_sample >= (sizeof(uint64_t) << 3))
		lg_sample = (sizeof(uint64_t) << 3) - 1;

	prof_reset(tsd, lg_sample);

	ret = 0;
label_return:
	return (ret);
}
   2107 
/* Read-only profiling parameters. */
CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
CTL_RO_NL_CGEN(config_prof, lg_prof_sample, lg_prof_sample, size_t)
   2110 
   2111 /******************************************************************************/
   2112 
/* Global statistics, read from the ctl_stats snapshot. */
CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_metadata, ctl_stats.metadata, size_t)
CTL_RO_CGEN(config_stats, stats_resident, ctl_stats.resident, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_retained, ctl_stats.retained, size_t)
   2120 
/* Per-arena statistics, indexed by arena (mib[2]). */
CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_lg_dirty_mult, ctl_stats.arenas[mib[2]].lg_dirty_mult,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_decay_time, ctl_stats.arenas[mib[2]].decay_time,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
    ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_retained,
    ctl_stats.arenas[mib[2]].astats.retained, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
    ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
    ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
    ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
    ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
    ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)

/* Per-arena allocation statistics, broken out by size category. */
CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_huge, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_huge, uint64_t)
/* huge_nrequests deliberately reports nmalloc_huge (1 request == 1 malloc). */
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nrequests,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t) /* Intentional. */

/* Per-arena per-bin statistics, indexed by bin (mib[4]). */
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curregs,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curregs, size_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)
   2187 
   2188 static const ctl_named_node_t *
   2189 stats_arenas_i_bins_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
   2190     size_t j)
   2191 {
   2192 
   2193 	if (j > NBINS)
   2194 		return (NULL);
   2195 	return (super_stats_arenas_i_bins_j_node);
   2196 }
   2197 
/* Per-arena large-run statistics, indexed by size class (mib[4]). */
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)
   2206 
   2207 static const ctl_named_node_t *
   2208 stats_arenas_i_lruns_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
   2209     size_t j)
   2210 {
   2211 
   2212 	if (j > nlclasses)
   2213 		return (NULL);
   2214 	return (super_stats_arenas_i_lruns_j_node);
   2215 }
   2216 
/* Per-arena huge-chunk statistics, indexed by size class (mib[4]). */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nmalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_ndalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].ndalloc, uint64_t)
/* nrequests deliberately reports nmalloc (1 request == 1 malloc for huge). */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nrequests,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, /* Intentional. */
    uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_curhchunks,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].curhchunks, size_t)
   2226 
   2227 static const ctl_named_node_t *
   2228 stats_arenas_i_hchunks_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
   2229     size_t j)
   2230 {
   2231 
   2232 	if (j > nhclasses)
   2233 		return (NULL);
   2234 	return (super_stats_arenas_i_hchunks_j_node);
   2235 }
   2236 
/*
 * Indexed-node resolver for the stats.arenas.<i> subtree: reject indices
 * beyond narenas, as well as arenas ctl has not observed as initialized.
 */
static const ctl_named_node_t *
stats_arenas_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t * ret;

	malloc_mutex_lock(tsdn, &ctl_mtx);
	/*
	 * NOTE(review): as in arena_i_index, '>' admits i == narenas;
	 * presumably ctl_stats.arenas has narenas + 1 entries (a merged
	 * summary slot at the end) -- confirm against its allocation site.
	 */
	if (i > ctl_stats.narenas || !ctl_stats.arenas[i].initialized) {
		ret = NULL;
		goto label_return;
	}

	ret = super_stats_arenas_i_node;
label_return:
	malloc_mutex_unlock(tsdn, &ctl_mtx);
	return (ret);
}
   2253