#ifndef JEMALLOC_INTERNAL_INLINES_B_H
#define JEMALLOC_INTERNAL_INLINES_B_H

#include "jemalloc/internal/rtree.h"

/* Choose an arena based on a per-thread value. */
static inline arena_t *
arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) {
	arena_t *ret;

	if (arena != NULL) {
		return arena;
	}

	/* During reentrancy, arena 0 is the safest bet. */
	if (unlikely(tsd_reentrancy_level_get(tsd) > 0)) {
		return arena_get(tsd_tsdn(tsd), 0, true);
	}

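	/*
	 * tsd caches two arena bindings: the application arena and the
	 * internal (metadata) arena; read whichever this call asked for.
	 */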
	ret = internal ? tsd_iarena_get(tsd) : tsd_arena_get(tsd);
	if (unlikely(ret == NULL)) {
		ret = arena_choose_hard(tsd, internal);
		assert(ret);
		if (tcache_available(tsd)) {
			tcache_t *tcache = tcache_get(tsd);
			if (tcache->arena != NULL) {
				/* See comments in tcache_data_init(). */
				assert(tcache->arena ==
				    arena_get(tsd_tsdn(tsd), 0, false));
				if (tcache->arena != ret) {
					tcache_arena_reassociate(tsd_tsdn(tsd),
					    tcache, ret);
				}
			} else {
				tcache_arena_associate(tsd_tsdn(tsd), tcache,
				    ret);
			}
		}
	}

	/*
	 * Note that for the percpu arena, if the current arena is outside of
	 * the auto percpu arena range (i.e. the thread is assigned to a
	 * manually managed arena), the percpu arena logic is skipped.
	 */
	if (have_percpu_arena && PERCPU_ARENA_ENABLED(opt_percpu_arena) &&
	    !internal && (arena_ind_get(ret) <
	    percpu_arena_ind_limit(opt_percpu_arena)) && (ret->last_thd !=
	    tsd_tsdn(tsd))) {
		unsigned ind = percpu_arena_choose();
		if (arena_ind_get(ret) != ind) {
			percpu_arena_update(tsd, ind);
			ret = tsd_arena_get(tsd);
		}
		ret->last_thd = tsd_tsdn(tsd);
	}

	return ret;
}

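/*
 * Return the arena to use for application allocations, binding the thread to
 * one first if necessary.  Illustrative call pattern (everything other than
 * arena_choose() below is a placeholder):
 *
 *	arena_t *arena = arena_choose(tsd, NULL);
 *	... allocate from arena ...
 */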
static inline arena_t *
arena_choose(tsd_t *tsd, arena_t *arena) {
	return arena_choose_impl(tsd, arena, false);
}

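/*
 * Variant of arena_choose() for internal (metadata) allocations; selects the
 * thread's internal arena rather than its application arena.
 */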
static inline arena_t *
arena_ichoose(tsd_t *tsd, arena_t *arena) {
	return arena_choose_impl(tsd, arena, true);
}

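/*
 * True iff the arena is one of the automatically managed arenas, i.e. its
 * index is below narenas_auto; manually created arenas have higher indices.
 */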
static inline bool
arena_is_auto(arena_t *arena) {
	assert(narenas_auto > 0);
	return (arena_ind_get(arena) < narenas_auto);
}

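/*
 * Look up the extent that backs ptr in the global extents radix tree, using
 * the thread's cached rtree context when one is available (the stack
 * fallback covers contexts without tsd).
 */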
JEMALLOC_ALWAYS_INLINE extent_t *
iealloc(tsdn_t *tsdn, const void *ptr) {
	rtree_ctx_t rtree_ctx_fallback;
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);

	return rtree_extent_read(tsdn, &extents_rtree, rtree_ctx,
	    (uintptr_t)ptr, true);
}

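/*
 * Usage sketch (illustrative only; ptr must be a jemalloc-owned pointer for
 * the lookup to be meaningful):
 *
 *	extent_t *extent = iealloc(tsdn, ptr);
 *	size_t size = extent_size_get(extent);
 */
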
#endif /* JEMALLOC_INTERNAL_INLINES_B_H */