
Lines matching refs: arena (excerpted source lines from the LowLevelAlloc arena allocator; the leading number on each line is its position in the source file)

71       LowLevelAlloc::Arena *arena; // pointer to parent arena
181 // Arena implementation
183 struct LowLevelAlloc::Arena {
184 Arena() : mu(SpinLock::LINKER_INITIALIZED) {} // does nothing; for static init
185 explicit Arena(int) : pagesize(0) {} // set pagesize to zero explicitly
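
The two constructors above are the usual static-initialization dodge: the default constructor only linker-initializes the spinlock, so a file-scope Arena is safe to touch before any constructor has run, while Arena(int) zeroes pagesize so ArenaInit() can detect on first use that the arena still needs lazy setup. A minimal sketch of the idiom, with hypothetical names (Widget and WidgetInit are not from this file):

    #include <cstddef>

    struct Widget {
      Widget() {}                        // static instances rely on zeroed BSS
      explicit Widget(int) : size(0) {}  // heap instances mark "not yet set up"
      size_t size;                       // 0 means "run lazy initialization"
    };

    static Widget static_widget;         // zero-initialized before main()

    void WidgetInit(Widget *w) {
      if (w->size == 0) {                // first use: finish setup lazily
        w->size = 4096;
      }
    }
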
200 // The default arena, which is used when 0 is passed instead of an Arena
202 static struct LowLevelAlloc::Arena default_arena;
206 // The unhooked arenas exist so that there is no malloc hook reporting even during arena creation.
207 static struct LowLevelAlloc::Arena unhooked_arena;
208 static struct LowLevelAlloc::Arena unhooked_async_sig_safe_arena;
217 explicit ArenaLock(LowLevelAlloc::Arena *arena)
218 EXCLUSIVE_LOCK_FUNCTION(arena->mu)
219 : left_(false), mask_valid_(false), arena_(arena) {
220 if ((arena->flags & LowLevelAlloc::kAsyncSignalSafe) != 0) {
221 // We've decided not to support async-signal-safe arena use until there is a demonstrated need.
230 RAW_CHECK(false, "We do not yet support async-signal-safe arena.");
235 ~ArenaLock() { RAW_CHECK(this->left_, "haven't left Arena region"); }
251 LowLevelAlloc::Arena *arena_;
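
ArenaLock is a scoped guard with a twist: the destructor does not unlock, it only asserts that the critical section was left, which forces every code path through an explicit release point (and gives that release point a place to restore a signal mask in the async-signal-safe case). A hedged sketch of the shape, using std::mutex in place of the file's SpinLock:

    #include <cassert>
    #include <mutex>

    class ScopedRegion {
     public:
      explicit ScopedRegion(std::mutex *mu) : left_(false), mu_(mu) {
        mu_->lock();
      }
      ~ScopedRegion() { assert(left_ && "haven't left region"); }
      void Leave() {      // must be called on every path before destruction
        mu_->unlock();
        left_ = true;
      }
     private:
      bool left_;
      std::mutex *mu_;
    };
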
262 // Initialize the fields of an Arena
263 static void ArenaInit(LowLevelAlloc::Arena *arena) {
264 if (arena->pagesize == 0) {
265 arena->pagesize = getpagesize();
267 arena->roundup = 16;
268 while (arena->roundup < sizeof (arena->freelist.header)) {
269 arena->roundup += arena->roundup;
273 arena->min_size = 2 * arena->roundup;
274 arena->freelist.header.size = 0;
275 arena->freelist.header.magic =
276 Magic(kMagicUnallocated, &arena->freelist.header);
277 arena->freelist.header.arena = arena;
278 arena->freelist.levels = 0;
279 memset(arena->freelist.next, 0, sizeof (arena->freelist.next));
280 arena->allocation_count = 0;
281 if (arena == &default_arena) {
282 // Default arena should be hooked, e.g. for heap-checker to trace
283 // pointer chains through objects in the default arena.
284 arena->flags = LowLevelAlloc::kCallMallocHook;
285 } else if (arena == &unhooked_async_sig_safe_arena) {
286 arena->flags = LowLevelAlloc::kAsyncSignalSafe;
288 arena->flags = 0; // other arenas' flags may be overridden by the client, but the unhooked arenas keep 0 in 'flags'
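
ArenaInit's doubling loop rounds the allocation granularity up to the smallest power of two (at least 16) that can hold a block header; every request size is later rounded up to a multiple of this unit. A standalone sketch of the same arithmetic:

    #include <cstddef>
    #include <cstdio>

    size_t GranularityFor(size_t header_size) {
      size_t roundup = 16;
      while (roundup < header_size) {
        roundup += roundup;                    // double: stays a power of two
      }
      return roundup;
    }

    size_t RoundUpTo(size_t n, size_t unit) {
      return ((n + unit - 1) / unit) * unit;   // round n up to a multiple of unit
    }

    int main() {
      printf("%zu\n", GranularityFor(24));     // prints 32
      printf("%zu\n", RoundUpTo(100, 32));     // prints 128
    }
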
295 LowLevelAlloc::Arena *LowLevelAlloc::NewArena(int32 flags,
296 Arena *meta_data_arena) {
297 RAW_CHECK(meta_data_arena != 0, "must pass a valid arena");
305 // Arena(0) uses the constructor for non-static contexts
306 Arena *result =
307 new (AllocWithArena(sizeof (*result), meta_data_arena)) Arena(0);
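
NewArena bootstraps itself: storage for the Arena object comes from meta_data_arena, and placement new constructs it there with the Arena(0) constructor, so pagesize starts at zero and ArenaInit() finishes the job on first allocation. A sketch of the placement-new idiom (RawAlloc is a hypothetical stand-in for AllocWithArena):

    #include <cstddef>
    #include <cstdlib>
    #include <new>

    struct Thing {
      explicit Thing(int) : initialized(false) {}
      bool initialized;
    };

    void *RawAlloc(size_t n) { return std::malloc(n); }  // placeholder allocator

    Thing *MakeThing() {
      // Construct a Thing inside storage we obtained ourselves.
      return new (RawAlloc(sizeof(Thing))) Thing(0);
    }
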
313 // L < arena->mu, L < arena->arena->mu
314 bool LowLevelAlloc::DeleteArena(Arena *arena) {
315 RAW_CHECK(arena != 0 && arena != &default_arena && arena != &unhooked_arena,
316 "may not delete default arena");
317 ArenaLock section(arena);
318 bool empty = (arena->allocation_count == 0);
321 while (arena->freelist.next[0] != 0) {
322 AllocList *region = arena->freelist.next[0];
324 arena->freelist.next[0] = region->next[0];
328 RAW_CHECK(region->header.arena == arena,
329 "bad arena pointer in DeleteArena()");
330 RAW_CHECK(size % arena->pagesize == 0,
331 "empty arena has non-page-aligned block size");
332 RAW_CHECK(reinterpret_cast<intptr_t>(region) % arena->pagesize == 0,
333 "empty arena has non-page-aligned block");
335 if ((arena->flags & LowLevelAlloc::kAsyncSignalSafe) == 0) {
343 Free(arena);
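
DeleteArena only succeeds on an empty arena: with the lock held it checks allocation_count, then walks level 0 of the free list and hands each region back to the OS, sanity-checking the back-pointer and that every region is page-aligned and a whole number of pages. Finally the Arena object itself is released with Free(), returning its storage to the meta-data arena. A POSIX-only sketch of the per-region checks and release:

    #include <cassert>
    #include <cstdint>
    #include <sys/mman.h>
    #include <unistd.h>

    void ReleaseRegion(void *region, size_t size) {
      size_t pagesize = getpagesize();
      assert(size % pagesize == 0);                      // whole pages only
      assert(reinterpret_cast<uintptr_t>(region) % pagesize == 0);
      int rc = munmap(region, size);    // hand the pages back to the OS
      assert(rc == 0 && "munmap failed");
      (void)rc;
    }
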
360 // L < arena->mu
361 static AllocList *Next(int i, AllocList *prev, LowLevelAlloc::Arena *arena) {
367 RAW_CHECK(next->header.arena == arena,
368 "bad arena pointer in Next()");
369 if (prev != &arena->freelist) {
383 LowLevelAlloc::Arena *arena = a->header.arena;
386 n->header.arena = 0;
388 LLA_SkiplistDelete(&arena->freelist, n, prev);
389 LLA_SkiplistDelete(&arena->freelist, a, prev);
390 a->levels = LLA_SkiplistLevels(a->header.size, arena->min_size, true);
391 LLA_SkiplistInsert(&arena->freelist, a, prev);
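
The excerpt above is the heart of Coalesce(): when the block being freed ('a') is immediately followed in memory by another free block ('n'), both are pulled out of the skiplist, their sizes merged into 'a', the merged block's level recomputed from its new size, and the result reinserted. A toy version of the merge itself, on a plain free list rather than a skiplist:

    #include <cstddef>

    struct Block {
      size_t size;        // total bytes, header included
      Block *next_free;
    };

    // Merge 'n' into 'a' when 'n' starts exactly where 'a' ends in memory.
    void CoalesceWithNext(Block *a, Block *n) {
      char *a_end = reinterpret_cast<char *>(a) + a->size;
      if (a_end == reinterpret_cast<char *>(n)) {
        a->size += n->size;            // one larger block
        a->next_free = n->next_free;   // unlink the absorbed neighbor
      }
    }
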
396 // L >= arena->mu
397 static void AddToFreelist(void *v, LowLevelAlloc::Arena *arena) {
402 RAW_CHECK(f->header.arena == arena,
403 "bad arena pointer in AddToFreelist()");
404 f->levels = LLA_SkiplistLevels(f->header.size, arena->min_size, true);
406 LLA_SkiplistInsert(&arena->freelist, f, prev);
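
AddToFreelist computes a skiplist level from the block's size before insertion: larger blocks get more levels, so a search can skip over runs of small blocks. The third argument in the LLA_SkiplistLevels calls above toggles a random component for list balance; the deterministic core is roughly an integer log2 of size over min_size, as sketched here:

    #include <cstddef>

    // Roughly floor(log2(size / min_size)) + 1: bigger blocks, more levels.
    int LevelsFor(size_t size, size_t min_size) {
      int level = 1;
      while (min_size * 2 <= size) {
        min_size *= 2;
        level++;
      }
      return level;
    }
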
413 // L < arena->mu
420 LowLevelAlloc::Arena *arena = f->header.arena;
421 if ((arena->flags & kCallMallocHook) != 0) {
424 ArenaLock section(arena);
425 AddToFreelist(v, arena);
426 RAW_CHECK(arena->allocation_count > 0, "nothing in arena to free");
427 arena->allocation_count--;
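
Free() recovers the owning arena from the block header (line 420 above), optionally reports to the malloc hooks, then takes the arena lock, returns the block to the free list, and decrements allocation_count. The header validation it relies on uses an address-keyed magic word, in the spirit of the Magic() call seen at line 276; a sketch, with an illustrative constant:

    #include <cassert>
    #include <cstdint>

    struct Header { uintptr_t magic; };

    // XOR the tag with the header's own address, so a magic value copied to
    // the wrong place (double free, stray pointer) no longer validates.
    uintptr_t Magic(uintptr_t tag, const Header *h) {
      return tag ^ reinterpret_cast<uintptr_t>(h);
    }

    const uintptr_t kMagicAllocated = 0x4c833e95u;   // illustrative constant

    void CheckAllocated(Header *h) {
      assert(h->magic == Magic(kMagicAllocated, h) && "bad magic in Free()");
    }
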
433 // L < arena->mu
434 static void *DoAllocWithArena(size_t request, LowLevelAlloc::Arena *arena) {
438 ArenaLock section(arena);
439 ArenaInit(arena);
441 size_t req_rnd = RoundUp(request + sizeof (s->header), arena->roundup);
444 int i = LLA_SkiplistLevels(req_rnd, arena->min_size, false) - 1;
445 if (i < arena->freelist.levels) { // potential blocks exist
446 AllocList *before = &arena->freelist; // predecessor of s
447 while ((s = Next(i, before, arena)) != 0 && s->header.size < req_rnd) {
456 arena->mu.Unlock();
459 size_t new_pages_size = RoundUp(req_rnd, arena->pagesize * 16);
461 if ((arena->flags & LowLevelAlloc::kAsyncSignalSafe) != 0) {
469 arena->mu.Lock();
474 s->header.arena = arena;
475 AddToFreelist(&s->levels, arena); // insert new region into free list
478 LLA_SkiplistDelete(&arena->freelist, s, prev); // remove from free list
480 if (req_rnd + arena->min_size <= s->header.size) { // big enough to split
485 n->header.arena = arena;
487 AddToFreelist(&n->levels, arena);
490 RAW_CHECK(s->header.arena == arena, "");
491 arena->allocation_count++;
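
Putting the allocation path together: round the request (plus header) up to the arena granularity, search the skiplist for the first block of at least that size, and if nothing fits, drop the lock, map a fresh region rounded up to a multiple of 16 pages (line 459), relock, and add it to the free list before retrying. The chosen block is removed from the free list and, if the leftover tail would itself be at least min_size, split (line 480). A toy version of the split:

    #include <cstddef>

    struct Block { size_t size; };   // size includes this header

    // If 's' can hold the request plus another minimum-size block, carve off
    // the tail as a new free block; otherwise hand out 's' whole.
    Block *SplitIfBigEnough(Block *s, size_t req_rnd, size_t min_size) {
      if (req_rnd + min_size <= s->size) {
        Block *n = reinterpret_cast<Block *>(
            reinterpret_cast<char *>(s) + req_rnd);
        n->size = s->size - req_rnd;   // tail keeps the remainder
        s->size = req_rnd;             // head becomes the allocation
        return n;                      // caller reinserts 'n' into the free list
      }
      return nullptr;
    }
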
509 void *LowLevelAlloc::AllocWithArena(size_t request, Arena *arena) {
510 RAW_CHECK(arena != 0, "must pass a valid arena");
511 void *result = DoAllocWithArena(request, arena);
512 if ((arena->flags & kCallMallocHook) != 0) {
520 LowLevelAlloc::Arena *LowLevelAlloc::DefaultArena() {
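
For reference, the public surface these excerpts exercise, in a hedged usage sketch (the header path and flag value are assumptions, not taken from this listing):

    #include "base/low_level_alloc.h"   // assumed header location

    void Example() {
      // Metadata for the new arena is carved out of the default arena.
      LowLevelAlloc::Arena *a =
          LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena());
      void *p = LowLevelAlloc::AllocWithArena(128, a);
      LowLevelAlloc::Free(p);          // owning arena comes from the block header
      LowLevelAlloc::DeleteArena(a);   // only succeeds once the arena is empty
    }
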