/art/compiler/dex/

backend.h
    32  explicit Backend(ArenaAllocator* arena) : arena_(arena) {}
    33  ArenaAllocator* const arena_;  // member in class:art::Backend

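The two backend.h matches are the whole pattern in miniature: a compiler pass borrows an arena it does not own and stores it as a const pointer. A minimal sketch of that ownership shape; the Arena type here is a placeholder, not ART's ArenaAllocator:

    // Sketch of the borrowed-arena member pattern.
    class Arena;  // owned by the caller, opaque here

    class Backend {
     public:
      // The caller must keep the arena alive for the backend's lifetime.
      explicit Backend(Arena* arena) : arena_(arena) {}

     protected:
      Arena* const arena_;  // const pointer: never reseated after construction
    };
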
ssa_transformation.cc
    91  dfs_order_ = new (arena_) GrowableArray<int>(arena_, GetNumBlocks(), kGrowableArrayDfsOrder);
    99  dfs_post_order_ = new (arena_) GrowableArray<int>(arena_, GetNumBlocks(), kGrowableArrayDfsPostOrder);
   139  (arena_->Alloc(sizeof(ArenaBitVector *) * num_registers,
   146  new (arena_) ArenaBitVector(arena_, GetNumBlocks(), false, kBitMapBMatrix);
   172  new (arena_) GrowableArray<int>(arena_, num_reachable_blocks_,
   180  work_stack.push_back(std::make_pair(bb, new (arena_) ArenaBitVector::Iterator(bb->i_dominated)))
   [all...]

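Every new (arena_) T(...) above relies on a placement operator new overload that routes the allocation into the arena. A compilable sketch of that mechanism, with a deliberately naive Alloc in place of ART's kind-tagged ArenaAllocator::Alloc:

    #include <cstddef>
    #include <vector>

    // Naive arena: every block is freed wholesale when the arena dies.
    class Arena {
     public:
      void* Alloc(std::size_t n) {
        blocks_.push_back(new char[n]);
        return blocks_.back();
      }
      ~Arena() {
        for (std::size_t i = 0; i < blocks_.size(); ++i) delete[] blocks_[i];
      }
     private:
      std::vector<char*> blocks_;
    };

    // Enables the "new (arena_) T(...)" spelling seen above. Destructors of
    // arena-resident objects never run, so T must not own outside resources.
    inline void* operator new(std::size_t size, Arena* arena) {
      return arena->Alloc(size);
    }
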
arena_allocator.h
   146  arena_.DumpMemStats(os);
   148  explicit MemStats(const ArenaAllocator &arena) : arena_(arena) {}
   150  const ArenaAllocator &arena_;  // member in struct:art::MemStats

growable_array.h
    80  : arena_(arena),
    84  elem_list_ = static_cast<T*>(arena_->Alloc(sizeof(T) * init_length,
    98  T* new_array = static_cast<T*>(arena_->Alloc(sizeof(T) * target_length,
   161  ArenaAllocator* const arena_;  // member in class:art::GrowableArray

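Line 98 is the interesting one: when the array grows, the old element block is simply abandoned rather than freed, since the arena reclaims everything at once. A sketch of that grow step, assuming a trivially copyable T (ints and pointers, as in GrowableArray's users), with alloc standing in for arena_->Alloc(..., kAllocGrowableArray):

    #include <algorithm>
    #include <cstddef>

    // Grow an arena-backed array to hold at least `needed` elements.
    template <typename T, typename AllocFn>
    void Grow(T*& elems, std::size_t& capacity, std::size_t needed,
              AllocFn alloc) {
      std::size_t target = capacity == 0 ? 4 : capacity;
      while (target < needed) target *= 2;  // geometric growth
      T* fresh = static_cast<T*>(alloc(sizeof(T) * target));
      if (elems != 0) std::copy(elems, elems + capacity, fresh);
      elems = fresh;      // old block is abandoned to the arena, never freed
      capacity = target;
    }
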
arena_bit_vector.cc
    34  : arena_(arena),
    38  storage_(static_cast<uint32_t*>(arena_->Alloc(storage_size_ * sizeof(uint32_t),
    71  static_cast<uint32_t*>(arena_->Alloc(new_size * sizeof(uint32_t),

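Lines 38 and 71 size the bit storage in whole uint32_t words. A sketch of the word math and the expand-on-SetBit behavior; alloc_words is a stand-in for the arena call, and zeroing of the new tail is made explicit:

    #include <cstdint>
    #include <cstring>

    struct BitVectorSketch {
      uint32_t* storage;
      uint32_t  storage_size;  // measured in 32-bit words

      void SetBit(uint32_t num, uint32_t* (*alloc_words)(uint32_t count)) {
        uint32_t word = num >> 5;  // num / 32
        if (word >= storage_size) {
          uint32_t new_size = word + 1;
          uint32_t* fresh = alloc_words(new_size);
          if (storage_size != 0) {
            std::memcpy(fresh, storage, storage_size * sizeof(uint32_t));
          }
          std::memset(fresh + storage_size, 0,
                      (new_size - storage_size) * sizeof(uint32_t));
          storage = fresh;  // old words abandoned to the arena
          storage_size = new_size;
        }
        storage[word] |= 1u << (num & 31);  // num % 32
      }
    };
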
mir_dataflow.cc
   875  new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapUse);
   877  new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapDef);
   879  new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapLiveIn);
   957  mir->ssa_rep->uses = static_cast<int*>(arena_->Alloc(sizeof(int) * num_uses,
   960  mir->ssa_rep->fp_use = static_cast<bool*>(arena_->Alloc(sizeof(bool) * num_uses,
   975  mir->ssa_rep->uses = static_cast<int*>(arena_->Alloc(sizeof(int) * num_uses,
   978  mir->ssa_rep->fp_use = static_cast<bool*>(arena_->Alloc(sizeof(bool) * num_uses
   [all...]

mir_graph.cc
   111  arena_(arena) {
   112  try_block_addr_ = new (arena_) ArenaBitVector(arena_, 0, true /* expandable */);
   396  new (arena_) GrowableArray<SuccessorBlockInfo*>(arena_, size, kGrowableArraySuccessorBlocks);
   402  static_cast<SuccessorBlockInfo*>(arena_->Alloc(sizeof(SuccessorBlockInfo),
   437  new (arena_) GrowableArray<SuccessorBlockInfo*>(arena_, 2, kGrowableArraySuccessorBlocks);
   447  (arena_->Alloc(sizeof(SuccessorBlockInfo), ArenaAllocator::kAllocSuccessor));
   493  MIR* new_insn = static_cast<MIR*>(arena_->Alloc(sizeof(MIR), ArenaAllocator::kAllocMIR))
   [all...]

arena_bit_vector.h
   116  ArenaAllocator* const arena_;  // member in class:art::ArenaBitVector

mir_optimization.cc
    96  is_constant_v_ = new (arena_) ArenaBitVector(arena_, GetNumSSARegs(), false);
    97  constant_values_ = static_cast<int*>(arena_->Alloc(sizeof(int) * GetNumSSARegs(),
   402  static_cast<int*>(arena_->Alloc(sizeof(int) * 3, ArenaAllocator::kAllocDFInfo));
   411  static_cast<int*>(arena_->Alloc(sizeof(int) * 1, ArenaAllocator::kAllocDFInfo));
   413  static_cast<bool*>(arena_->Alloc(sizeof(bool) * 1, ArenaAllocator::kAllocDFInfo));
   417  static_cast<bool*>(arena_->Alloc(sizeof(bool) * mir->ssa_rep->num_uses,
   474  new (arena_) ArenaBitVector(arena_, GetNumSSARegs(), false, kBitMapNullCheck);
   805  static_cast<Checkstats*>(arena_->Alloc(sizeof(Checkstats), ArenaAllocator::kAllocDFInfo))
   [all...]

/external/chromium_org/third_party/leveldatabase/src/db/

memtable.h
    81  Arena arena_;  // member in class:leveldb::MemTable

memtable.cc
    24  table_(comparator_, &arena_) {
    31  size_t MemTable::ApproximateMemoryUsage() { return arena_.MemoryUsage(); }
    96  char* buf = arena_.Allocate(encoded_len);

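Line 31 shows why MemTable keeps the arena by value: ApproximateMemoryUsage() is answered entirely by the arena's byte counter, which the write path (line 96) drives. A condensed sketch of a leveldb-style bump arena underneath those two calls; the real leveldb::Arena also provides AllocateAligned, counts the block vector itself, and gives oversized requests their own block without wasting the current one:

    #include <cstddef>
    #include <vector>

    class ArenaSketch {
     public:
      ArenaSketch() : ptr_(NULL), remaining_(0), usage_(0) {}
      ~ArenaSketch() {
        for (std::size_t i = 0; i < blocks_.size(); ++i) delete[] blocks_[i];
      }

      // Bump-allocate from the current block; start a new one when it runs out.
      char* Allocate(std::size_t bytes) {
        if (bytes > remaining_) {
          std::size_t block = bytes > kBlockSize ? bytes : kBlockSize;
          blocks_.push_back(new char[block]);
          ptr_ = blocks_.back();
          remaining_ = block;
          usage_ += block;
        }
        char* result = ptr_;
        ptr_ += bytes;
        remaining_ -= bytes;
        return result;
      }

      std::size_t MemoryUsage() const { return usage_; }

     private:
      static const std::size_t kBlockSize = 4096;
      char* ptr_;
      std::size_t remaining_;
      std::size_t usage_;
      std::vector<char*> blocks_;
    };
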
skiplist.h
    99  Arena* const arena_;  // Arena used for allocations of nodes; member in class:leveldb::SkipList
   182  char* mem = arena_->AllocateAligned(
   324  arena_(arena),

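Line 182 uses AllocateAligned because the raw arena bytes are reinterpreted as a Node whose pointer tower is laid out inline after the struct; unaligned bytes would make that cast invalid. A sketch of the layout, hedged: leveldb's real Node uses atomic pointers and placement new, and the tower relies on the usual flexible-array idiom:

    #include <cstddef>

    struct Node {
      const char* key;
      Node* next[1];  // level 0; `height - 1` more pointers follow in memory
    };

    // `aligned_alloc_fn` stands in for arena_->AllocateAligned(...).
    Node* NewNode(const char* key, int height,
                  char* (*aligned_alloc_fn)(std::size_t)) {
      char* mem = aligned_alloc_fn(sizeof(Node) + sizeof(Node*) * (height - 1));
      Node* n = reinterpret_cast<Node*>(mem);  // valid only on aligned memory
      n->key = key;
      for (int i = 0; i < height; ++i) n->next[i] = NULL;
      return n;
    }
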
skiplist_test.cc
   207  Arena arena_;  // member in class:leveldb::ConcurrentTest
   214  ConcurrentTest() : list_(Comparator(), &arena_) { }

/art/compiler/dex/quick/mips/

call_mips.cc
    70  static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), ArenaAllocator::kAllocData));
    75  static_cast<LIR**>(arena_->Alloc(elements * sizeof(LIR*), ArenaAllocator::kAllocLIR));
   149  static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), ArenaAllocator::kAllocData));
   153  tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*),
   229  reinterpret_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData),

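The pattern repeated at lines 70/75 and 149/153 (and again in the x86 and ARM backends below): per-switch fixup records and their LIR* target arrays all come from the arena, so they can live until assembly without any matching frees. A hypothetical condensation, with Alloc standing in for arena_->Alloc(n, kind) and the kAlloc* tags dropped:

    #include <cstddef>
    #include <cstdint>

    struct LIR;  // opaque here

    struct SwitchTableSketch {
      const uint16_t* table;  // points into the dex switch payload
      LIR** targets;          // one branch target per case, patched at assembly
    };

    SwitchTableSketch* BuildSwitchTable(const uint16_t* payload, int size,
                                        void* (*Alloc)(std::size_t)) {
      SwitchTableSketch* tab_rec =
          static_cast<SwitchTableSketch*>(Alloc(sizeof(SwitchTableSketch)));
      tab_rec->table = payload;
      tab_rec->targets = static_cast<LIR**>(Alloc(size * sizeof(LIR*)));
      return tab_rec;
    }
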
/external/chromium_org/third_party/tcmalloc/chromium/src/

memory_region_map.h
    99  // and initialize arena_ and our hook and locks, hence one can use
   115  // These also protect use of arena_ if our Init() has been done.
   253  return LowLevelAlloc::AllocWithArena(n, arena_);
   297  static LowLevelAlloc::Arena* arena_;  // member in class:MemoryRegionMap
   301  // Hence we protect the non-recursive lock used inside of arena_

memory_region_map.cc
   143  LowLevelAlloc::Arena* MemoryRegionMap::arena_ = NULL;  // member in class:MemoryRegionMap
   219  arena_ = LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena());
   269  bool deleted_arena = LowLevelAlloc::DeleteArena(arena_);
   271  arena_ = 0;
   [all...]

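The three .cc lines above trace the whole arena lifecycle: a private arena is created during Init(), every allocation is tagged with it, and shutdown tears it down in a single DeleteArena call. A condensed sketch using the same tcmalloc calls shown in the source; it assumes base/low_level_alloc.h is on the include path, and error handling is omitted:

    #include <cstddef>
    #include "base/low_level_alloc.h"  // tcmalloc-internal header

    static LowLevelAlloc::Arena* arena = NULL;

    void Init() {
      // Same call as line 219: flags 0, arena metadata from the default arena.
      arena = LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena());
    }

    void* AllocBytes(std::size_t n) {
      return LowLevelAlloc::AllocWithArena(n, arena);
    }

    void Shutdown() {
      // DeleteArena reports success via its bool result (captured at line 269).
      bool deleted_arena = LowLevelAlloc::DeleteArena(arena);
      arena = NULL;
      (void)deleted_arena;
    }
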
/external/chromium_org/third_party/tcmalloc/vendor/src/

memory_region_map.h
    91  // and initialize arena_ and our hook and locks, hence one can use
   103  // These also protect use of arena_ if our Init() has been done.
   229  return LowLevelAlloc::AllocWithArena(n, arena_);
   273  static LowLevelAlloc::Arena* arena_;  // member in class:MemoryRegionMap
   277  // Hence we protect the non-recursive lock used inside of arena_

memory_region_map.cc
   142  LowLevelAlloc::Arena* MemoryRegionMap::arena_ = NULL;  // member in class:MemoryRegionMap
   211  arena_ = LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena());
   237  bool deleted_arena = LowLevelAlloc::DeleteArena(arena_);
   239  arena_ = 0;

/art/compiler/dex/quick/x86/

call_x86.cc
    77  static_cast<SwitchTable *>(arena_->Alloc(sizeof(SwitchTable), ArenaAllocator::kAllocData));
    81  tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*),
   133  static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), ArenaAllocator::kAllocData));

target_x86.cc
   441  reg_pool_ = static_cast<RegisterPool*>(arena_->Alloc(sizeof(*reg_pool_),
   445  static_cast<RegisterInfo*>(arena_->Alloc(num_regs * sizeof(*reg_pool_->core_regs),
   449  static_cast<RegisterInfo *>(arena_->Alloc(num_fp_regs * sizeof(*reg_pool_->FPRegs),

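Worth noting at lines 441-449: allocation sizes are spelled sizeof(*reg_pool_) and sizeof(*reg_pool_->core_regs) rather than naming the types, so the expressions stay correct if a field's type changes. A sketch of the idiom with illustrative types and Alloc standing in for the arena call:

    #include <cstddef>

    struct RegisterInfo { int reg; bool in_use; };
    struct RegisterPool {
      int num_core_regs;
      RegisterInfo* core_regs;
    };

    RegisterPool* BuildPool(int num_regs, void* (*Alloc)(std::size_t)) {
      RegisterPool* pool = static_cast<RegisterPool*>(Alloc(sizeof(*pool)));
      pool->num_core_regs = num_regs;
      pool->core_regs = static_cast<RegisterInfo*>(
          Alloc(num_regs * sizeof(*pool->core_regs)));  // not sizeof(RegisterInfo)
      return pool;
    }
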
/external/chromium_org/third_party/tcmalloc/chromium/src/base/

low_level_alloc.cc
   219  : left_(false), mask_valid_(false), arena_(arena) {  // function in class:__anon17112::ArenaLock
   233  this->arena_->mu.Lock();
   237  this->arena_->mu.Unlock();
   251  LowLevelAlloc::Arena *arena_;  // member in class:__anon17112::ArenaLock

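ArenaLock (lines 219-251) is a scoped guard over the arena's internal mutex. A stripped-down sketch of just the RAII core, using std::mutex in place of the SpinLock behind arena->mu; the real class additionally masks async signals (mask_valid_) and requires an explicit Leave() before destruction:

    #include <mutex>

    // Construction pins the mutex; destruction releases it.
    class ArenaLockSketch {
     public:
      explicit ArenaLockSketch(std::mutex* mu) : mu_(mu) { mu_->lock(); }
      ~ArenaLockSketch() { mu_->unlock(); }

     private:
      ArenaLockSketch(const ArenaLockSketch&);             // non-copyable
      ArenaLockSketch& operator=(const ArenaLockSketch&);  // non-assignable
      std::mutex* const mu_;
    };
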
/external/chromium_org/third_party/tcmalloc/vendor/src/base/

low_level_alloc.cc
   219  : left_(false), mask_valid_(false), arena_(arena) {  // function in class:__anon17156::ArenaLock
   233  this->arena_->mu.Lock();
   237  this->arena_->mu.Unlock();
   251  LowLevelAlloc::Arena *arena_;  // member in class:__anon17156::ArenaLock

/art/compiler/dex/quick/arm/

call_arm.cc
   319  static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), ArenaAllocator::kAllocData));
   323  tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*),
   367  static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), ArenaAllocator::kAllocData));
   372  static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), ArenaAllocator::kAllocLIR));
   420  static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), ArenaAllocator::kAllocData));

/external/chromium_org/crypto/

openpgp_symmetric_encryption.cc
   184  i = arena_.begin(); i != arena_.end(); i++) {
   187  arena_.clear();
   309  arena_.push_back(buf);
   445  arena_.push_back(plaintext_key);
   552  arena_.push_back(plaintext);
   609  // arena_ contains malloced pointers that are used as temporary space during
   611  std::vector<void*> arena_;  // member in class:crypto::__anon11055::Decrypter

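Here arena_ is not an allocator class at all, just a std::vector<void*> of malloc'ed scratch buffers released in one pass (lines 184-187). A sketch of that minimal pattern; real crypto code would also scrub key material before freeing, which this sketch omits:

    #include <cstdlib>
    #include <vector>

    class ScratchArena {
     public:
      // malloc a temporary buffer and remember it for bulk release.
      void* Push(std::size_t n) {
        void* buf = std::malloc(n);
        arena_.push_back(buf);
        return buf;
      }
      ~ScratchArena() {
        for (std::vector<void*>::iterator i = arena_.begin();
             i != arena_.end(); i++) {
          std::free(*i);
        }
        arena_.clear();
      }

     private:
      std::vector<void*> arena_;
    };
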
/art/compiler/dex/quick/

local_optimizations.cc
   252  static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
   449  static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));