    Searched refs: arena_ (Results 1 - 25 of 42)


  /art/compiler/dex/
backend.h 51 explicit Backend(ArenaAllocator* arena) : arena_(arena) {}
52 ArenaAllocator* const arena_; member in class:art::Backend
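
The backend.h hit above shows the shape most of these ART results share: a compiler component takes an ArenaAllocator in its constructor and pins it in a const member for its whole lifetime, so every pass allocating through it draws from one shared pool. A minimal sketch of that ownership pattern, with a toy arena standing in for the real art::ArenaAllocator:

    #include <cstddef>
    #include <cstdlib>
    #include <vector>

    // Toy stand-in for art::ArenaAllocator: individual allocations are never
    // freed; everything is released at once when the arena is destroyed.
    class ArenaAllocator {
     public:
      void* Alloc(size_t size) {
        void* p = std::malloc(size);
        blocks_.push_back(p);
        return p;
      }
      ~ArenaAllocator() {
        for (void* p : blocks_) std::free(p);
      }
     private:
      std::vector<void*> blocks_;
    };

    // Same shape as art::Backend: the component borrows the arena and keeps
    // a const pointer to it; it never owns or frees the arena itself.
    class Backend {
     public:
      explicit Backend(ArenaAllocator* arena) : arena_(arena) {}
     protected:
      ArenaAllocator* const arena_;
    };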
ssa_transformation.cc 97 dfs_order_ = new (arena_) GrowableArray<BasicBlockId>(arena_, GetNumBlocks(),
106 dfs_post_order_ = new (arena_) GrowableArray<BasicBlockId>(arena_, GetNumBlocks(),
152 (arena_->Alloc(sizeof(ArenaBitVector *) * num_registers,
159 new (arena_) ArenaBitVector(arena_, GetNumBlocks(), false, kBitMapBMatrix);
185 new (arena_) GrowableArray<BasicBlockId>(arena_, num_reachable_blocks_,
276 bb->dominators = new (arena_) ArenaBitVector(arena_, num_total_blocks
    … (more matches in this file)
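
ssa_transformation.cc allocates its data-flow structures with `new (arena_) ...`. That syntax works because ART provides a placement operator new that takes the arena; a self-contained sketch of the mechanism (toy arena, illustrative class names):

    #include <cassert>
    #include <cstddef>

    // Toy bump arena (illustrative only).
    struct Arena {
      alignas(std::max_align_t) char buf[1 << 16];
      size_t used = 0;
      void* Alloc(size_t n) {
        n = (n + 7) & ~size_t{7};  // keep 8-byte alignment
        assert(used + n <= sizeof(buf));
        void* p = buf + used;
        used += n;
        return p;
      }
    };

    // This overload is what makes `new (arena) T(...)` compile: the request
    // is routed to the arena instead of the global heap.
    void* operator new(size_t size, Arena* arena) { return arena->Alloc(size); }

    struct BitVector {
      explicit BitVector(size_t bits) : bits_(bits) {}
      size_t bits_;
    };

    int main() {
      Arena arena;
      // Same shape as the call sites above: the object is never deleted;
      // its storage vanishes with the arena.
      BitVector* bv = new (&arena) BitVector(128);
      return bv->bits_ == 128 ? 0 : 1;
    }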
mir_dataflow.cc 933 new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapUse);
935 new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapDef);
937 new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapLiveIn);
1021 mir->ssa_rep->uses = static_cast<int*>(arena_->Alloc(sizeof(int) * num_uses, kArenaAllocDFInfo));
1023 mir->ssa_rep->fp_use = static_cast<bool*>(arena_->Alloc(sizeof(bool) * num_uses, kArenaAllocDFInfo));
1031 mir->ssa_rep->defs = static_cast<int*>(arena_->Alloc(sizeof(int) * num_defs,
1033 mir->ssa_rep->fp_def = static_cast<bool*>(arena_->Alloc(sizeof(bool) * num_defs
    … (more matches in this file)
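
For plain arrays, mir_dataflow.cc skips placement new and calls `arena_->Alloc(bytes, kind)` directly, casting the result; the kind tag lets the compiler account for memory per subsystem. A sketch of that tagged-allocation idea (enum values here are illustrative, not the real kArenaAlloc* list):

    #include <cassert>
    #include <cstddef>

    // Per-subsystem tags in the spirit of ART's kArenaAlloc* kinds.
    enum ArenaAllocKind { kArenaAllocDFInfo, kArenaAllocLIR, kNumAllocKinds };

    struct Arena {
      alignas(std::max_align_t) char buf[1 << 16];
      size_t used = 0;
      size_t bytes_by_kind[kNumAllocKinds] = {};

      // Untyped, tagged allocation: callers static_cast the result, exactly
      // as in the mir_dataflow.cc lines above.
      void* Alloc(size_t n, ArenaAllocKind kind) {
        bytes_by_kind[kind] += n;  // per-kind accounting
        n = (n + 7) & ~size_t{7};
        assert(used + n <= sizeof(buf));
        void* p = buf + used;
        used += n;
        return p;
      }
    };

    int main() {
      Arena arena;
      const int num_uses = 4;
      int* uses = static_cast<int*>(arena.Alloc(sizeof(int) * num_uses, kArenaAllocDFInfo));
      for (int i = 0; i < num_uses; ++i) uses[i] = i;
      return arena.bytes_by_kind[kArenaAllocDFInfo] == sizeof(int) * num_uses ? 0 : 1;
    }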
mir_graph.cc 116 arena_(arena),
128 try_block_addr_ = new (arena_) ArenaBitVector(arena_, 0, true /* expandable */);
517 new (arena_) GrowableArray<SuccessorBlockInfo*>(arena_, size, kGrowableArraySuccessorBlocks);
523 static_cast<SuccessorBlockInfo*>(arena_->Alloc(sizeof(SuccessorBlockInfo),
570 cur_block->successor_blocks = new (arena_) GrowableArray<SuccessorBlockInfo*>(
571 arena_, 2, kGrowableArraySuccessorBlocks);
578 (arena_->Alloc(sizeof(SuccessorBlockInfo), kArenaAllocSuccessor));
    … (more matches in this file)
mir_optimization.cc 257 CompilerTemp *compiler_temp = static_cast<CompilerTemp *>(arena_->Alloc(sizeof(CompilerTemp),
281 static_cast<CompilerTemp *>(arena_->Alloc(sizeof(CompilerTemp), kArenaAllocRegAlloc));
506 static_cast<int*>(arena_->Alloc(sizeof(int) * 3, kArenaAllocDFInfo));
515 static_cast<int*>(arena_->Alloc(sizeof(int) * 1, kArenaAllocDFInfo));
517 static_cast<bool*>(arena_->Alloc(sizeof(bool) * 1, kArenaAllocDFInfo));
521 static_cast<bool*>(arena_->Alloc(sizeof(bool) * mir->ssa_rep->num_uses,
    … (more matches in this file)
  /art/compiler/utils/
arena_bit_vector.cc 25 explicit ArenaBitVectorAllocator(ArenaAlloc* arena) : arena_(arena) {}
29 return arena_->Alloc(size, kArenaAllocGrowableBitMap);
40 ArenaAlloc* arena_; member in class:art::ArenaBitVectorAllocator
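
arena_bit_vector.cc adapts the arena to a generic Allocator interface so a reusable container can run on arena memory. The key detail is the empty Free(): arena storage is reclaimed in bulk, never piecewise. A sketch of that adapter shape (interface simplified from the real art::Allocator):

    #include <cstddef>
    #include <cstdlib>
    #include <vector>

    // Generic allocator interface a reusable container is written against.
    class Allocator {
     public:
      virtual ~Allocator() {}
      virtual void* Alloc(size_t size) = 0;
      virtual void Free(void* p) = 0;
    };

    struct Arena {  // toy stand-in
      std::vector<void*> blocks;
      void* Alloc(size_t n) { void* p = std::malloc(n); blocks.push_back(p); return p; }
      ~Arena() { for (void* p : blocks) std::free(p); }
    };

    // Adapter in the spirit of ArenaBitVectorAllocator: Alloc forwards to
    // the arena; Free is deliberately a no-op because the arena releases
    // everything at once when it is destroyed.
    class ArenaBackedAllocator : public Allocator {
     public:
      explicit ArenaBackedAllocator(Arena* arena) : arena_(arena) {}
      void* Alloc(size_t size) override { return arena_->Alloc(size); }
      void Free(void*) override {}  // no per-object free in an arena
     private:
      Arena* const arena_;
    };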
growable_array.h 85 : arena_(arena),
89 elem_list_ = static_cast<T*>(arena_->Alloc(sizeof(T) * init_length,
103 T* new_array = static_cast<T*>(arena_->Alloc(sizeof(T) * target_length,
208 ArenaAllocator* const arena_; member in class:art::GrowableArray
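
growable_array.h shows the cost model of growing inside an arena: Resize() allocates a bigger block and copies, and the old block is simply abandoned, since nothing in an arena is freed individually. A compact sketch of that container (toy arena, simplified interface, trivially copyable T assumed as in the original):

    #include <cassert>
    #include <cstddef>
    #include <cstring>

    struct Arena {  // toy bump allocator (illustrative)
      alignas(std::max_align_t) char buf[1 << 16];
      size_t used = 0;
      void* Alloc(size_t n) {
        n = (n + 7) & ~size_t{7};
        assert(used + n <= sizeof(buf));
        void* p = buf + used; used += n; return p;
      }
    };

    template <typename T>
    class GrowableArray {
     public:
      GrowableArray(Arena* arena, size_t init_length)
          : arena_(arena), num_allocated_(init_length), num_used_(0) {
        elem_list_ = static_cast<T*>(arena_->Alloc(sizeof(T) * init_length));
      }
      void Insert(T elem) {
        if (num_used_ == num_allocated_)
          Resize(num_allocated_ == 0 ? 4 : num_allocated_ * 2);
        elem_list_[num_used_++] = elem;
      }
      T Get(size_t index) const { assert(index < num_used_); return elem_list_[index]; }
     private:
      void Resize(size_t target_length) {
        T* new_array = static_cast<T*>(arena_->Alloc(sizeof(T) * target_length));
        std::memcpy(new_array, elem_list_, sizeof(T) * num_used_);
        elem_list_ = new_array;  // old storage is abandoned, never freed
        num_allocated_ = target_length;
      }
      Arena* const arena_;
      T* elem_list_;
      size_t num_allocated_;
      size_t num_used_;
    };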
  /art/compiler/optimizing/
builder.cc 76 HLocal* local = new (arena_) HLocal(i);
96 new (arena_) HParameterValue(parameter_index++, Primitive::kPrimNot);
99 entry_block_->AddInstruction(new (arena_) HStoreLocal(local, parameter));
114 new (arena_) HParameterValue(parameter_index++, Primitive::GetType(shorty[pos - 1]));
119 entry_block_->AddInstruction(new (arena_) HStoreLocal(local, parameter));
143 T* comparison = new (arena_) T(first, second);
145 HInstruction* ifinst = new (arena_) HIf(comparison);
159 T* comparison = new (arena_) T(value, GetIntConstant(0));
161 HInstruction* ifinst = new (arena_) HIf(comparison);
181 graph_ = new (arena_) HGraph(arena_);
    … (more matches in this file)
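
The optimizing builder allocates every IR node the same way: placement new on the graph's arena, no deletes, no destructor calls. A runnable sketch with toy stand-ins for the H* node classes:

    #include <cstddef>
    #include <vector>

    struct Arena {  // toy stand-in
      std::vector<char*> blocks;
      void* Alloc(size_t n) { char* p = new char[n]; blocks.push_back(p); return p; }
      ~Arena() { for (char* p : blocks) delete[] p; }
    };
    void* operator new(size_t n, Arena* a) { return a->Alloc(n); }

    // Minimal IR shapes (illustrative; the real classes are art::HInstruction
    // and friends).
    struct HInstruction { virtual ~HInstruction() {} };
    struct HLocal : HInstruction { explicit HLocal(int reg) : reg_(reg) {} int reg_; };
    struct HGoto : HInstruction {};

    int main() {
      Arena arena;
      // As in builder.cc: nodes are created with placement new on the arena;
      // their destructors are never run, only the raw bytes are reclaimed.
      HInstruction* local = new (&arena) HLocal(0);
      HInstruction* jump  = new (&arena) HGoto();
      (void)local; (void)jump;
      return 0;
    }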
builder.h 38 : arena_(arena),
110 ArenaAllocator* const arena_; member in class:art::HGraphBuilder
nodes.cc 29 ArenaBitVector visiting(arena_, blocks_.Size(), false);
70 ArenaBitVector visited(arena_, blocks_.Size(), false);
87 GrowableArray<size_t> visits(arena_, blocks_.Size());
96 ArenaBitVector visited(arena_, blocks_.Size(), false);
143 HBasicBlock* new_block = new (arena_) HBasicBlock(this);
145 new_block->AddInstruction(new (arena_) HGoto());
165 HBasicBlock* new_back_edge = new (arena_) HBasicBlock(this);
167 new_back_edge->AddInstruction(new (arena_) HGoto());
182 HBasicBlock* pre_header = new (arena_) HBasicBlock(this);
184 pre_header->AddInstruction(new (arena_) HGoto());
    … (more matches in this file)
  /external/chromium_org/third_party/leveldatabase/src/db/
memtable.h 81 Arena arena_; member in class:leveldb::MemTable
memtable.cc 24 table_(comparator_, &arena_) {
31 size_t MemTable::ApproximateMemoryUsage() { return arena_.MemoryUsage(); }
96 char* buf = arena_.Allocate(encoded_len);
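
On the leveldb side, MemTable owns its Arena by value: each entry is encoded into a single arena block, and ApproximateMemoryUsage() just asks the arena. A simplified sketch of that shape (the real encoding also stores varint lengths and a sequence/type tag; this toy concatenates key and value):

    #include <cstddef>
    #include <cstring>
    #include <string>
    #include <vector>

    struct Arena {  // toy stand-in for leveldb::Arena
      std::vector<char*> blocks;
      size_t usage = 0;
      char* Allocate(size_t n) {
        char* p = new char[n]; blocks.push_back(p); usage += n; return p;
      }
      size_t MemoryUsage() const { return usage; }
      ~Arena() { for (char* p : blocks) delete[] p; }
    };

    class MemTable {
     public:
      void Add(const std::string& key, const std::string& value) {
        size_t encoded_len = key.size() + value.size();
        char* buf = arena_.Allocate(encoded_len);  // one block per entry
        std::memcpy(buf, key.data(), key.size());
        std::memcpy(buf + key.size(), value.data(), value.size());
        entries_.push_back(buf);
      }
      size_t ApproximateMemoryUsage() { return arena_.MemoryUsage(); }
     private:
      Arena arena_;  // owned by value, as in memtable.h line 81
      std::vector<char*> entries_;
    };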
skiplist.h 99 Arena* const arena_; // Arena used for allocations of nodes member in class:leveldb::SkipList
182 char* mem = arena_->AllocateAligned(
324 arena_(arena),
skiplist_test.cc 207 Arena arena_; member in class:leveldb::ConcurrentTest
214 ConcurrentTest() : list_(Comparator(), &arena_) { }
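
skiplist.h stores `Arena* const arena_` and carves each node out of one AllocateAligned() block sized for the node's tower height. A sketch of that node layout, using the same flexible-array idiom as the real code:

    #include <atomic>
    #include <cassert>
    #include <cstddef>
    #include <new>
    #include <vector>

    struct Arena {  // toy stand-in for leveldb::Arena
      std::vector<char*> blocks;
      char* AllocateAligned(size_t n) {
        char* p = new char[n];  // operator new[] is suitably aligned here
        blocks.push_back(p);
        return p;
      }
      ~Arena() { for (char* p : blocks) delete[] p; }
    };

    // Skiplist node with a variable-height array of next pointers: one
    // aligned arena block holds the node header plus (height - 1) extra
    // next_ slots, exactly the sizing seen at skiplist.h line 182.
    struct Node {
      int key;
      std::atomic<Node*> next_[1];  // flexible-array idiom
    };

    Node* NewNode(Arena* arena, int key, int height) {
      assert(height >= 1);
      char* mem = arena->AllocateAligned(
          sizeof(Node) + sizeof(std::atomic<Node*>) * (height - 1));
      Node* n = new (mem) Node;  // standard placement new into the block
      n->key = key;
      for (int i = 0; i < height; ++i) n->next_[i].store(nullptr);
      return n;
    }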
  /art/compiler/dex/quick/mips/
call_mips.cc 71 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
76 static_cast<LIR**>(arena_->Alloc(elements * sizeof(LIR*), kArenaAllocLIR));
148 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
152 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*),
227 reinterpret_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData),
343 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, spill_count * 4));
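
Each quick-backend port repeats the same two-step allocation seen here in call_mips.cc (and again below for arm64, x86, and arm): first the SwitchTable descriptor, then a separate arena block for its LIR* target array. A sketch of that shape, with the allocation-kind tags dropped for brevity:

    #include <cassert>
    #include <cstddef>

    struct LIR;  // opaque here: a low-level IR instruction

    struct Arena {  // toy bump allocator
      alignas(std::max_align_t) char buf[1 << 16];
      size_t used = 0;
      void* Alloc(size_t n) {
        n = (n + 7) & ~size_t{7};
        assert(used + n <= sizeof(buf));
        void* p = buf + used; used += n; return p;
      }
    };

    struct SwitchTable {
      int first_key;
      LIR** targets;
      size_t num_targets;
    };

    SwitchTable* BuildSwitchTable(Arena* arena, int first_key, size_t size) {
      // Step 1: the descriptor itself lives in the arena.
      SwitchTable* tab_rec =
          static_cast<SwitchTable*>(arena->Alloc(sizeof(SwitchTable)));
      tab_rec->first_key = first_key;
      tab_rec->num_targets = size;
      // Step 2: a second arena block for the branch-target array, to be
      // filled in as each case's LIR label is materialized.
      tab_rec->targets = static_cast<LIR**>(arena->Alloc(size * sizeof(LIR*)));
      for (size_t i = 0; i < size; ++i) tab_rec->targets[i] = nullptr;
      return tab_rec;
    }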
  /art/compiler/dex/quick/arm64/
call_arm64.cc 53 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
57 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR));
105 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
110 static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR));
163 static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), kArenaAllocData));
398 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, frame_size_));
  /external/chromium_org/third_party/tcmalloc/chromium/src/
memory_region_map.h 99 // and initialize arena_ and our hook and locks, hence one can use
115 // These also protect use of arena_ if our Init() has been done.
253 return LowLevelAlloc::AllocWithArena(n, arena_);
297 static LowLevelAlloc::Arena* arena_; member in class:MemoryRegionMap
301 // Hence we protect the non-recursive lock used inside of arena_
memory_region_map.cc 143 LowLevelAlloc::Arena* MemoryRegionMap::arena_ = NULL; member in class:MemoryRegionMap
219 arena_ = LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena());
269 bool deleted_arena = LowLevelAlloc::DeleteArena(arena_);
271 arena_ = 0;
    … (more matches in this file)
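
memory_region_map.cc shows a different lifecycle: a class-static LowLevelAlloc arena created in Init() via NewArena(), used through AllocWithArena(), and torn down with DeleteArena(), with the pointer cleared afterwards. A standalone analogue of that lifecycle (toy arena; the real code also guards these steps with locks and reference counts):

    #include <cstddef>
    #include <cstdlib>
    #include <vector>

    struct Arena {  // toy stand-in for LowLevelAlloc::Arena
      std::vector<void*> blocks;
      void* Alloc(size_t n) { void* p = std::malloc(n); blocks.push_back(p); return p; }
      ~Arena() { for (void* p : blocks) std::free(p); }
    };

    class MemoryRegionMap {
     public:
      static void Init() {
        if (arena_ == nullptr) arena_ = new Arena;  // cf. LowLevelAlloc::NewArena(...)
      }
      static void* Allocate(size_t n) {
        return arena_->Alloc(n);                    // cf. AllocWithArena(n, arena_)
      }
      static void Shutdown() {
        delete arena_;                              // cf. LowLevelAlloc::DeleteArena(arena_)
        arena_ = nullptr;                           // matches `arena_ = 0;` above
      }
     private:
      static Arena* arena_;                         // class-static, as in the header
    };

    Arena* MemoryRegionMap::arena_ = nullptr;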
  /external/chromium_org/third_party/tcmalloc/vendor/src/
memory_region_map.h 91 // and initialize arena_ and our hook and locks, hence one can use
103 // These also protect use of arena_ if our Init() has been done.
229 return LowLevelAlloc::AllocWithArena(n, arena_);
273 static LowLevelAlloc::Arena* arena_; member in class:MemoryRegionMap
277 // Hence we protect the non-recursive lock used inside of arena_
memory_region_map.cc 142 LowLevelAlloc::Arena* MemoryRegionMap::arena_ = NULL; member in class:MemoryRegionMap
211 arena_ = LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena());
237 bool deleted_arena = LowLevelAlloc::DeleteArena(arena_);
239 arena_ = 0;
  /art/compiler/dex/quick/x86/
call_x86.cc 70 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
74 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*),
141 static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), kArenaAllocData));
280 new(arena_)StackOverflowSlowPath(this, branch,
  /art/compiler/dex/quick/arm/
call_arm.cc 53 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
57 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR));
101 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData));
106 static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR));
154 static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), kArenaAllocData));
436 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, true, spill_size));
451 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, false, frame_size_));
  /external/chromium_org/third_party/tcmalloc/chromium/src/base/
low_level_alloc.cc 219 : left_(false), mask_valid_(false), arena_(arena) { function in class:__anon20167::ArenaLock
233 this->arena_->mu.Lock();
237 this->arena_->mu.Unlock();
251 LowLevelAlloc::Arena *arena_; member in class:__anon20167::ArenaLock
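
Not every arena_ here is an allocator handle: low_level_alloc.cc's ArenaLock is a scoped guard that keeps a pointer to the arena whose mutex it manages. A simplified skeleton of that guard (the real class also saves a signal mask and requires an explicit Leave() before destruction; this sketch keeps just the locking shape):

    #include <mutex>

    struct Arena { std::mutex mu; };  // only the lock matters for this sketch

    class ArenaLock {
     public:
      explicit ArenaLock(Arena* arena) : left_(false), arena_(arena) {
        arena_->mu.lock();            // cf. this->arena_->mu.Lock();
      }
      void Leave() {
        arena_->mu.unlock();          // cf. this->arena_->mu.Unlock();
        left_ = true;
      }
      ~ArenaLock() { if (!left_) Leave(); }
     private:
      bool left_;
      Arena* arena_;
    };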
  /external/chromium_org/third_party/tcmalloc/vendor/src/base/
low_level_alloc.cc 219 : left_(false), mask_valid_(false), arena_(arena) { function in class:__anon20211::ArenaLock
233 this->arena_->mu.Lock();
237 this->arena_->mu.Unlock();
251 LowLevelAlloc::Arena *arena_; member in class:__anon20211::ArenaLock
  /art/compiler/dex/quick/
gen_common.cc 81 AddSlowPath(new (arena_) DivZeroCheckSlowPath(this, branch));
105 AddSlowPath(new (arena_) ArrayBoundsCheckSlowPath(this, branch, index, length));
135 AddSlowPath(new (arena_) ArrayBoundsCheckSlowPath(this, branch, index, length));
154 AddSlowPath(new (arena_) NullCheckSlowPath(this, branch));
580 AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
665 AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
    … (more matches in this file)
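
gen_common.cc ties the threads together: each out-of-line slow path is itself an arena object, created with placement new and registered with the codegen so it can be emitted after the fast path. A sketch of that registration shape (class names simplified from the real LIRSlowPath hierarchy):

    #include <cstddef>
    #include <vector>

    struct Arena {  // toy stand-in
      std::vector<char*> blocks;
      void* Alloc(size_t n) { char* p = new char[n]; blocks.push_back(p); return p; }
      ~Arena() { for (char* p : blocks) delete[] p; }
    };
    void* operator new(size_t n, Arena* a) { return a->Alloc(n); }

    struct LIR;  // opaque branch instruction

    // Base slow path: remembers the branch that enters it; subclasses emit
    // the out-of-line code in Compile(). Destructors never run, since the
    // arena reclaims only the raw bytes.
    class LIRSlowPath {
     public:
      explicit LIRSlowPath(LIR* branch) : branch_(branch) {}
      virtual void Compile() {}
      virtual ~LIRSlowPath() {}
     protected:
      LIR* const branch_;
    };

    class Codegen {
     public:
      explicit Codegen(Arena* arena) : arena_(arena) {}
      void GenDivZeroCheck(LIR* branch) {
        // Same shape as: AddSlowPath(new (arena_) DivZeroCheckSlowPath(...));
        AddSlowPath(new (arena_) LIRSlowPath(branch));
      }
      void AddSlowPath(LIRSlowPath* path) { slow_paths_.push_back(path); }
     private:
      Arena* const arena_;
      std::vector<LIRSlowPath*> slow_paths_;
    };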
