    Searched refs: kRegionSize (Results 1 - 10 of 10)

  /art/runtime/gc/accounting/
read_barrier_table.h 36 size_t capacity = static_cast<size_t>(kHeapCapacity / kRegionSize);
37 DCHECK_EQ(kHeapCapacity / kRegionSize,
38 static_cast<uint64_t>(static_cast<size_t>(kHeapCapacity / kRegionSize)));
54 DCHECK_ALIGNED(start_addr, kRegionSize);
55 DCHECK_ALIGNED(end_addr, kRegionSize);
82 // This should match RegionSpace::kRegionSize. static_assert'ed in concurrent_copying.cc.
83 static constexpr size_t kRegionSize = 1 * MB;
91 uint8_t* entry_addr = mem_map_->Begin() + reinterpret_cast<uintptr_t>(heap_addr) / kRegionSize;
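
The read barrier table above keeps one byte per 1 MB heap region, indexed by dividing an address by kRegionSize. A minimal self-contained sketch of that arithmetic; note that line 91 divides the raw heap address (the heap sits at a fixed base), while this sketch divides an offset from the heap's begin so it can stand alone, and the class name is illustrative:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t kRegionSize = 1024 * 1024;  // 1 MB, matching line 83 above

    class ReadBarrierTableSketch {
     public:
      ReadBarrierTableSketch(const uint8_t* heap_begin, size_t heap_capacity)
          : heap_begin_(heap_begin),
            table_(heap_capacity / kRegionSize, 0) {}  // one byte per region (line 36)

      // Maps a heap address to its table entry, mirroring line 91's division.
      uint8_t* EntryFor(const void* heap_addr) {
        size_t offset = static_cast<const uint8_t*>(heap_addr) - heap_begin_;
        return &table_[offset / kRegionSize];
      }

     private:
      const uint8_t* heap_begin_;
      std::vector<uint8_t> table_;
    };
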
  /art/runtime/gc/space/
region_space.cc 33 capacity = RoundUp(capacity, kRegionSize);
52 CHECK_ALIGNED(mem_map_size, kRegionSize);
53 CHECK_ALIGNED(mem_map->Begin(), kRegionSize);
54 num_regions_ = mem_map_size / kRegionSize;
59 for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) {
60 regions_[i] = Region(i, region_addr, region_addr + kRegionSize);
66 CHECK_EQ(static_cast<size_t>(regions_[i].End() - regions_[i].Begin()), kRegionSize);
91 return num_regions * kRegionSize;
103 return num_regions * kRegionSize;
115 return num_regions * kRegionSize;
    [all...]
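
region_space.cc carves a rounded-up capacity into fixed 1 MB regions. A hedged restatement of that bookkeeping, with illustrative helper names:

    #include <cstddef>

    constexpr size_t kRegionSize = 1024 * 1024;

    // RoundUp as used at line 33; n must be nonzero.
    constexpr size_t RoundUp(size_t x, size_t n) { return ((x + n - 1) / n) * n; }

    // Mirrors line 54: region count after rounding capacity up (line 33).
    constexpr size_t NumRegionsFor(size_t capacity) {
      return RoundUp(capacity, kRegionSize) / kRegionSize;
    }

    static_assert(NumRegionsFor(1) == 1, "one byte still occupies a whole region");
    static_assert(NumRegionsFor(3 * kRegionSize) == 3, "exact multiples are unchanged");
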
region_space-inl.h 48 if (LIKELY(num_bytes <= kRegionSize)) {
144 if (LIKELY(num_bytes <= kRegionSize)) {
149 *usable_size = RoundUp(num_bytes, kRegionSize);
269 DCHECK_GT(num_bytes, kRegionSize);
270 size_t num_regs = RoundUp(num_bytes, kRegionSize) / kRegionSize;
272 DCHECK_LT((num_regs - 1) * kRegionSize, num_bytes);
273 DCHECK_LE(num_bytes, num_regs * kRegionSize);
312 *usable_size = num_regs * kRegionSize;
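
The large-allocation path above (num_bytes > kRegionSize) claims ceil(num_bytes / kRegionSize) whole regions and reports their full span as the usable size. A sketch of that sizing with the DCHECK invariants restated as asserts; the function name is illustrative:

    #include <cassert>
    #include <cstddef>

    constexpr size_t kRegionSize = 1024 * 1024;

    size_t RegionsForLargeAlloc(size_t num_bytes, size_t* usable_size) {
      assert(num_bytes > kRegionSize);                                // line 269
      size_t num_regs = (num_bytes + kRegionSize - 1) / kRegionSize;  // line 270's RoundUp/divide
      assert((num_regs - 1) * kRegionSize < num_bytes);               // line 272
      assert(num_bytes <= num_regs * kRegionSize);                    // line 273
      if (usable_size != nullptr) {
        *usable_size = num_regs * kRegionSize;                        // line 312
      }
      return num_regs;
    }
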
region_space.h 171 static constexpr size_t kRegionSize = 1 * MB;
248 DCHECK_EQ(static_cast<size_t>(end - begin), kRegionSize);
394 size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize);
403 DCHECK_LT(begin_ + kRegionSize, top_);
412 DCHECK_LE(bytes, kRegionSize);
500 size_t reg_idx = offset / kRegionSize;
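
Line 500 above recovers a region index from a reference by plain offset division. A standalone sketch (names illustrative):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 1024 * 1024;

    size_t RefToRegionIdx(const uint8_t* space_begin, const void* ref) {
      uintptr_t offset = reinterpret_cast<uintptr_t>(ref) -
                         reinterpret_cast<uintptr_t>(space_begin);
      return offset / kRegionSize;  // mirrors line 500
    }
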
  /external/compiler-rt/lib/sanitizer_common/
sanitizer_allocator.h 368 return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
376 uptr reg_beg = (uptr)p & ~(kRegionSize - 1);
397 return reinterpret_cast<void*>(kSpaceBeg + (kRegionSize * (class_id + 1)) -
458 uptr region_beg = kSpaceBeg + class_id * kRegionSize;
478 static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
481 // kRegionSize must be >= 2^32.
482 COMPILER_CHECK((kRegionSize) >= (1ULL << (SANITIZER_WORDSIZE / 2)));
509 uptr offset = chunk % kRegionSize;
527 uptr region_beg = kSpaceBeg + kRegionSize * class_id;
547 MapWithCallback(region_beg + kRegionSize
    [all...]
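
The sanitizer allocator splits one huge 64-bit address space into kNumClassesRounded equal regions, one per size class, so a pointer's class and intra-region offset fall out of division and modulo. A hedged sketch of that layout; the kSpaceBeg/kSpaceSize values are illustrative (chosen so kRegionSize is 2^36, satisfying line 482's >= 2^32 check), 64-bit only:

    #include <cstdint>

    using uptr = uintptr_t;

    constexpr uptr kSpaceBeg = 0x600000000000ULL;   // illustrative base
    constexpr uptr kSpaceSize = 0x40000000000ULL;   // illustrative size
    constexpr uptr kNumClassesRounded = 64;
    constexpr uptr kRegionSize = kSpaceSize / kNumClassesRounded;  // line 478

    // Line 368: which size class owns a pointer.
    uptr ClassIdFor(const void* p) {
      return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
    }

    // Lines 458 and 527: where a class's region begins.
    uptr RegionBegin(uptr class_id) { return kSpaceBeg + class_id * kRegionSize; }

    // Line 509: a chunk's offset within its region.
    uptr OffsetInRegion(uptr chunk) { return chunk % kRegionSize; }
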
  /art/runtime/gc/
heap-inl.h 347 if (space::RegionSpace::kRegionSize >= alloc_size) {
349 if (LIKELY(!IsOutOfMemoryOnAllocation<kGrow>(allocator_type, space::RegionSpace::kRegionSize))) {
357 *bytes_tl_bulk_allocated = space::RegionSpace::kRegionSize;
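
The heap fast path above only hands out a region-space TLAB when the request fits in a single region, and accounts a full region as the bulk-allocated size. A hedged sketch of that guard; the real code also consults IsOutOfMemoryOnAllocation (line 349) and the actual TLAB machinery:

    #include <cstddef>

    constexpr size_t kRegionSize = 1024 * 1024;

    // Returns true when a single-region TLAB can satisfy the request.
    bool TryRegionTlabSketch(size_t alloc_size, size_t* bytes_tl_bulk_allocated) {
      if (kRegionSize >= alloc_size) {           // line 347's guard
        *bytes_tl_bulk_allocated = kRegionSize;  // line 357: a whole region is claimed
        return true;
      }
      return false;
    }
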
  /external/compiler-rt/lib/sanitizer_common/tests/
sanitizer_allocator_test.cc 779 const uptr kRegionSize =
791 ASSERT_LT(2 * kAllocationSize, kRegionSize);
792 ASSERT_GT(3 * kAllocationSize, kRegionSize);
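
The two ASSERTs above pin the test geometry so that exactly two allocations fit per region (2k < region < 3k). A compile-time restatement with illustrative values:

    constexpr unsigned long long kRegionSizeSketch = 1 << 20;        // illustrative
    constexpr unsigned long long kAllocationSizeSketch = 400 << 10;  // illustrative
    static_assert(2 * kAllocationSizeSketch < kRegionSizeSketch, "");
    static_assert(3 * kAllocationSizeSketch > kRegionSizeSketch, "");
    static_assert(kRegionSizeSketch / kAllocationSizeSketch == 2,
                  "each region holds exactly two chunks");
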
  /external/google-breakpad/src/processor/
minidump_unittest.cc 645 const uint64_t kRegionSize = 0x2000;
650 .D64(kRegionSize) // region_size
676 ASSERT_EQ(kRegionSize, info1->GetSize());
682 info_list->GetMemoryInfoForAddress(kBaseAddress + kRegionSize / 2);
684 ASSERT_EQ(kRegionSize, info2->GetSize());
    [all...]
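
The lookup tested above resolves an address to the memory region whose [base, base + size) range contains it; the probe at kBaseAddress + kRegionSize / 2 lands mid-region. A generic sketch of that containment check, not breakpad's actual types:

    #include <cstdint>

    struct MemoryRegionSketch {
      uint64_t base;
      uint64_t size;
      bool Contains(uint64_t addr) const {
        return addr >= base && addr - base < size;  // half-open [base, base + size)
      }
    };

    // With base = kBaseAddress and size = kRegionSize = 0x2000, an address of
    // kBaseAddress + 0x1000 satisfies Contains() and maps back to this region.
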
  /art/runtime/gc/collector/
concurrent_copying.cc 58 static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,
    [all...]
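
This static_assert is the guard the read_barrier_table.h comment refers to: it pins two independently declared constants together at compile time, so changing one without the other fails the build. A minimal reproduction of the pattern, with illustrative namespaces and values:

    #include <cstddef>

    namespace space {
    struct RegionSpace { static constexpr std::size_t kRegionSize = 1024 * 1024; };
    }
    namespace accounting {
    struct ReadBarrierTable { static constexpr std::size_t kRegionSize = 1024 * 1024; };
    }

    static_assert(space::RegionSpace::kRegionSize ==
                      accounting::ReadBarrierTable::kRegionSize,
                  "RegionSpace and ReadBarrierTable must agree on the region size");
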
  /external/v8/src/heap/
spaces.h     [all...]
