    Searched refs: kPageSize (Results 1 - 25 of 97)


  /external/compiler-rt/test/tsan/
large_malloc_meta.cc 16 const int kPageSize = 4 << 10;
18 for (int j = 0; j < kSize; j += kPageSize / sizeof(*p))
21 mmap(0, kSize * sizeof(*p) + kPageSize, PROT_NONE, MAP_PRIVATE | MAP_ANON,
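The snippet above walks a large buffer one 4 KiB page at a time, touching a single word per page. A minimal standalone sketch of that stride pattern (the buffer size kSize below is my own choice, not the test's):

    #include <cstdlib>

    int main() {
      const int kPageSize = 4 << 10;              // 4 KiB pages, as in the test
      const int kSize = 1 << 20;                  // hypothetical element count
      int *p = static_cast<int *>(malloc(kSize * sizeof(*p)));
      if (p == nullptr) return 1;
      // One store per page: j advances by the number of ints that fit in a page.
      for (int j = 0; j < kSize; j += kPageSize / sizeof(*p))
        p[j] = j;
      free(p);
      return 0;
    }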
  /external/compiler-rt/test/asan/TestCases/Posix/
large_allocator_unpoisons_on_free.cc 27 const long kPageSize = sysconf(_SC_PAGESIZE);
28 void *p = my_memalign(kPageSize, 1024 * 1024);
31 char *q = (char *)mmap(p, kPageSize, PROT_READ | PROT_WRITE,
35 memset(q, 42, kPageSize);
37 munmap(q, kPageSize);
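Unlike the hard-coded 4 << 10 above, this test asks the OS for the page size at run time. A hedged, POSIX-only sketch of that pattern (error handling reduced to asserts):

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cassert>
    #include <cstring>

    int main() {
      const long kPageSize = sysconf(_SC_PAGESIZE);  // page size decided by the kernel
      void *q = mmap(nullptr, kPageSize, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      assert(q != MAP_FAILED);
      memset(q, 42, kPageSize);                      // the freshly mapped page is writable
      int rc = munmap(q, kPageSize);
      assert(rc == 0);
      (void)rc;
      return 0;
    }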
  /art/runtime/
mem_map_test.cc 54 const size_t page_size = static_cast<size_t>(kPageSize);
132 uintptr_t random_start = CreateStartPos(i * kPageSize);
158 kPageSize,
171 reinterpret_cast<uint8_t*>(kPageSize),
186 kPageSize,
199 constexpr size_t kMapSize = kPageSize;
221 uint8_t* valid_address = GetValidMapAddress(kPageSize, /*low_4gb*/false);
225 kPageSize,
236 kPageSize,
247 kPageSize,
    [all...]
globals.h 41 static constexpr int kPageSize = 4096;
45 static constexpr size_t kLargeObjectAlignment = kPageSize;
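ART fixes the page size at compile time, which is what lets alignment be done with a constant bit mask. A small sketch of that arithmetic (RoundUpToPage is a hypothetical helper, not ART's own RoundUp):

    #include <cstddef>

    static constexpr size_t kPageSize = 4096;

    constexpr size_t RoundUpToPage(size_t n) {
      // Valid only because kPageSize is a power of two.
      return (n + kPageSize - 1) & ~(kPageSize - 1);
    }

    static_assert(RoundUpToPage(1) == 4096, "a single byte still costs a full page");
    static_assert(RoundUpToPage(4096) == 4096, "aligned sizes are left unchanged");
    static_assert(RoundUpToPage(4097) == 8192, "anything past a boundary takes the next page");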
mem_map.cc 89 // & ~(kPageSize - 1) =~0000000000000001111
112 constexpr uintptr_t mask = mask_ones & ~(kPageSize - 1);
289 size_t page_aligned_byte_count = RoundUp(byte_count, kPageSize);
366 const size_t page_aligned_byte_count = RoundUp(byte_count, kPageSize);
403 int page_offset = start % kPageSize;
406 size_t page_aligned_byte_count = RoundUp(byte_count + page_offset, kPageSize);
414 redzone_size = kPageSize;
520 DCHECK_ALIGNED(begin_, kPageSize);
521 DCHECK_ALIGNED(base_begin_, kPageSize);
522 DCHECK_ALIGNED(reinterpret_cast<uint8_t*>(base_begin_) + base_size_, kPageSize);
    [all...]
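The page_offset handling above exists because mmap file offsets must be page aligned. A hedged sketch of the usual workaround (MapFileRegion is an illustrative name, not the ART API): map from the enclosing page boundary and hand back a pointer advanced by the leftover offset.

    #include <sys/mman.h>
    #include <cstddef>
    #include <cstdint>

    static constexpr size_t kPageSize = 4096;  // assumed, as in globals.h above

    void* MapFileRegion(int fd, size_t start, size_t byte_count) {
      size_t page_offset = start % kPageSize;          // distance into the first page
      size_t aligned_start = start - page_offset;      // page-aligned file offset
      size_t aligned_count =
          ((byte_count + page_offset + kPageSize - 1) / kPageSize) * kPageSize;
      void* base = mmap(nullptr, aligned_count, PROT_READ, MAP_PRIVATE,
                        fd, static_cast<off_t>(aligned_start));
      if (base == MAP_FAILED) return nullptr;
      return static_cast<uint8_t*>(base) + page_offset;  // caller sees `start`
    }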
oat.cc 105 if (!IsAligned<kPageSize>(executable_offset_)) {
108 if (!IsAligned<kPageSize>(image_patch_delta_)) {
130 if (!IsAligned<kPageSize>(executable_offset_)) {
133 if (!IsAligned<kPageSize>(image_patch_delta_)) {
205 DCHECK_ALIGNED(executable_offset_, kPageSize);
211 DCHECK_ALIGNED(executable_offset, kPageSize);
353 CHECK_ALIGNED(delta, kPageSize);
362 CHECK_ALIGNED(off, kPageSize);
383 CHECK_ALIGNED(image_file_location_oat_data_begin, kPageSize);
image.cc 65 CHECK_EQ(image_begin, RoundUp(image_begin, kPageSize));
66 CHECK_EQ(oat_file_begin, RoundUp(oat_file_begin, kPageSize));
67 CHECK_EQ(oat_data_begin, RoundUp(oat_data_begin, kPageSize));
79 CHECK_ALIGNED(delta, kPageSize) << " patch delta must be page aligned";
120 if (!IsAligned<kPageSize>(patch_delta_)) {
thread_pool.cc 41 stack_size += kPageSize;
46 CHECK_ALIGNED(stack_->Begin(), kPageSize);
47 int mprotect_result = mprotect(stack_->Begin(), kPageSize, PROT_NONE);
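The thread_pool.cc lines show a classic guard-page trick: grow the requested stack by one page and mprotect that page to PROT_NONE so a stack overflow faults immediately instead of silently corrupting adjacent memory. A hedged sketch under the same assumptions (fixed 4 KiB pages, minimal error handling):

    #include <sys/mman.h>
    #include <cassert>
    #include <cstddef>

    static constexpr size_t kPageSize = 4096;  // assumed

    void* AllocateStackWithGuard(size_t stack_size) {
      stack_size += kPageSize;                      // extra room for the guard page
      void* stack = mmap(nullptr, stack_size, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (stack == MAP_FAILED) return nullptr;
      // The lowest page becomes inaccessible; stacks grow down toward it.
      int rc = mprotect(stack, kPageSize, PROT_NONE);
      assert(rc == 0);
      (void)rc;
      return stack;
    }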
  /art/runtime/gc/allocator/
rosalloc.cc 47 size_t RosAlloc::dedicated_full_run_storage_[kPageSize / sizeof(size_t)] = { 0 };
61 DCHECK_ALIGNED(base, kPageSize);
62 DCHECK_EQ(RoundUp(capacity, kPageSize), capacity);
63 DCHECK_EQ(RoundUp(max_capacity, kPageSize), max_capacity);
65 CHECK_ALIGNED(page_release_size_threshold_, kPageSize);
86 size_t num_of_pages = footprint_ / kPageSize;
87 size_t max_num_of_pages = max_capacity_ / kPageSize;
90 RoundUp(max_num_of_pages, kPageSize),
102 DCHECK_EQ(capacity_ % kPageSize, static_cast<size_t>(0));
127 const size_t req_byte_size = num_pages * kPageSize;
    [all...]
dlmalloc.cc 65 start = reinterpret_cast<void*>(art::RoundUp(reinterpret_cast<uintptr_t>(start), art::kPageSize));
66 end = reinterpret_cast<void*>(art::RoundDown(reinterpret_cast<uintptr_t>(end), art::kPageSize));
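dlmalloc.cc rounds the start up and the end down before releasing memory: only pages that lie entirely inside the range may be returned to the kernel. A sketch of that inward rounding (ReleaseWholePages is an illustrative name):

    #include <sys/mman.h>
    #include <cstdint>

    static constexpr uintptr_t kPageSize = 4096;  // assumed power of two

    void ReleaseWholePages(void* start, void* end) {
      uintptr_t s = (reinterpret_cast<uintptr_t>(start) + kPageSize - 1) & ~(kPageSize - 1);
      uintptr_t e = reinterpret_cast<uintptr_t>(end) & ~(kPageSize - 1);
      if (e > s)  // at least one whole page lies inside [start, end)
        madvise(reinterpret_cast<void*>(s), e - s, MADV_DONTNEED);
    }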
  /external/compiler-rt/lib/tsan/rtl/
tsan_sync.cc 124 const uptr kPageSize = GetPageSizeCached() * kMetaRatio;
125 if (sz <= 4 * kPageSize) {
131 uptr diff = RoundUp(p, kPageSize) - p;
137 diff = p + sz - RoundDown(p + sz, kPageSize);
144 CHECK_EQ(p, RoundUp(p, kPageSize));
145 CHECK_EQ(sz, RoundUp(sz, kPageSize));
150 bool has_something = FreeRange(thr, pc, p, kPageSize);
151 p += kPageSize;
152 sz -= kPageSize;
158 bool has_something = FreeRange(thr, pc, p - kPageSize, kPageSize)
    [all...]
tsan_mman.cc 45 const uptr kPageSize = GetPageSizeCached() * kMetaRatio;
48 CHECK_GE(size, 2 * kPageSize);
49 uptr diff = RoundUp(p, kPageSize) - p;
54 diff = p + size - RoundDown(p + size, kPageSize);
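Both TSan files scale a runtime page size by kMetaRatio and then trim a byte range to that granularity: the ragged head and tail are handled separately, and the aligned middle is processed one block at a time. A hedged sketch of that partitioning (ForEachAlignedBlock and its callback are illustrative, not TSan's API):

    #include <unistd.h>
    #include <cstdint>
    #include <cstddef>

    // Process [p, p + sz) in page-sized pieces: a partial head, whole pages,
    // then a partial tail. `visit(addr, len)` is a hypothetical callback.
    template <typename Visitor>
    void ForEachAlignedBlock(uintptr_t p, size_t sz, Visitor visit) {
      const uintptr_t kPageSize = static_cast<uintptr_t>(sysconf(_SC_PAGESIZE));
      uintptr_t head = ((p + kPageSize - 1) & ~(kPageSize - 1)) - p;  // bytes before the first boundary
      if (head > sz) head = sz;
      if (head) visit(p, head);
      p += head;
      sz -= head;
      while (sz >= kPageSize) {        // whole pages in the middle
        visit(p, kPageSize);
        p += kPageSize;
        sz -= kPageSize;
      }
      if (sz) visit(p, sz);            // ragged tail, if any
    }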
  /art/runtime/gc/collector/
immune_spaces_test.cc 71 reinterpret_cast<uint8_t*>(kPageSize),
72 kPageSize));
208 constexpr size_t kImageSize = 123 * kPageSize;
209 constexpr size_t kImageOatSize = 321 * kPageSize;
210 constexpr size_t kOtherSpaceSize = 100 * kPageSize;
255 constexpr size_t kImage1Size = kPageSize * 17;
256 constexpr size_t kImage2Size = kPageSize * 13;
257 constexpr size_t kImage3Size = kPageSize * 3;
258 constexpr size_t kImage1OatSize = kPageSize * 5;
259 constexpr size_t kImage2OatSize = kPageSize * 8
    [all...]
semi_space-inl.h 37 CHECK_ALIGNED(obj, kPageSize);
  /system/core/libmemunreachable/
Allocator.cpp 57 static constexpr size_t kPageSize = 4096;
59 static constexpr size_t kUsableChunkSize = kChunkSize - kPageSize;
65 / kPageSize;
125 size = (size + kPageSize - 1) & ~(kPageSize - 1);
128 size_t map_size = size + align - kPageSize;
223 static_assert(sizeof(Chunk) <= kPageSize, "header must fit in page");
268 unsigned int page = n * allocation_size_ / kPageSize;
303 if (frees_since_purge_++ * allocation_size_ > 16 * kPageSize) {
311 //unsigned int allocsPerPage = kPageSize / allocation_size_
    [all...]
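Two things stand out in Allocator.cpp: sizes are rounded up to a page multiple with the (size + kPageSize - 1) & ~(kPageSize - 1) mask, and an aligned chunk is carved out of a deliberately oversized mapping (map_size = size + align - kPageSize). A hedged sketch of that over-map-and-trim idea (MapAligned is an illustrative name; align must be a power of two and a page multiple):

    #include <sys/mman.h>
    #include <cstdint>
    #include <cstddef>

    static constexpr size_t kPageSize = 4096;  // assumed

    void* MapAligned(size_t size, size_t align) {
      size = (size + kPageSize - 1) & ~(kPageSize - 1);
      size_t map_size = size + align - kPageSize;      // worst-case slack for alignment
      uintptr_t base = reinterpret_cast<uintptr_t>(
          mmap(nullptr, map_size, PROT_READ | PROT_WRITE,
               MAP_PRIVATE | MAP_ANONYMOUS, -1, 0));
      if (base == reinterpret_cast<uintptr_t>(MAP_FAILED)) return nullptr;
      uintptr_t aligned = (base + align - 1) & ~(align - 1);
      // Give back the unused slack before and after the aligned region.
      if (aligned > base)
        munmap(reinterpret_cast<void*>(base), aligned - base);
      if (aligned + size < base + map_size)
        munmap(reinterpret_cast<void*>(aligned + size), base + map_size - (aligned + size));
      return reinterpret_cast<void*>(aligned);
    }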
  /bionic/linker/tests/
linker_block_allocator_test.cpp 48 static size_t kPageSize = sysconf(_SC_PAGE_SIZE);
96 size_t n = kPageSize/sizeof(test_struct_larger) + 1 - 2;
111 size_t n = kPageSize/sizeof(test_struct_larger) - 1;
linker_memory_allocator_test.cpp 50 static size_t kPageSize = sysconf(_SC_PAGE_SIZE);
156 reinterpret_cast<uintptr_t>(ptr1)/kPageSize != reinterpret_cast<uintptr_t>(ptr2)/kPageSize);
177 size_t n = kPageSize / sizeof(test_struct_large) + 1 - 2;
  /external/libchrome/base/
security_unittest.cc 139 size_t kPageSize = 4096; // We support x86_64 only.
146 mmap(0, kPageSize, PROT_READ|PROT_WRITE,
150 ASSERT_EQ(munmap(default_mmap_heap_address, kPageSize), 0);
  /art/runtime/gc/space/
malloc_space.cc 88 *growth_limit = RoundUp(*growth_limit, kPageSize);
89 *capacity = RoundUp(*capacity, kPageSize);
123 growth_limit = RoundUp(growth_limit, kPageSize);
166 SetEnd(reinterpret_cast<uint8_t*>(RoundUp(reinterpret_cast<uintptr_t>(End()), kPageSize)));
169 DCHECK_ALIGNED(begin_, kPageSize);
170 DCHECK_ALIGNED(End(), kPageSize);
171 size_t size = RoundUp(Size(), kPageSize);
183 SetGrowthLimit(RoundUp(size, kPageSize));
  /art/compiler/
elf_builder.h 131 header_.sh_addralign = kPageSize;
465 rodata_(this, ".rodata", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
466 text_(this, ".text", SHT_PROGBITS, SHF_ALLOC | SHF_EXECINSTR, nullptr, 0, kPageSize, 0),
467 bss_(this, ".bss", SHT_NOBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
468 dynstr_(this, ".dynstr", SHF_ALLOC, kPageSize),
471 dynamic_(this, ".dynamic", SHT_DYNAMIC, SHF_ALLOC, &dynstr_, 0, kPageSize, sizeof(Elf_Dyn)),
472 eh_frame_(this, ".eh_frame", SHT_PROGBITS, SHF_ALLOC, nullptr, 0, kPageSize, 0),
480 abiflags_(this, ".MIPS.abiflags", SHT_MIPS_ABIFLAGS, SHF_ALLOC, nullptr, 0, kPageSize, 0,
562 CHECK(loaded_size_ == 0 || loaded_size_ == RoundUp(virtual_address_, kPageSize))
630 DCHECK_EQ(rodata_.header_.sh_addralign, static_cast<Elf_Word>(kPageSize));
    [all...]
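elf_builder.h gives the loadable sections a kPageSize sh_addralign because the loader maps PT_LOAD segments with mmap, which only works if virtual address and file offset agree modulo the page size. A small sketch of that constraint (LoadSegmentIsMappable is an illustrative helper, 4 KiB pages assumed):

    #include <elf.h>
    #include <cstddef>

    static constexpr size_t kPageSize = 4096;  // assumed

    bool LoadSegmentIsMappable(const Elf64_Phdr& phdr) {
      if (phdr.p_type != PT_LOAD) return true;  // only PT_LOAD segments are mmapped
      // mmap can honor the segment only if vaddr and offset share the same
      // position within a page.
      return (phdr.p_vaddr % kPageSize) == (phdr.p_offset % kPageSize);
    }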
  /external/google-breakpad/src/client/windows/unittests/
exception_handler_death_test.cc 416 const DWORD kPageSize = sSysInfo.dwPageSize;
423 kPageSize * 2,
427 char* memory = all_memory + kPageSize;
428 ASSERT_TRUE(VirtualAlloc(memory, kPageSize,
508 const DWORD kPageSize = sSysInfo.dwPageSize;
511 const int kOffset = kPageSize - sizeof(instructions);
515 kPageSize * 2,
519 ASSERT_TRUE(VirtualAlloc(memory, kPageSize,
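The Breakpad test reads the page size from GetSystemInfo() and reserves two pages while committing only one, so touching the uncommitted page raises an access violation. A Windows-only sketch of that layout (AllocatePageWithTrap is an illustrative name):

    #include <windows.h>

    char* AllocatePageWithTrap() {
      SYSTEM_INFO info;
      GetSystemInfo(&info);
      const DWORD kPageSize = info.dwPageSize;
      // Reserve address space for two pages, but commit none of it yet.
      char* base = static_cast<char*>(
          VirtualAlloc(nullptr, kPageSize * 2, MEM_RESERVE, PAGE_NOACCESS));
      if (base == nullptr) return nullptr;
      // Commit only the first page; the second stays reserved and inaccessible,
      // so a write past the end of the committed page faults.
      return static_cast<char*>(
          VirtualAlloc(base, kPageSize, MEM_COMMIT, PAGE_READWRITE));
    }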
  /art/compiler/linker/
multi_oat_relative_patcher.cc 39 DCHECK_ALIGNED(adjustment, kPageSize);
  /external/compiler-rt/lib/asan/tests/
asan_test_utils.h 63 static const int kPageSize = 4096;
  /art/runtime/gc/accounting/
card_table.cc 122 uint8_t* round_start = AlignUp(start_card, kPageSize);
123 uint8_t* round_end = AlignDown(end_card, kPageSize);
  /external/v8/test/cctest/heap/
test-spaces.cc 72 byte* mem = NewArray<byte>(2*Page::kPageSize);
76 Address page_start = RoundUp(start, Page::kPageSize);
89 CHECK(p->ObjectAreaEnd() == page_start + Page::kPageSize);
93 CHECK(p->Offset(page_start + Page::kPageSize) == Page::kPageSize);
96 CHECK(p->OffsetToAddress(Page::kPageSize) == p->ObjectAreaEnd());
208 const int pageSize = Page::kPageSize;
673 const int kAdditionalObjects = (Page::kPageSize / kObjectSize / 2);
687 int lo_size = Page::kPageSize;
749 FLAG_target_semi_space_size = 2 * (Page::kPageSize / MB)
    [all...]

