
Lines Matching refs:Address

58   // address and end as NULL, then the first iteration will move on
87 Address cur, Address end,
148 LOG(isolate_, NewEvent("CodeRange", code_range_->address(), requested));
149 Address base = reinterpret_cast<Address>(code_range_->address());
150 Address aligned_base =
151 RoundUp(reinterpret_cast<Address>(code_range_->address()),
210 Address CodeRange::AllocateRawMemory(const size_t requested_size,
247 bool CodeRange::CommitRawMemory(Address start, size_t length) {
252 bool CodeRange::UncommitRawMemory(Address start, size_t length) {
257 void CodeRange::FreeRawMemory(Address address, size_t length) {
258 ASSERT(IsAddressAligned(address, MemoryChunk::kAlignment));
259 free_list_.Add(FreeBlock(address, length));
260 code_range_->Uncommit(address, length);
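
The CodeRange lines above (257-260) free a chunk-aligned block by checking its alignment, recording it on a free list, and uncommitting it. A minimal standalone sketch of that pattern follows; the names and the alignment constant are illustrative, not V8's actual types.

// Sketch only: a free-block list in the spirit of CodeRange::FreeRawMemory.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

using Address = uint8_t*;

constexpr size_t kChunkAlignment = 1 << 20;  // illustrative stand-in for MemoryChunk::kAlignment

struct FreeBlock {
  Address start;
  size_t size;
};

class CodeRangeSketch {
 public:
  void FreeRawMemory(Address address, size_t length) {
    // Mirrors the alignment ASSERT on line 258.
    assert(reinterpret_cast<uintptr_t>(address) % kChunkAlignment == 0);
    free_list_.push_back(FreeBlock{address, length});
    // A real implementation would also uncommit the pages here, as line 260 does.
  }

 private:
  std::vector<FreeBlock> free_list_;
};
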
323 static_cast<Address>(reservation->address())));
329 void MemoryAllocator::FreeMemory(Address base,
342 if (isolate_->code_range()->contains(static_cast<Address>(base))) {
354 Address MemoryAllocator::ReserveAlignedMemory(size_t size,
361 Address base = RoundUp(static_cast<Address>(reservation.address()),
368 Address MemoryAllocator::AllocateAlignedMemory(size_t reserve_size,
375 Address base = ReserveAlignedMemory(reserve_size, alignment, &reservation);
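
ReserveAlignedMemory (354-375) rounds the base of a fresh reservation up to the requested alignment. A minimal sketch of that rounding step, assuming a power-of-two alignment; the helper name is hypothetical.

// Sketch only: round a raw reservation base up to a power-of-two alignment.
#include <cstdint>

using Address = uint8_t*;

inline Address RoundUpTo(Address addr, uintptr_t alignment) {
  uintptr_t value = reinterpret_cast<uintptr_t>(addr);
  return reinterpret_cast<Address>((value + alignment - 1) & ~(alignment - 1));
}

// Usage: reserve size + alignment bytes from the OS, then
//   Address base = RoundUpTo(raw_reservation_start, alignment);
// so that [base, base + size) is aligned regardless of what the OS returned.
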
411 Address start,
413 Address area_start = start + NewSpacePage::kObjectStartOffset;
414 Address area_end = start + Page::kPageSize;
448 Address base,
450 Address area_start,
451 Address area_end,
456 ASSERT(base == chunk->address());
500 size_t header_size = area_start() - address() - guard_size;
509 Address start = address() + committed_size + guard_size;
528 Address start = address() + committed_size + guard_size - length;
582 Address base = NULL;
584 Address area_start = NULL;
585 Address area_end = NULL;
747 reinterpret_cast<Address>(chunk), chunk->IsEvacuationCandidate());
756 FreeMemory(chunk->address(),
763 bool MemoryAllocator::CommitBlock(Address start,
777 bool MemoryAllocator::UncommitBlock(Address start, size_t size) {
784 void MemoryAllocator::ZapBlock(Address start, size_t size) {
875 Address start,
909 void MemoryChunk::IncrementLiveBytesFromMutator(Address address, int by) {
910 MemoryChunk* chunk = MemoryChunk::FromAddress(address);
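
MemoryChunk::FromAddress (909-910) recovers the chunk header from any interior address by masking off the low bits, which works because chunks sit on a power-of-two alignment. A standalone sketch of the idiom, with an assumed alignment constant and an illustrative header type.

// Sketch only: interior pointer -> containing chunk header via masking.
#include <cstdint>

constexpr uintptr_t kChunkAlignment = 1 << 20;  // assumed power-of-two chunk alignment
constexpr uintptr_t kChunkAlignmentMask = kChunkAlignment - 1;

struct ChunkHeaderSketch {
  int live_bytes;  // example per-chunk field, as touched by IncrementLiveBytesFromMutator

  static ChunkHeaderSketch* FromAddress(void* interior_pointer) {
    uintptr_t value = reinterpret_cast<uintptr_t>(interior_pointer);
    return reinterpret_cast<ChunkHeaderSketch*>(value & ~kChunkAlignmentMask);
  }
};
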
980 MaybeObject* PagedSpace::FindObject(Address addr) {
989 Address cur = obj->address();
990 Address next = cur + obj->Size();
1155 Address end_of_previous_object = page->area_start();
1156 Address top = page->area_end();
1159 CHECK(end_of_previous_object <= object->address());
1180 CHECK(object->address() + size <= top);
1181 end_of_previous_object = object->address() + size;
1202 Address base =
1332 Address new_limit =
1353 Address top = allocation_info_.top;
1369 Address limit = NewSpacePage::FromLimit(top)->area_end();
1385 Address old_top = allocation_info_.top;
1386 Address new_top = old_top + size_in_bytes;
1387 Address high = to_space_.page_high();
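
Lines 1385-1387 show the new-space fast path: bump the allocation top by the request size and fall back to the slow path when the result would pass the page's high address. A minimal sketch of that check, with illustrative names.

// Sketch only: bump-pointer allocation with a top/high pair.
#include <cstddef>
#include <cstdint>

using Address = uint8_t*;

struct AllocationInfoSketch {
  Address top;
  Address high;  // end of the currently usable region

  // Returns the object start, or nullptr when the slow path must take over.
  Address AllocateRaw(size_t size_in_bytes) {
    Address old_top = top;
    Address new_top = old_top + size_in_bytes;
    if (new_top > high) return nullptr;  // slow path: fresh page or GC
    top = new_top;
    return old_top;
  }
};
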
1419 // There should be objects packed in from the low address up to the
1421 Address current = to_space_.first_page()->area_start();
1471 void SemiSpace::SetUp(Address start,
1502 Address end = start_ + maximum_capacity_;
1503 Address start = end - pages * Page::kPageSize;
1526 Address start = start_ + maximum_capacity_ - capacity_;
1559 Address end = start_ + maximum_capacity_;
1560 Address start = end - new_capacity;
1572 Address page_address = end - i * Page::kPageSize;
1595 Address space_end = start_ + maximum_capacity_;
1596 Address old_start = space_end - capacity_;
1621 // Fixup back-pointers to anchor. Address of anchor changes
1665 // Fixup back-pointers to the page list anchor now that its address
1676 void SemiSpace::set_age_mark(Address mark) {
1724 void SemiSpace::AssertValidRange(Address start, Address end) {
1730 // Start address is before end address, either on same page,
1731 // or end address is on a later page in the linked list of
1758 SemiSpaceIterator::SemiSpaceIterator(NewSpace* space, Address start) {
1763 SemiSpaceIterator::SemiSpaceIterator(Address from, Address to) {
1768 void SemiSpaceIterator::Initialize(Address start,
1769 Address end,
2004 Memory::Address_at(address() + kNextOffset));
2007 Memory::Address_at(address() + kPointerSize));
2016 return reinterpret_cast<FreeListNode**>(address() + kNextOffset);
2018 return reinterpret_cast<FreeListNode**>(address() + kPointerSize);
2030 Memory::Address_at(address() + kNextOffset) =
2031 reinterpret_cast<Address>(next);
2033 Memory::Address_at(address() + kPointerSize) =
2034 reinterpret_cast<Address>(next);
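
Lines 2004-2034 show that a FreeListNode keeps its next pointer inside the freed block itself, read and written through Memory::Address_at. A standalone sketch of the same intrusive-list idea; the offset and the helpers are assumptions, not V8's layout.

// Sketch only: store the free list's "next" pointer in the freed memory.
#include <cstddef>
#include <cstdint>
#include <cstring>

using Address = uint8_t*;

constexpr size_t kNextOffset = sizeof(void*);  // assumed slot past a header word

inline void SetNext(Address block, Address next) {
  std::memcpy(block + kNextOffset, &next, sizeof(next));
}

inline Address GetNext(Address block) {
  Address next;
  std::memcpy(block + kNextOffset, &next, sizeof(next));
  return next;
}
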
2073 if (Page::FromAddress((*n)->address()) == p) {
2095 Page::FromAddress(node->address())->IsEvacuationCandidate()) {
2141 Map** map_location = reinterpret_cast<Map**>(n->address());
2176 int FreeList::Free(Address start, int size_in_bytes) {
2218 page = Page::FromAddress(node->address());
2229 page = Page::FromAddress(node->address());
2240 page = Page::FromAddress(node->address());
2253 Page::FromAddress(cur_node->address())->IsEvacuationCandidate()) {
2256 page = Page::FromAddress(cur_node->address());
2276 page = Page::FromAddress(node->address());
2296 page = Page::FromAddress(node->address());
2303 page = Page::FromAddress(node->address());
2310 page = Page::FromAddress(node->address());
2352 reinterpret_cast<Object**>(new_node->address())[i] =
2375 owner_->Free(new_node->address() + size_in_bytes + linear_size,
2377 owner_->SetTop(new_node->address() + size_in_bytes,
2378 new_node->address() + size_in_bytes + linear_size);
2382 owner_->SetTop(new_node->address() + size_in_bytes,
2383 new_node->address() + new_node_size);
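
Lines 2375-2383 split an oversized free node: the requested bytes come off the front, a bounded remainder becomes the new linear allocation area, and any tail beyond that goes back to the free list. A hedged sketch of that split, with hypothetical names.

// Sketch only: carve a request off a free node and keep a linear area.
#include <cstddef>
#include <cstdint>

using Address = uint8_t*;

struct AreaSketch {
  Address top;
  Address limit;
};

template <typename FreeFn>
AreaSketch SplitFreeNode(Address node, size_t node_size, size_t size_in_bytes,
                         size_t linear_size, FreeFn free_tail) {
  size_t remainder = node_size - size_in_bytes;
  if (remainder > linear_size) {
    // Keep linear_size bytes for bump allocation, return the rest to the free list.
    free_tail(node + size_in_bytes + linear_size, remainder - linear_size);
    return AreaSketch{node + size_in_bytes, node + size_in_bytes + linear_size};
  }
  // Small remainder: keep all of it as the linear allocation area.
  return AreaSketch{node + size_in_bytes, node + node_size};
}
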
2485 Address top = allocation_info_.top;
2486 if ((top - bytes) == allocation->address()) {
2487 allocation_info_.top = allocation->address();
2533 Address current_top = allocation_info_.top;
2534 Address new_top = current_top + size_in_bytes;
2547 SetTop(new_area->address(), new_area->address() + size_in_bytes);
2938 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address()));
2990 reinterpret_cast<Object**>(object->address())[0] =
2992 reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
3013 Address a) {
3022 LargePage* LargeObjectSpace::FindPage(Address a) {
3050 Page::FromAddress(object->address())->ResetProgressBar();
3051 Page::FromAddress(object->address())->ResetLiveBytes();
3094 Address address = object->address();
3095 MemoryChunk* chunk = MemoryChunk::FromAddress(address);
3099 SLOW_ASSERT(!owned || !FindObject(address)->IsFailure());
3115 Page* page = Page::FromAddress(object->address());
3116 CHECK(object->address() == page->area_start());
3196 this->address(),
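
FindPage (3022) maps an address inside a large object back to its LargePage. One way to do that, sketched here for the simplified case where the address falls in the page's first aligned slice, is to register each page under its chunk-aligned base and look interior addresses up by masking; the container and names are illustrative, not V8's implementation.

// Sketch only: chunk-aligned registration and lookup of large pages.
#include <cstdint>
#include <unordered_map>

constexpr uintptr_t kChunkAlignment = 1 << 20;  // assumed chunk alignment

struct LargePageSketch { /* page header fields would live here */ };

class LargeObjectSpaceSketch {
 public:
  void Register(uintptr_t base, LargePageSketch* page) {
    pages_[base & ~(kChunkAlignment - 1)] = page;
  }

  LargePageSketch* FindPage(uintptr_t a) const {
    auto it = pages_.find(a & ~(kChunkAlignment - 1));
    return it == pages_.end() ? nullptr : it->second;
  }

 private:
  std::unordered_map<uintptr_t, LargePageSketch*> pages_;
};
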