// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/v8-fallthrough.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/msan.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

// -----------------------------------------------------------------------------
// SemiSpaceIterator

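// Walks the iteration range object by object. Returns the next non-filler
// object, or nullptr once |current_| reaches |limit_|. When |current_| lands
// on a page boundary, iteration continues at the next page's area_start().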
HeapObject* SemiSpaceIterator::Next() {
  while (current_ != limit_) {
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      DCHECK(page);
      current_ = page->area_start();
      if (current_ == limit_) return nullptr;
    }
    HeapObject* object = HeapObject::FromAddress(current_);
    current_ += object->Size();
    if (!object->IsFiller()) {
      return object;
    }
  }
  return nullptr;
}

// -----------------------------------------------------------------------------
// HeapObjectIterator

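// Returns the next live object on the current page, advancing to subsequent
// pages of the space until one yields an object; returns nullptr when all
// pages are exhausted.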
HeapObject* HeapObjectIterator::Next() {
  do {
    HeapObject* next_obj = FromCurrentPage();
    if (next_obj != nullptr) return next_obj;
  } while (AdvanceToNextPage());
  return nullptr;
}

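// Scans the current page from |cur_addr_| to |cur_end_|, skipping fillers and
// the unallocated gap between the space's top() and limit(). Returns the next
// object found, or nullptr when the page is exhausted.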
HeapObject* HeapObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      cur_addr_ = space_->limit();
      continue;
    }
    HeapObject* obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj->Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj->IsFiller()) {
      if (obj->IsCode()) {
        DCHECK_EQ(space_, space_->heap()->code_space());
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return nullptr;
}

// -----------------------------------------------------------------------------
// SemiSpace

bool SemiSpace::Contains(HeapObject* o) {
  return id_ == kToSpace
             ? MemoryChunk::FromAddress(o->address())->InToSpace()
             : MemoryChunk::FromAddress(o->address())->InFromSpace();
}

bool SemiSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool SemiSpace::ContainsSlow(Address a) {
  for (Page* p : *this) {
    if (p == MemoryChunk::FromAddress(a)) return true;
  }
  return false;
}

// --------------------------------------------------------------------------
// NewSpace

bool NewSpace::Contains(HeapObject* o) {
  return MemoryChunk::FromAddress(o->address())->InNewSpace();
}

bool NewSpace::Contains(Object* o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool NewSpace::ContainsSlow(Address a) {
  return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContainsSlow(Address a) {
  return to_space_.ContainsSlow(a);
}

bool NewSpace::FromSpaceContainsSlow(Address a) {
  return from_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }

bool PagedSpace::Contains(Address addr) {
  if (heap()->lo_space()->FindPage(addr)) return false;
  return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}

bool PagedSpace::Contains(Object* o) {
  if (!o->IsHeapObject()) return false;
  return Page::FromAddress(HeapObject::cast(o)->address())->owner() == this;
}

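// Detaches all of the page's free-list categories from this space's free list
// so the page's free memory is no longer available for allocation.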
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    DCHECK_EQ(free_list(), category->owner());
    category->set_free_list(nullptr);
    free_list()->RemoveCategory(category);
  });
}

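// Re-attaches the page's free-list categories to this space's free list and
// returns the number of bytes made available again.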
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    category->set_free_list(&free_list_);
    added += category->available();
    category->Relink();
  });
  DCHECK_EQ(page->AvailableInFreeList(),
            page->AvailableInFreeListFromAllocatedBytes());
  return added;
}

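// Undoes the most recent linear allocation: if |object| sits directly below
// the current allocation top, the top is moved back over it and true is
// returned; otherwise false is returned and nothing changes.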
bool PagedSpace::TryFreeLast(HeapObject* object, int object_size) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

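// Resolves |addr| to its memory chunk. Large-object pages are looked up via
// the large object space first, since an interior pointer into a large page
// cannot be mapped back to its chunk with plain address masking.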
MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) {
  MemoryChunk* chunk = heap->lo_space()->FindPage(addr);
  if (chunk == nullptr) {
    chunk = MemoryChunk::FromAddress(addr);
  }
  return chunk;
}

void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner()->identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

MemoryChunkIterator::MemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()) {}

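// Returns the next chunk, visiting old space, map space, code space and large
// object space in that order. Each state falls through to the next one once
// its space is exhausted; nullptr is returned when every space has been
// visited.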
MemoryChunk* MemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
  return top(type) ? top(type)->page() : nullptr;
}

FreeList* FreeListCategory::owner() { return free_list_; }

bool FreeListCategory::is_linked() {
  return prev_ != nullptr || next_ != nullptr;
}

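// Bump-pointer allocation within the buffer. Inserts an alignment filler in
// front of the object if needed and returns AllocationResult::Retry() when
// the aligned request does not fit below the buffer's limit.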
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                    filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

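// Returns true if the current linear allocation area has room for
// |size_in_bytes|; otherwise tries to refill it via the slow path.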
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
    return true;
  }
  return SlowRefillLinearAllocationArea(size_in_bytes);
}

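// Unconditionally bumps the allocation top by |size_in_bytes|; the caller
// must have ensured that the linear allocation area is large enough.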
HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}

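// Like AllocateLinearly, but honors |alignment| by prepending a filler when
// necessary. On success, *size_in_bytes is increased by the filler size; if
// the aligned request does not fit, nullptr is returned and nothing changes.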
HeapObject* PagedSpace::TryAllocateLinearlyAligned(
    int* size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + *size_in_bytes;
  if (new_top > allocation_info_.limit()) return nullptr;

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    *size_in_bytes += filler_size;
    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                     filler_size);
  }

  return HeapObject::FromAddress(current_top);
}

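// Word-aligned raw allocation. Refills the linear allocation area if needed,
// updates the code-space skip list when requested, and returns a retry result
// if no memory could be obtained.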
AllocationResult PagedSpace::AllocateRawUnaligned(
    int size_in_bytes, UpdateSkipList update_skip_list) {
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return AllocationResult::Retry(identity());
  }
  HeapObject* object = AllocateLinearly(size_in_bytes);
  DCHECK_NOT_NULL(object);
  if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
    SkipList::Update(object->address(), size_in_bytes);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}

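// Raw allocation with an explicit alignment requirement. If the fast aligned
// bump fails, the linear area is refilled assuming the worst-case filler size
// and the aligned allocation is retried.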
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment) {
  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  int allocation_size = size_in_bytes;
  HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
  if (object == nullptr) {
    // We don't know exactly how much filler we need to align until space is
    // allocated, so assume the worst case.
    int filler_size = Heap::GetMaximumFillToAlign(alignment);
    allocation_size += filler_size;
    if (!EnsureLinearAllocationArea(allocation_size)) {
      return AllocationResult::Retry(identity());
    }
    allocation_size = size_in_bytes;
    object = TryAllocateLinearlyAligned(&allocation_size, alignment);
    DCHECK_NOT_NULL(object);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}

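// Main entry point for raw allocation in a paged space. Chooses aligned or
// unaligned allocation (alignment only matters on 32-bit hosts) and, on
// success, reports the bytes allocated since the previous step via
// AllocationStep.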
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
  if (top_on_previous_step_ && top() < top_on_previous_step_ &&
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    // The top_on_previous_step_ can be one byte beyond the current page.
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
  }
  size_t bytes_since_last =
      top_on_previous_step_ ? top() - top_on_previous_step_ : 0;

  DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
  AllocationResult result =
      alignment == kDoubleAligned
          ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
          : AllocateRawUnaligned(size_in_bytes);
#else
  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
  HeapObject* heap_obj = nullptr;
  if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
    DCHECK_IMPLIES(
        heap()->incremental_marking()->black_allocation(),
        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
    AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                   heap_obj->address(), size_in_bytes);
    StartNextInlineAllocationStep();
  }
  return result;
}


// -----------------------------------------------------------------------------
// NewSpace

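// Aligned bump-pointer allocation in new space. If the aligned request does
// not fit in the current allocation area, EnsureAllocation is asked to make
// room before the allocation is performed.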
AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                              AllocationAlignment alignment) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
    filler_size = Heap::GetFillToAlign(top, alignment);
    aligned_size_in_bytes = size_in_bytes + filler_size;
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}

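// Word-aligned bump-pointer allocation in new space; falls back to
// EnsureAllocation when the request does not fit in the current allocation
// area.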
AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
  }

  HeapObject* obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}

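// Main entry point for raw allocation in new space. Double alignment is only
// requested on 32-bit hosts; otherwise allocation is word aligned.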
AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment) {
  if (top() < top_on_previous_step_) {
    // Generated code decreased the top() pointer to do folded allocations.
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_));
    top_on_previous_step_ = top();
  }
#ifdef V8_HOST_ARCH_32_BIT
  return alignment == kDoubleAligned
             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}

V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment) {
  base::LockGuard<base::Mutex> guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment);
}

LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject* obj = nullptr;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj)->address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}

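// Absorbs the unused portion of |other| into this buffer when the two are
// adjacent (|other|'s limit equals this buffer's top); |other| is then reset
// to an empty area. Returns false if the buffers are not contiguous.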
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
  if (IsValid()) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_