// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/scavenger.h"

#include "src/heap/barrier.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/heap/sweeper.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {

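// Re-visits the body of an object that has just been promoted into the old
// generation: remaining pointers into from-space are scavenged, and slots are
// recorded for the old-to-new remembered set and (when compacting) for
// mark-compact's evacuation-candidate slot recording.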
class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
 public:
  IterateAndScavengePromotedObjectsVisitor(Heap* heap, Scavenger* scavenger,
                                           bool record_slots)
      : heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {}

  inline void VisitPointers(HeapObject* host, Object** start,
                            Object** end) final {
    for (Object** slot = start; slot < end; ++slot) {
      Object* target = *slot;
      DCHECK(!HasWeakHeapObjectTag(target));
      if (target->IsHeapObject()) {
        HandleSlot(host, reinterpret_cast<Address>(slot),
                   HeapObject::cast(target));
      }
    }
  }

  inline void VisitPointers(HeapObject* host, MaybeObject** start,
                            MaybeObject** end) final {
    // Treat weak references as strong. TODO(marja): Proper weakness handling in
    // the young generation.
    for (MaybeObject** slot = start; slot < end; ++slot) {
      MaybeObject* target = *slot;
      HeapObject* heap_object;
      if (target->ToStrongOrWeakHeapObject(&heap_object)) {
        HandleSlot(host, reinterpret_cast<Address>(slot), heap_object);
      }
    }
  }

  inline void HandleSlot(HeapObject* host, Address slot_address,
                         HeapObject* target) {
    HeapObjectReference** slot =
        reinterpret_cast<HeapObjectReference**>(slot_address);
    scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));

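    // If the target still lives in from-space it has not been scavenged yet:
    // copy or promote it, update the slot, and, if the new location is still
    // in new space, record an OLD_TO_NEW slot for the promoted host.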
    if (Heap::InFromSpace(target)) {
      scavenger_->ScavengeObject(slot, target);
      bool success = (*slot)->ToStrongOrWeakHeapObject(&target);
      USE(success);
      DCHECK(success);
      scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));

      if (Heap::InNewSpace(target)) {
        SLOW_DCHECK(target->IsHeapObject());
        SLOW_DCHECK(Heap::InToSpace(target));
        RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
                                          slot_address);
      }
      SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
          HeapObject::cast(target)));
    } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
                                    HeapObject::cast(target))) {
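      // The target is not in from-space but sits on an evacuation candidate
      // page; record the slot so mark-compact can update it after evacuation.
      // This only happens for black hosts (see the record_slots_ flag).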
      heap_->mark_compact_collector()->RecordSlot(host, slot, target);
    }
  }

 private:
  Heap* const heap_;
  Scavenger* const scavenger_;
  const bool record_slots_;
};

Scavenger::Scavenger(Heap* heap, bool is_logging, CopiedList* copied_list,
                     PromotionList* promotion_list, int task_id)
    : heap_(heap),
      promotion_list_(promotion_list, task_id),
      copied_list_(copied_list, task_id),
      local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
      copied_size_(0),
      promoted_size_(0),
      allocator_(heap),
      is_logging_(is_logging),
      is_incremental_marking_(heap->incremental_marking()->IsMarking()),
      is_compacting_(heap->incremental_marking()->IsCompacting()) {}

void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
  // We are not collecting slots on new space objects during mutation, so we
  // have to scan for pointers to evacuation candidates when we promote
  // objects. However, we should only record slots in black objects: a grey
  // object's slots will be rescanned anyway, and a white object might not
  // survive until the end of the collection, so recording its slots would
  // violate the invariant.
  const bool record_slots =
      is_compacting_ &&
      heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
  IterateAndScavengePromotedObjectsVisitor visitor(heap(), this, record_slots);
  target->IterateBodyFast(target->map(), size, &visitor);
}

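// Old-space pages that still need sweeping are handed back to the sweeper once
// the scavenger is done with them; READD_TEMPORARY_REMOVED_PAGE signals that
// the page had been temporarily removed from the sweeper's work list.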
void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
  AllocationSpace space = page->owner()->identity();
  if ((space == OLD_SPACE) && !page->SweepingDone()) {
    heap()->mark_compact_collector()->sweeper()->AddPage(
        space, reinterpret_cast<Page*>(page),
        Sweeper::READD_TEMPORARY_REMOVED_PAGE);
  }
}

void Scavenger::ScavengePage(MemoryChunk* page) {
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "Scavenger::ScavengePage");
  CodePageMemoryModificationScope memory_modification_scope(page);
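  // Visit all untyped OLD_TO_NEW slots recorded for this page and scavenge
  // any from-space objects they still point to.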
  RememberedSet<OLD_TO_NEW>::Iterate(
      page,
      [this](Address addr) { return CheckAndScavengeObject(heap_, addr); },
      SlotSet::KEEP_EMPTY_BUCKETS);
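  // Do the same for typed slots (slots that need decoding, e.g. pointers
  // embedded in code), updating each slot through UpdateTypedSlotHelper.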
  RememberedSet<OLD_TO_NEW>::IterateTyped(
      page, [this](SlotType type, Address host_addr, Address addr) {
        return UpdateTypedSlotHelper::UpdateTypedSlot(
            heap_, type, addr, [this](MaybeObject** addr) {
              return CheckAndScavengeObject(heap(),
                                            reinterpret_cast<Address>(addr));
            });
      });

  AddPageToSweeperIfNecessary(page);
}

void Scavenger::Process(OneshotBarrier* barrier) {
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "Scavenger::Process");
  // Threshold at which we switch to processing the promotion list, to avoid
  // allocating too much backing store in the worklist.
  const int kProcessPromotionListThreshold = kPromotionListSegmentSize / 2;
  ScavengeVisitor scavenge_visitor(this);

  const bool have_barrier = barrier != nullptr;
  bool done;
  size_t objects = 0;
  do {
    done = true;
    ObjectAndSize object_and_size;
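    // Drain the local copied list first, but back off once the local segment
    // of the promotion list grows past the threshold, switching to promoted
    // objects before the worklist's backing store grows too large.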
    while ((promotion_list_.LocalPushSegmentSize() <
            kProcessPromotionListThreshold) &&
           copied_list_.Pop(&object_and_size)) {
      scavenge_visitor.Visit(object_and_size.first);
      done = false;
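      // Every kInterruptThreshold objects, wake up other scavenger tasks if
      // work has spilled into the shared global pool.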
      if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
        if (!copied_list_.IsGlobalPoolEmpty()) {
          barrier->NotifyAll();
        }
      }
    }

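    // Now drain the promotion list: each promoted object is re-scanned for
    // pointers into new space (and, when compacting, into evacuation
    // candidates) via IterateAndScavengePromotedObject.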
    while (promotion_list_.Pop(&object_and_size)) {
      HeapObject* target = object_and_size.first;
      int size = object_and_size.second;
      DCHECK(!target->IsMap());
      IterateAndScavengePromotedObject(target, size);
      done = false;
      if (have_barrier && ((++objects % kInterruptThreshold) == 0)) {
        if (!promotion_list_.IsGlobalPoolEmpty()) {
          barrier->NotifyAll();
        }
      }
    }
  } while (!done);
}

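// Merges the per-task results back into the heap: pretenuring feedback, the
// copied and promoted byte counts, and any remaining local allocation state.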
void Scavenger::Finalize() {
  heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
  heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
  heap()->IncrementPromotedObjectsSize(promoted_size_);
  allocator_.Finalize();
}

void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
                                           Object** p) {
  DCHECK(!HasWeakHeapObjectTag(*p));
  ScavengePointer(p);
}

void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,
                                            Object** start, Object** end) {
  // Copy all HeapObject pointers in [start, end)
  for (Object** p = start; p < end; p++) ScavengePointer(p);
}

void RootScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  DCHECK(!HasWeakHeapObjectTag(object));
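  // Only pointers into new space need scavenging; roots that point into the
  // old generation are left untouched.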
  if (!Heap::InNewSpace(object)) return;

  scavenger_->ScavengeObject(reinterpret_cast<HeapObjectReference**>(p),
                             reinterpret_cast<HeapObject*>(object));
}

RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)
    : scavenger_(scavenger) {}

ScavengeVisitor::ScavengeVisitor(Scavenger* scavenger)
    : scavenger_(scavenger) {}

}  // namespace internal
}  // namespace v8