// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_INL_H_
#define V8_HEAP_SCAVENGER_INL_H_

#include "src/heap/scavenger.h"

#include "src/heap/local-allocator-inl.h"
#include "src/objects-inl.h"
#include "src/objects/map.h"

namespace v8 {
namespace internal {

// Whitelist for objects that are guaranteed to contain only data.
bool Scavenger::ContainsOnlyData(VisitorId visitor_id) {
  switch (visitor_id) {
    case kVisitSeqOneByteString:
      return true;
    case kVisitSeqTwoByteString:
      return true;
    case kVisitByteArray:
      return true;
    case kVisitFixedDoubleArray:
      return true;
    case kVisitDataObject:
      return true;
    default:
      break;
  }
  return false;
}

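// Under ThreadSanitizer, performs an acquire load on the page header of
// |object| so that concurrent page initialization is not reported as a data
// race; a no-op in all other builds.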
void Scavenger::PageMemoryFence(MaybeObject* object) {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  HeapObject* heap_object;
  if (object->ToStrongOrWeakHeapObject(&heap_object)) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());
    CHECK_NOT_NULL(chunk->synchronized_heap());
  }
#endif
}

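// Copies |source| to |target| and installs a forwarding address in |source|'s
// map word via a release compare-and-swap. Returns false if another task won
// the race and migrated the object first.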
bool Scavenger::MigrateObject(Map* map, HeapObject* source, HeapObject* target,
                              int size) {
  // Copy the content of source to target.
  target->set_map_word(MapWord::FromMap(map));
  heap()->CopyBlock(target->address() + kPointerSize,
                    source->address() + kPointerSize, size - kPointerSize);

  HeapObject* old = base::AsAtomicPointer::Release_CompareAndSwap(
      reinterpret_cast<HeapObject**>(source->address()), map,
      MapWord::FromForwardingAddress(target).ToMap());
  if (old != map) {
    // Another task already migrated the object.
    return false;
  }

  if (V8_UNLIKELY(is_logging_)) {
    heap()->OnMoveEvent(target, source, size);
  }

  if (is_incremental_marking_) {
    heap()->incremental_marking()->TransferColor(source, target);
  }
  heap()->UpdateAllocationSite(map, source, &local_pretenuring_feedback_);
  return true;
}

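// Attempts to copy |object| into to-space. Returns false if allocation fails;
// if the migration race is lost, the slot is updated to the winner's
// forwarding address and true is returned.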
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
                                    HeapObject* object, int object_size) {
  DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(NEW_SPACE, object_size, alignment);

  HeapObject* target = nullptr;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      return true;
    }
    HeapObjectReference::Update(slot, target);

    copied_list_.Push(ObjectAndSize(target, object_size));
    copied_size_ += object_size;
    return true;
  }
  return false;
}

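// Attempts to evacuate |object| into old space. Mirrors SemiSpaceCopyObject,
// except that promoted objects containing pointers are pushed onto the
// promotion list so their slots get revisited.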
bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
                              HeapObject* object, int object_size) {
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(OLD_SPACE, object_size, alignment);

  HeapObject* target = nullptr;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      allocator_.FreeLast(OLD_SPACE, target, object_size);
      MapWord map_word = object->map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      return true;
    }
    HeapObjectReference::Update(slot, target);
    if (!ContainsOnlyData(map->visitor_id())) {
      promotion_list_.Push(ObjectAndSize(target, object_size));
    }
    promoted_size_ += object_size;
    return true;
  }
  return false;
}

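// Default evacuation path: try a semi-space copy first (unless the object has
// survived long enough to be promoted), fall back to promotion, and as a last
// resort retry the semi-space copy before giving up with an OOM.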
void Scavenger::EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
                                      HeapObject* object, int object_size) {
  SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
  SLOW_DCHECK(object->SizeFromMap(map) == object_size);

  if (!heap()->ShouldBePromoted(object->address())) {
    // A semi-space copy may fail due to fragmentation. In that case, we
    // try to promote the object.
    if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
  }

  if (PromoteObject(map, slot, object, object_size)) return;

  // If promotion failed, we try to copy the object to the other semi-space.
  if (SemiSpaceCopyObject(map, slot, object, object_size)) return;

  heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
}

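// Shortcuts a ThinString to the internalized string it points to when
// incremental marking is not running; otherwise falls back to the default
// evacuation path.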
void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
                                   ThinString* object, int object_size) {
  if (!is_incremental_marking_) {
    // Loading actual is fine in a parallel setting since there is no write.
    String* actual = object->actual();
    object->set_length(0);
    *slot = actual;
    // ThinStrings always refer to internalized strings, which are
    // always in old space.
    DCHECK(!Heap::InNewSpace(actual));
    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(actual).ToMap());
    return;
  }

  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
                        object, object_size);
}

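// For a ConsString whose second part is the empty string, forwards the slot
// directly to the first part (evacuating it if needed) instead of copying the
// wrapper. Falls back to the default path under incremental marking.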
void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                          ConsString* object, int object_size) {
  DCHECK(IsShortcutCandidate(map->instance_type()));
  if (!is_incremental_marking_ &&
      object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
    HeapObject* first = HeapObject::cast(object->unchecked_first());

    *slot = first;

    if (!Heap::InNewSpace(first)) {
      base::AsAtomicPointer::Relaxed_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(first).ToMap());
      return;
    }

    MapWord first_word = first->map_word();
    if (first_word.IsForwardingAddress()) {
      HeapObject* target = first_word.ToForwardingAddress();

      *slot = target;
      base::AsAtomicPointer::Relaxed_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(target).ToMap());
      return;
    }
    Map* map = first_word.ToMap();
    EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
                          first, first->SizeFromMap(map));
    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(*slot).ToMap());
    return;
  }

  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
                        object, object_size);
}

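// Dispatches evacuation based on the object's visitor id so that thin strings
// and shortcut candidates can take their specialized fast paths.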
void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
                               HeapObject* source) {
  SLOW_DCHECK(Heap::InFromSpace(source));
  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
  int size = source->SizeFromMap(map);
  // Cannot use ::cast() below because that would add checks in debug mode
  // that require re-reading the map.
  switch (map->visitor_id()) {
    case kVisitThinString:
      // At the moment we don't allow weak pointers to thin strings.
      DCHECK(!(*slot)->IsWeakHeapObject());
      EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
                         reinterpret_cast<ThinString*>(source), size);
      break;
    case kVisitShortcutCandidate:
      // At the moment we don't allow weak pointers to cons strings.
      DCHECK(!(*slot)->IsWeakHeapObject());
      EvacuateShortcutCandidate(map, reinterpret_cast<HeapObject**>(slot),
                                reinterpret_cast<ConsString*>(source), size);
      break;
    default:
      EvacuateObjectDefault(map, slot, source, size);
      break;
  }
}

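// Main entry point for scavenging a single reference. If the object has
// already been moved, only the slot is updated (preserving its weakness);
// otherwise the object is evacuated.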
void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
  DCHECK(Heap::InFromSpace(object));

  // Synchronized load that consumes the publishing CAS of MigrateObject.
  MapWord first_word = object->synchronized_map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(Heap::InFromSpace(*p));
    if ((*p)->IsWeakHeapObject()) {
      *p = HeapObjectReference::Weak(dest);
    } else {
      DCHECK((*p)->IsStrongHeapObject());
      *p = HeapObjectReference::Strong(dest);
    }
    return;
  }

  Map* map = first_word.ToMap();
  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
  // Call the slow part of scavenge object.
  EvacuateObject(p, map, object);
}

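// Processes a single remembered-set slot: scavenges the referenced object if
// it still lives in from-space and tells the caller whether the slot must be
// kept in the remembered set or can be removed.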
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
                                                     Address slot_address) {
  MaybeObject** slot = reinterpret_cast<MaybeObject**>(slot_address);
  MaybeObject* object = *slot;
  if (Heap::InFromSpace(object)) {
    HeapObject* heap_object;
    bool success = object->ToStrongOrWeakHeapObject(&heap_object);
    USE(success);
    DCHECK(success);
    DCHECK(heap_object->IsHeapObject());

    ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot), heap_object);

    object = *slot;
    // If the object was in from-space before the callback and is in to-space
    // afterwards, it is still live. Unfortunately, we do not know anything
    // about the slot itself: it could be located in a just-freed free-space
    // object.
    PageMemoryFence(object);
    if (Heap::InToSpace(object)) {
      return KEEP_SLOT;
    }
  } else if (Heap::InToSpace(object)) {
    // Already updated slot. This can happen when processing of the work list
    // is interleaved with processing roots.
    return KEEP_SLOT;
  }
  // Slots can point to "to" space if the slot has been recorded multiple
  // times in the remembered set. We remove the redundant slot now.
  return REMOVE_SLOT;
}

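// Visits strong pointer slots of |host|, scavenging every reference that
// still points into new space.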
void ScavengeVisitor::VisitPointers(HeapObject* host, Object** start,
                                    Object** end) {
  for (Object** p = start; p < end; p++) {
    Object* object = *p;
    if (!Heap::InNewSpace(object)) continue;
    scavenger_->ScavengeObject(reinterpret_cast<HeapObjectReference**>(p),
                               reinterpret_cast<HeapObject*>(object));
  }
}

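// Same as above for MaybeObject slots; weak references are treated as strong
// during the scavenge.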
void ScavengeVisitor::VisitPointers(HeapObject* host, MaybeObject** start,
                                    MaybeObject** end) {
  for (MaybeObject** p = start; p < end; p++) {
    MaybeObject* object = *p;
    if (!Heap::InNewSpace(object)) continue;
    // Treat the weak reference as strong.
    HeapObject* heap_object;
    if (object->ToStrongOrWeakHeapObject(&heap_object)) {
      scavenger_->ScavengeObject(reinterpret_cast<HeapObjectReference**>(p),
                                 heap_object);
    } else {
      UNREACHABLE();
    }
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_INL_H_