// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/scavenger.h"

#include "src/contexts.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/isolate.h"
#include "src/log.h"
#include "src/profiler/heap-profiler.h"

namespace v8 {
namespace internal {

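// Modes controlling whether object moves are reported to the logger and the
// heap profiler.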
enum LoggingAndProfiling {
  LOGGING_AND_PROFILING_ENABLED,
  LOGGING_AND_PROFILING_DISABLED
};


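// Modes controlling whether incremental-marking mark bits are transferred
// from the evacuated object to its new copy.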
enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };

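// Static visitor that evacuates (copies or promotes) objects reachable from
// new space during a scavenge. The dispatch table maps visitor ids to the
// specialized evacuation routines registered in Initialize().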
template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
class ScavengingVisitor : public StaticVisitorBase {
 public:
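  // Populates the dispatch table with the evacuation callback for each
  // visitor id.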
  static void Initialize() {
    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitThinString, &EvacuateThinString);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
    table_.Register(kVisitJSArrayBuffer,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(
        kVisitNativeContext,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Context::kSize>);

    table_.Register(
        kVisitConsString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            ConsString::kSize>);

    table_.Register(
        kVisitSlicedString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SlicedString::kSize>);

    table_.Register(
        kVisitSymbol,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Symbol::kSize>);

    table_.Register(
        kVisitSharedFunctionInfo,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SharedFunctionInfo::kSize>);

    table_.Register(kVisitJSWeakCollection,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSRegExp,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSFunction, &EvacuateJSFunction);

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject, kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject, kVisitJSObjectGeneric>();

    table_
        .RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                 kVisitJSApiObject, kVisitJSApiObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct, kVisitStructGeneric>();
  }

  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
    return &table_;
  }

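  // Evacuates a thin string without short-circuiting it to the internalized
  // string it points to. Used while incremental marking is compacting.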
  static void EvacuateThinStringNoShortcut(Map* map, HeapObject** slot,
                                           HeapObject* object) {
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 ThinString::kSize);
  }

 private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

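  // Updates new-space allocation/promotion statistics for a copied object
  // when heap stats or GC logging is enabled.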
  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
      } else {
        heap->new_space()->RecordPromotion(obj);
      }
    }
  }

  // Helper function used by SemiSpaceCopyObject and PromoteObject to copy a
  // source object to an already-allocated target object and to update the
  // forwarding pointer in the source object.
  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
                                   HeapObject* target, int size)) {
    // If we migrate into to-space, then the to-space top pointer should be
    // right after the target object. Incorporate double alignment
    // over-allocation.
    DCHECK(!heap->InToSpace(target) ||
           target->address() + size == heap->new_space()->top() ||
           target->address() + size + kPointerSize == heap->new_space()->top());

    // Make sure that we do not overwrite the promotion queue which is at
    // the end of to-space.
    DCHECK(!heap->InToSpace(target) ||
           heap->promotion_queue()->IsBelowPromotionQueue(
               heap->new_space()->top()));

    // Copy the content of source to target.
    heap->CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
      // Update NewSpace stats if necessary.
      RecordCopiedObject(heap, target);
      heap->OnMoveEvent(target, source, size);
    }

    if (marks_handling == TRANSFER_MARKS) {
      if (IncrementalMarking::TransferColor(source, target, size)) {
        MemoryChunk::IncrementLiveBytes(target, size);
      }
    }
  }

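  // Tries to copy |object| into to-space. Returns false if the to-space
  // allocation fails, e.g. when the semi-space is fragmented or full.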
  template <AllocationAlignment alignment>
  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
                                         HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
    AllocationResult allocation =
        heap->new_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      // Order is important here: Set the promotion limit before storing a
      // filler for double alignment or migrating the object. Otherwise we
      // may end up overwriting promotion queue entries when we migrate the
      // object.
      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());

      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
      return true;
    }
    return false;
  }


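  // Tries to promote |object| into old space. Pointer-carrying objects are
  // pushed onto the promotion queue so their slots can be iterated later.
  // Returns false if the old-space allocation fails.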
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline bool PromoteObject(Map* map, HeapObject** slot,
                                   HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    AllocationResult allocation =
        heap->old_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

      // Update slot to new target using CAS. A concurrent sweeper thread may
      // filter the slot concurrently.
      HeapObject* old = *slot;
      base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot),
                                   reinterpret_cast<base::AtomicWord>(old),
                                   reinterpret_cast<base::AtomicWord>(target));

      if (object_contents == POINTER_OBJECT) {
        heap->promotion_queue()->insert(target, object_size,
                                        ObjectMarking::IsBlack(object));
      }
      heap->IncrementPromotedObjectsSize(object_size);
      return true;
    }
    return false;
  }

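  // Evacuates |object| out of from-space: objects below the promotion
  // threshold are copied within new space, older objects are promoted to old
  // space, and each path falls back to the other if its allocation fails.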
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline void EvacuateObject(Map* map, HeapObject** slot,
                                    HeapObject* object, int object_size) {
    SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space.
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
  }

  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    if (marks_handling == IGNORE_MARKS) return;

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();

    if (ObjectMarking::IsBlack(target)) {
      // This object is black and it might not be rescanned by the marker.
      // We should explicitly record the code entry slot for compaction
      // because promotion queue processing (IteratePromotedObjectPointers)
      // will miss it as it is not HeapObject-tagged.
      Address code_entry_slot =
          target->address() + JSFunction::kCodeEntryOffset;
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          target, code_entry_slot, code);
    }
  }

  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    int length = reinterpret_cast<FixedArray*>(object)->synchronized_length();
    int object_size = FixedArray::SizeFor(length);
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    int object_size = FixedDoubleArray::SizeFor(length);
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
  }

  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
                                             HeapObject* object) {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<POINTER_OBJECT, kDoubleAligned>(map, slot, object,
                                                   object_size);
  }

  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

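  // Cons strings whose second part is the empty string are short-circuited:
  // the slot is updated to point directly at the first part instead of
  // copying the cons wrapper. Short-circuiting is only done when marks are
  // ignored (see SelectScavengingVisitorsTable for the compacting case).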
  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    DCHECK(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();

    if (marks_handling == IGNORE_MARKS &&
        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      Scavenger::ScavengeObjectSlow(slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

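  // Thin strings are short-circuited to the internalized string they wrap
  // when marks are ignored; otherwise they are evacuated like regular
  // pointer objects.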
  static inline void EvacuateThinString(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    if (marks_handling == IGNORE_MARKS) {
      HeapObject* actual = ThinString::cast(object)->actual();
      *slot = actual;
      // ThinStrings always refer to internalized strings, which are
      // always in old space.
      DCHECK(!map->GetHeap()->InNewSpace(actual));
      object->set_map_word(MapWord::FromForwardingAddress(actual));
      return;
    }

    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 ThinString::kSize);
  }

  template <ObjectContents object_contents>
  class ObjectEvacuationStrategy {
   public:
    template <int object_size>
    static inline void VisitSpecialized(Map* map, HeapObject** slot,
                                        HeapObject* object) {
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }

    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }
  };

  static VisitorDispatchTable<ScavengingCallback> table_;
};

template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;

// static
void Scavenger::Initialize() {
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
}


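// Slow path for scavenging: looks up the map-specific visitor and lets it
// evacuate |object|, updating the slot |p| in the process.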
// static
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
  MapWord first_word = object->map_word();
  SLOW_DCHECK(!first_word.IsForwardingAddress());
  Map* map = first_word.ToMap();
  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
  scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}


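// Selects the visitor table variant that matches the current incremental
// marking state and whether logging or profiling needs to observe object
// moves.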
void Scavenger::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
      isolate()->is_profiling() ||
      (isolate()->heap_profiler() != NULL &&
       isolate()->heap_profiler()->is_tracking_object_moves());

  if (!heap()->incremental_marking()->IsMarking()) {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }
  } else {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }

    if (heap()->incremental_marking()->IsCompacting()) {
      // When compacting, forbid short-circuiting of cons strings and thin
      // strings. The scavenging code relies on the fact that a new space
      // object cannot be evacuated into an evacuation candidate, and
      // short-circuiting would violate this assumption.
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitShortcutCandidate,
          scavenging_visitors_table_.GetVisitorById(
              StaticVisitorBase::kVisitConsString));
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitThinString,
          &ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>::
              EvacuateThinStringNoShortcut);
    }
  }
}


Isolate* Scavenger::isolate() { return heap()->isolate(); }


void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); }


void ScavengeVisitor::VisitPointers(Object** start, Object** end) {
  // Copy all HeapObject pointers in [start, end).
  for (Object** p = start; p < end; p++) ScavengePointer(p);
}


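// Evacuates the object referenced by |p| if it still lives in new space;
// pointers into old space are left untouched.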
void ScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  if (!heap_->InNewSpace(object)) return;

  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                            reinterpret_cast<HeapObject*>(object));
}

}  // namespace internal
}  // namespace v8