// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {

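// Looks up the visitor callback registered for |map|'s visitor id. The table
// stores the callbacks type-erased, so the entry is cast back to the Callback
// type the table was instantiated with. A dispatch sketch (the real entry
// points live in objects-visiting.h):
//
//   Callback visit = table_.GetVisitor(object->map());
//   visit(object->map(), object);  // returns the size for int visitors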
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}


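// Populates the dispatch table used by the new-space (scavenge) visitor.
// Each visitor id is mapped to a static Visit function parameterized with
// the object type's body descriptor; the int template argument makes the
// visitors return the visited object's size.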
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit code entry. We are using this visitor only during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


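// Visits a JSArrayBuffer during a scavenge. Buffers whose backing store is
// owned by the heap (i.e. not external) are reported to the array buffer
// tracker so the backing store stays alive; the body is then visited as
// usual and the object's size is returned.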
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
      JSArrayBufferBodyVisitor;

  if (!JSArrayBuffer::cast(object)->is_external()) {
    Heap* heap = map->GetHeap();
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
  return JSArrayBufferBodyVisitor::Visit(map, object);
}


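// Visits the tagged header fields of a BytecodeArray, from the constant pool
// slot to the end of the header; the bytecode stream that follows the header
// contains no heap pointers and is skipped.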
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kHeaderSize));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}


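// Populates the dispatch table used by the mark-compact marking visitor.
// These callbacks return void, and several object types get custom visitors
// below that treat some of their references weakly (maps, code, shared
// function infos, weak cells, transition arrays).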
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


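// Marks the Code object reachable through a code entry field (e.g. the one
// in a JSFunction) and records the slot so the collector can update the
// entry if the code object is relocated.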
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}


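// Visits an object embedded in code. The slot is always recorded, but the
// object is marked strongly only if the host code does not treat it as weak;
// weakly embedded objects are handled when non-live references are cleared
// (see the TODO below).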
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


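// Marks a Code object referenced as a call target. Stale inline caches may
// be cleared first, so that a monomorphic IC does not keep a Context alive
// and so that the heap can be serialized; the target is re-read afterwards
// because clearing patches the call site.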
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}


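// Custom visitor for Map objects. Maps that can transition are marked via
// MarkMapContents so that transitions and back pointers act as weak links;
// all other maps have their pointer fields visited strongly.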
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clear the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled, we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


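// WeakCell values are never marked from here. If the value is already known
// to be live, only the slot is recorded; otherwise the cell is put on the
// heap's list of encountered weak cells so it can be processed once the
// transitive closure of live objects is known.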
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells. We
  // can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (MarkCompactCollector::IsMarked(value)) {
      // Weak cells with live values are processed directly here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      heap->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
    } else {
      // If we do not yet know whether the weak cell's value is live, we have
      // to process the cell when the liveness of the whole transitive
      // closure is known.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in the linked list of encountered transition arrays if
  // it is not already in the list.
  if (array->next_link()->IsUndefined()) {
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


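// Visits a JSWeakCollection. The collection is enqueued for post-processing,
// its body is visited with a descriptor that skips the backing table, and
// the table itself is marked without being pushed on the marking stack so
// that its entries are not visited (they are treated weakly).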
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


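// Visits a Code object. When code aging is enabled and the heap is not
// about to be serialized, the code is aged first; the flushing heuristics
// below only flush code that has grown old.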
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  CodeBodyVisitor::Visit(map, object);
}


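// Visits a SharedFunctionInfo. IC and type feedback caches may be reset
// first. If code flushing is enabled and the code looks flushable, the
// SharedFunctionInfo becomes a flushing candidate and its code reference is
// visited weakly; otherwise all fields, including the code, are visited
// strongly.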
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if (FLAG_flush_optimized_code_cache) {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


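// Visits a JSFunction. Mirrors VisitSharedFunctionInfo: a flushable function
// becomes a flushing candidate and its code is visited weakly, while a
// non-flushable one marks its SharedFunctionInfo's unoptimized code to keep
// it from being flushed.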
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  JSObjectVisitor::Visit(map, object);
}


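// Marking counterpart of the new-space JSArrayBuffer visitor. New-space
// buffers are already tracked by the scavenger's visitor, so only old-space,
// heap-owned (non-external) buffers are reported live here.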
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                              void> JSArrayBufferBodyVisitor;

  JSArrayBufferBodyVisitor::Visit(map, object);

  if (!JSArrayBuffer::cast(object)->is_external() &&
      !heap->InNewSpace(object)) {
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kHeaderSize));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited. Prototype maps don't keep track
  // of transitions, so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


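// Returns true if both the function's script and the script's source are
// available, i.e. the function could be recompiled after its code has been
// flushed.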
inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


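// Code-flushing heuristics for a JSFunction: the code must not already be
// marked, it must be the unoptimized code shared with the
// SharedFunctionInfo, it must be old enough under --age-code, and the
// SharedFunctionInfo checks below must also pass.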
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code: under --age-code, only old code is flushed.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


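// SharedFunctionInfo part of the flushing heuristics: the code must be
// unmarked and compiled from available source, the function must be an
// ordinary lazily-compilable function (not an API function, builtin,
// generator, or top-level script), must carry no debug code, and the code
// must be old enough under --age-code.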
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and its source code must be available so
  // that it can be recompiled if it is needed again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the
  // heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must not be a builtin.
  if (shared_info->IsBuiltin()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode, then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


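// Weak-code variant: visits the name slot and everything from the optimized
// code map onward, skipping the code slot. The STATIC_ASSERTs pin down the
// field layout this slicing depends on.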
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


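// The strong and weak JSFunction body visitors differ only in their body
// descriptor: the weak variant leaves the code entry unvisited so that the
// code can be flushed.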
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSFunction::BodyDescriptorStrongCode,
                              void> JSFunctionStrongCodeBodyVisitor;
  JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_