// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

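// StaticNewSpaceVisitor dispatches scavenge-time visits through a static
// table indexed by visitor id, so each object is handled by a specialized,
// size-returning visit function for its instance type. A sketch of the
// intended usage (CRTP; "MyScavengeVisitor" is a hypothetical name, and
// IterateBody() is the dispatch entry point declared alongside this file
// in objects-visiting.h):
//
//   class MyScavengeVisitor
//       : public StaticNewSpaceVisitor<MyScavengeVisitor> { ... };
//   MyScavengeVisitor::Initialize();  // fill the dispatch table once
//   int size = MyScavengeVisitor::IterateBody(map, obj);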
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


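// A JSArrayBuffer is visited in two pointer ranges so that its weak fields
// (kWeakNextOffset and kWeakFirstViewOffset, asserted to be adjacent below)
// are skipped: they link the buffer into the heap's weak list of array
// buffers and its list of views, which are processed separately.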
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


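// JSTypedArray and JSDataView are handled the same way: the weak-next field
// that links a view into its buffer's list of views is skipped here and
// handled by the weak-list processing instead.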
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


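// StaticMarkingVisitor provides the visit functions used while transitively
// marking the heap. Unlike the new-space visitor above, its visit functions
// return void, and slots that may need updating during compaction are
// recorded with the mark-compact collector (see the RecordSlot and
// RecordRelocSlot calls below).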
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


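// A code entry slot holds the address of a Code object's first instruction
// rather than a pointer to the object itself, so it needs this dedicated
// path: record the slot for relocation, then mark the Code object.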
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here, and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC ||
       (heap->flush_monomorphic_ics() && !target->is_weak_stub()) ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


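// Visit the context's strong fields via the mark-compact body descriptor,
// then record the slots of the weak part (FIRST_WEAK_SLOT onward) without
// marking their values; those values are treated weakly and processed
// separately by the collector.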
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array, but do not push it onto
    // the marking stack; this keeps the references from it weak. Dead code is
    // cleaned out when property cells are iterated in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array, but do not push it
    // onto the marking stack; this keeps the references from it weak. Dead
    // code is cleaned out when allocation sites are iterated in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both of which are
  // post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


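// Code aging: on each marking pass the code object is made "older" (unless
// the serializer is active); code that reaches an old age becomes eligible
// for flushing in IsFlushable() below.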
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing;
      // this is needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


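// A constant pool stores raw code entry addresses and tagged heap pointers.
// Code entries are visited through VisitCodeEntry; heap pointers are
// recorded for relocation and then marked strongly, unless the pool's
// weak-object state together with Code::IsWeakObjectInOptimizedCode /
// Code::IsWeakObjectInIC says the pointer is held weakly, in which case the
// collector decides its fate later.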
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


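// The regexp visitor treats all fields, including in-object properties, as
// strong pointers. Note that Initialize() above leaves kVisitJSRegExp to the
// concrete StaticVisitor, which may register a different handler (for
// example, one that also flushes compiled regexp code).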
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


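// Marking counterparts of the new-space array-buffer visitors above: the
// same weak fields are skipped, but the pointers are visited through the
// marking visitor so the slots get recorded.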
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the map's dependent code array, but do not push it onto the marking
  // stack; this keeps the references from it weak. Dead code is cleaned out
  // when maps are iterated in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array, but do not push it onto the
    // marking stack; this keeps the references from it weak. Dead prototype
    // transitions are cleaned out in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For an optimized function we should retain both the non-optimized version
  // of its code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


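// Helper predicates for the code-flushing heuristics below.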
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code; at this point it is the unoptimized version
  // shared with the SharedFunctionInfo (see the check above).
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available, so
  // that it can be recompiled in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


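// The four functions below come in strong/weak pairs: the strong variants
// visit every pointer field, keeping compiled code alive, while the weak
// variants skip the code field (kCodeOffset) or code entry
// (kCodeEntryOffset) so that the code flusher may reclaim the code if it is
// not kept alive elsewhere.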
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


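// The mode mask below restricts the RelocIterator to the RelocInfo kinds
// that can carry heap references (code targets, embedded objects, cells,
// and the like); all other relocation entries are skipped.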
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_