// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

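// Fills the dispatch table used when iterating the bodies of new-space
// objects during a scavenge. Entries are selected by the visitor id stored
// in the object's map; each registered visitor returns the object's size in
// bytes so that iteration can advance to the next object.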
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


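// JSArrayBuffer is visited in two pointer ranges so that the weak fields in
// between (kWeakNextOffset and kWeakFirstViewOffset) are skipped; they link
// live array buffers and their views and are processed separately during
// garbage collection.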
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


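// Fills the dispatch table used by the mark-compact collector and by
// incremental marking. Unlike the new-space visitors above, these visitors
// return void and mark the objects reachable from the visited object,
// treating selected fields weakly.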
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor,
                  Cell::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


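// Marks the Code object referenced from a code entry field (for example a
// JSFunction's code entry) and records the slot so it can be updated if the
// code object is moved during compaction.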
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


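// Visits a heap object embedded in a Code object's instruction stream. The
// slot is recorded so the reference can be updated during compaction;
// weakly embedded objects are not marked here and are handled when
// non-live references are cleared in mark-compact.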
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


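// Visits the Code target of a call or jump in the instruction stream. Stale
// inline cache stubs are cleared first; the target is then re-read because
// clearing patches the call site to point at a different stub.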
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          heap->isolate()->serializer_enabled() ||
          target->ic_age() != heap->global_ic_age() ||
          target->is_invalidated_weak_stub())) {
    IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


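// Maps are visited specially: when map collection is enabled, transitions,
// back pointers, descriptors and dependent code are treated weakly (see
// MarkMapContents below) so that unused transition trees can be collected.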
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array but do not push it onto
    // the marking stack; this makes the references from it weak. Dead code
    // is cleaned up when we iterate over property cells in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array but do not push it
    // onto the marking stack; this makes the references from it weak. Dead
    // code is cleaned up when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


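// JSWeakCollection (the base of JSWeakMap and JSWeakSet) holds its entries
// weakly. The collection is chained into the heap's list of encountered
// weak collections and its backing table is marked without being pushed,
// so the key/value entries can be post-processed after marking.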
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
      JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
      JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


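// When code aging is enabled (and the serializer is not active), code
// objects are aged during marking; sufficiently old code becomes a
// candidate for code flushing (see IsFlushable below).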
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


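// SharedFunctionInfo visiting decides whether the function's compiled code
// may be flushed. Flushable candidates are handed to the code flusher and
// their code reference is treated weakly; otherwise the code reference is
// visited strongly and kept alive.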
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


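// Constant pool arrays can reference both Code objects and other heap
// objects. Entries that the pool's weak-object state declares weak are only
// recorded, not marked, so that dead targets can be cleared later.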
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
              ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
              ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


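// JSFunction visiting mirrors the SharedFunctionInfo logic: flushable
// functions are added to the code flusher and their code entry is treated
// weakly; non-flushable functions keep their unoptimized code (and that of
// any inlined functions) alive so that deoptimization remains possible.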
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
        JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
        JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


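// Marks the contents of a Map while keeping transitions, descriptors and
// dependent code weak: these arrays are marked without being pushed onto
// the marking deque, so references from them do not keep other objects
// alive on their own.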
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the map's dependent code array but do not push it onto the marking
  // stack; this makes the references from it weak. Dead code is cleaned up
  // when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array but do not push it onto the
    // marking stack; this makes the references from it weak. Dead prototype
    // transitions are cleaned up in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For an optimized function we should retain both the non-optimized version
  // of its code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


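// A JSFunction's code can be flushed only if the code is not already
// marked, the function has a valid non-builtin context, it is not currently
// using optimized code and, when code aging is enabled, the code is old;
// the SharedFunctionInfo checks below must pass as well.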
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the function's code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available so that
  // it can be recompiled in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


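// Visits a SharedFunctionInfo while skipping its code field, so that the
// reference to the code object stays weak and the code flusher may clear
// it later.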
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
      SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
      SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


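// The two CodeIterateBody variants below visit the pointer fields in the
// Code object header and then every embedded object, cell, code target and
// other reference described by the relocation information.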
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap,
      reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));


  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_