// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

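// Sets up the dispatch table used by the new-space (scavenge) visitor. Each
// visitor id is mapped to a static visit function that returns the size of
// the visited object in bytes.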
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSWeakSet, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


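// Visits a JSArrayBuffer in new space. The two consecutive weak slots
// (weak_next and weak_first_view) are skipped here and handled separately
// as weak references.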
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


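// Sets up the dispatch table used by the mark-compact (full GC) visitor.
// These visit functions return void; they mark reachable sub-objects and
// record slots rather than compute object sizes.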
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite,
                  &FixedBodyVisitor<StaticVisitor,
                  AllocationSite::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitJSWeakSet, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor,
                  Cell::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


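// Marks the Code object referenced by a code-entry field and records the
// entry slot so it can be updated if the code object moves.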
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


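// Objects embedded in code are normally marked strongly. Maps that can
// transition, embedded in optimized code, are exempted when the relevant
// flags are on, so that such maps are only kept alive by other references.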
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
      rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
      !object->IsMap() || !Map::cast(object)->CanTransition()) {
    heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  StaticVisitor::MarkObject(heap, cell);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(rinfo->pc());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


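// Visits the strong part of a native context via the mark-compact body
// descriptor, then records the weak context slots so they can be updated
// during compaction.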
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot =
        HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
    collector->RecordSlot(slot, slot, *slot);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clear the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled, we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array, but do not push it onto
    // the marking stack; this makes the references from it weak. Dead code is
    // cleaned out when property cells are iterated in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackCells(heap);
  }
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


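// Visits a SharedFunctionInfo, taking part in code flushing: when flushing
// is enabled, flushable candidates are handed to the code flusher and their
// code is referenced weakly; otherwise all fields are visited strongly.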
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


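// Visits all pointer fields of a JSRegExp strongly, including its in-object
// properties.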
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
        JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
        JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the map's dependent code array, but do not push it onto the marking
  // stack; this makes the references from it weak. Dead code is cleaned out
  // when maps are iterated in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array, but do not push it onto the
    // marking stack; this makes the references from it weak. Dead prototype
    // transitions are cleaned in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


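// Helpers used by the code flushing heuristics (IsFlushable) below.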
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have its source code available, so
  // that it can be recompiled in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must allow lazy compilation.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode, then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


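// Visits the entire SharedFunctionInfo body, keeping the code object alive.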
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
      SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
      SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


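// Visits a JSFunction whose code is kept alive: the fields before the code
// entry, the code entry itself, and the remaining non-weak fields.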
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


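// Iterates the body of a Code object with a dynamic ObjectVisitor: first the
// pointer fields in the header, then every relocation entry selected by
// mode_mask.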
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_