// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/startup-serializer.h"

#include "src/objects-inl.h"
#include "src/v8threads.h"

namespace v8 {
namespace internal {

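// When |function_code_handling| is kClear, compiled function code is dropped
// from the snapshot and replaced with the lazy-compile builtin (see
// SerializeObject below).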
StartupSerializer::StartupSerializer(
    Isolate* isolate,
    v8::SnapshotCreator::FunctionCodeHandling function_code_handling)
    : Serializer(isolate),
      clear_function_code_(function_code_handling ==
                           v8::SnapshotCreator::FunctionCodeHandling::kClear),
      serializing_builtins_(false) {
  InitializeCodeAddressMap();
}

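// Undo the external reference redirect wiping that SerializeObject performed
// on the collected AccessorInfo objects, then report snapshot statistics.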
StartupSerializer::~StartupSerializer() {
  RestoreExternalReferenceRedirectors(&accessor_infos_);
  OutputStatistics("StartupSerializer");
}

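// Serializes a single heap object into the startup snapshot. Hot objects,
// already-serialized roots and back references are emitted as short encodings;
// anything else is serialized in full by the ObjectSerializer.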
void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
                                        WhereToPoint where_to_point, int skip) {
  DCHECK(!obj->IsJSFunction());

  if (clear_function_code_) {
    if (obj->IsCode()) {
      Code* code = Code::cast(obj);
      // If the function code is compiled (either as native code or bytecode),
      // replace it with the lazy-compile builtin. The only exception is when
      // we are serializing the canonical interpreter entry trampoline builtin.
      if (code->kind() == Code::FUNCTION ||
          (!serializing_builtins_ &&
           code->is_interpreter_trampoline_builtin())) {
        obj = isolate()->builtins()->builtin(Builtins::kCompileLazy);
      }
    } else if (obj->IsBytecodeArray()) {
      obj = isolate()->heap()->undefined_value();
    }
  } else if (obj->IsCode()) {
    Code* code = Code::cast(obj);
    if (code->kind() == Code::FUNCTION) {
      code->ClearInlineCaches();
      code->set_profiler_ticks(0);
    }
  }

  if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;

  int root_index = root_index_map_.Lookup(obj);
  // We can only encode roots as such if they have already been serialized.
  // That applies to root indices below the wave front.
  if (root_index != RootIndexMap::kInvalidRootIndex) {
    if (root_has_been_serialized_.test(root_index)) {
      PutRoot(root_index, obj, how_to_code, where_to_point, skip);
      return;
    }
  }

  if (SerializeBackReference(obj, how_to_code, where_to_point, skip)) return;

  FlushSkip(skip);

  if (isolate_->external_reference_redirector() && obj->IsAccessorInfo()) {
    // Wipe external reference redirects in the accessor info.
    AccessorInfo* info = AccessorInfo::cast(obj);
    Address original_address = Foreign::cast(info->getter())->foreign_address();
    Foreign::cast(info->js_getter())->set_foreign_address(original_address);
    accessor_infos_.Add(info);
  }

  // Object has not yet been serialized. Serialize it here.
  ObjectSerializer object_serializer(this, obj, &sink_, how_to_code,
                                     where_to_point);
  object_serializer.Serialize();

  if (serializing_immortal_immovables_roots_ &&
      root_index != RootIndexMap::kInvalidRootIndex) {
    // Make sure that the immortal immovable root has been included in the
    // first chunk of its reserved space, so that it is deserialized onto the
    // first page of its space and stays immortal immovable.
    SerializerReference ref = reference_map_.Lookup(obj);
    CHECK(ref.is_back_reference() && ref.chunk_index() == 0);
  }
}

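// Called after the partial (context) snapshot has been serialized: terminates
// the partial snapshot cache, serializes the weak roots and any deferred
// objects, and pads the output sink.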
void StartupSerializer::SerializeWeakReferencesAndDeferred() {
  // This comes right after serialization of the partial snapshot, where we
  // add entries to the partial snapshot cache of the startup snapshot. Add
  // one entry with 'undefined' to terminate the partial snapshot cache.
  Object* undefined = isolate()->heap()->undefined_value();
  VisitPointer(&undefined);
  isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
  SerializeDeferredObjects();
  Pad();
}

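// Returns the index of |heap_object| in the partial snapshot cache. If the
// object is not in the cache yet, it is added and also serialized into the
// startup snapshot, so that the partial snapshot can refer to it by index.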
int StartupSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
  int index;
  if (!partial_cache_index_map_.LookupOrInsert(heap_object, &index)) {
    // This object is not part of the partial snapshot cache yet. Add it to the
    // startup snapshot so we can refer to it via partial snapshot index from
    // the partial snapshot.
    VisitPointer(reinterpret_cast<Object**>(&heap_object));
  }
  return index;
}

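// serializing_builtins_ is turned on when the kHandleScope tag is seen,
// since the builtin code objects are visited next in the root iteration,
// and turned off again by whichever tag follows them.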
void StartupSerializer::Synchronize(VisitorSynchronization::SyncTag tag) {
  // We expect the builtins tag after builtins have been serialized.
  DCHECK(!serializing_builtins_ || tag == VisitorSynchronization::kBuiltins);
  serializing_builtins_ = (tag == VisitorSynchronization::kHandleScope);
  sink_.Put(kSynchronize, "Synchronize");
}

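// Serializes the strong root list in two passes: first the immortal immovable
// roots, which must land on the first page of their space, then the remaining
// strong roots. Requires that no threads or handles are active.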
void StartupSerializer::SerializeStrongReferences() {
  Isolate* isolate = this->isolate();
  // No active threads.
  CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
  // No active or weak handles.
  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
  CHECK_EQ(0, isolate->global_handles()->global_handles_count());
  CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
  // First visit immortal immovable roots to make sure they end up on the
  // first page.
  serializing_immortal_immovables_roots_ = true;
  isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG_ROOT_LIST);
  // Check that immortal immovable roots are allocated on the first page.
  CHECK(HasNotExceededFirstPageOfEachSpace());
  serializing_immortal_immovables_roots_ = false;
  // Visit the rest of the strong roots.
  // Clear the stack limits to make the snapshot reproducible.
  // Reset them again afterwards.
  isolate->heap()->ClearStackLimits();
  isolate->heap()->IterateSmiRoots(this);
  isolate->heap()->SetStackLimits();

  isolate->heap()->IterateStrongRoots(this,
                                      VISIT_ONLY_STRONG_FOR_SERIALIZATION);
}

void StartupSerializer::VisitPointers(Object** start, Object** end) {
  if (start == isolate()->heap()->roots_array_start()) {
    // Serializing the root list needs special handling:
    // - The first pass over the root list only serializes immortal immovables.
    // - The second pass over the root list serializes the rest.
    // - Only root list elements that have been fully serialized can be
    //   referenced as roots using kRootArray bytecodes.
    int skip = 0;
    for (Object** current = start; current < end; current++) {
      int root_index = static_cast<int>(current - start);
      if (RootShouldBeSkipped(root_index)) {
        skip += kPointerSize;
        continue;
      } else {
        if ((*current)->IsSmi()) {
          FlushSkip(skip);
          PutSmi(Smi::cast(*current));
        } else {
          SerializeObject(HeapObject::cast(*current), kPlain, kStartOfObject,
                          skip);
        }
        root_has_been_serialized_.set(root_index);
        skip = 0;
      }
    }
    FlushSkip(skip);
  } else {
    Serializer::VisitPointers(start, end);
  }
}

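// The stack limit roots get special handling (see SerializeStrongReferences)
// and are skipped in both passes. Every other root is serialized in exactly
// one pass: immortal immovable roots in the first, the rest in the second.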
bool StartupSerializer::RootShouldBeSkipped(int root_index) {
  if (root_index == Heap::kStackLimitRootIndex ||
      root_index == Heap::kRealStackLimitRootIndex) {
    return true;
  }
  return Heap::RootIsImmortalImmovable(root_index) !=
         serializing_immortal_immovables_roots_;
}

}  // namespace internal
}  // namespace v8