      1 // Copyright 2013 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/deoptimizer.h"
      6 
      7 #include <memory>
      8 
      9 #include "src/accessors.h"
     10 #include "src/ast/prettyprinter.h"
     11 #include "src/codegen.h"
     12 #include "src/disasm.h"
     13 #include "src/frames-inl.h"
     14 #include "src/full-codegen/full-codegen.h"
     15 #include "src/global-handles.h"
     16 #include "src/interpreter/interpreter.h"
     17 #include "src/macro-assembler.h"
     18 #include "src/tracing/trace-event.h"
     19 #include "src/v8.h"
     20 
     21 
     22 namespace v8 {
     23 namespace internal {
     24 
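         // Allocate one executable memory chunk, sized to hold the entire table of
         // deoptimization entries for a single bailout type.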
     25 static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
     26   return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
     27                                   MemoryAllocator::GetCommitPageSize(),
     28                                   EXECUTABLE, NULL);
     29 }
     30 
     31 
     32 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
     33     : allocator_(allocator),
     34       current_(NULL) {
     35   for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
     36     deopt_entry_code_entries_[i] = -1;
     37     deopt_entry_code_[i] = AllocateCodeChunk(allocator);
     38   }
     39 }
     40 
     41 
     42 DeoptimizerData::~DeoptimizerData() {
     43   for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
     44     allocator_->Free<MemoryAllocator::kFull>(deopt_entry_code_[i]);
     45     deopt_entry_code_[i] = NULL;
     46   }
     47 }
     48 
     49 
     50 Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
     51   if (function_->IsHeapObject()) {
     52     // Search all deoptimizing code in the native context of the function.
     53     Isolate* isolate = function_->GetIsolate();
     54     Context* native_context = function_->context()->native_context();
     55     Object* element = native_context->DeoptimizedCodeListHead();
     56     while (!element->IsUndefined(isolate)) {
     57       Code* code = Code::cast(element);
     58       CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
     59       if (code->contains(addr)) return code;
     60       element = code->next_code_link();
     61     }
     62   }
     63   return NULL;
     64 }
     65 
     66 
     67 // We rely on this function not causing a GC.  It is called from generated code
     68 // without having a real stack frame in place.
     69 Deoptimizer* Deoptimizer::New(JSFunction* function,
     70                               BailoutType type,
     71                               unsigned bailout_id,
     72                               Address from,
     73                               int fp_to_sp_delta,
     74                               Isolate* isolate) {
     75   Deoptimizer* deoptimizer = new Deoptimizer(isolate, function, type,
     76                                              bailout_id, from, fp_to_sp_delta);
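           // Only one Deoptimizer may be pending per isolate; Grab() below retrieves
           // it and clears this slot again.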
     77   CHECK(isolate->deoptimizer_data()->current_ == NULL);
     78   isolate->deoptimizer_data()->current_ = deoptimizer;
     79   return deoptimizer;
     80 }
     81 
     82 
     83 // No larger than 2K on all platforms
     84 static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
     85 
     86 
     87 size_t Deoptimizer::GetMaxDeoptTableSize() {
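           // Reserve space for the maximum number of entries plus the (at most 2 KB)
           // epilogue code, rounded up to whole commit pages.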
     88   int entries_size =
     89       Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
     90   int commit_page_size = static_cast<int>(MemoryAllocator::GetCommitPageSize());
     91   int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
     92                     commit_page_size) + 1;
     93   return static_cast<size_t>(commit_page_size * page_count);
     94 }
     95 
     96 
     97 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
     98   Deoptimizer* result = isolate->deoptimizer_data()->current_;
     99   CHECK_NOT_NULL(result);
    100   result->DeleteFrameDescriptions();
    101   isolate->deoptimizer_data()->current_ = NULL;
    102   return result;
    103 }
    104 
    105 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    106     JavaScriptFrame* frame,
    107     int jsframe_index,
    108     Isolate* isolate) {
    109   CHECK(frame->is_optimized());
    110 
    111   TranslatedState translated_values(frame);
    112   translated_values.Prepare(false, frame->fp());
    113 
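           // Locate the jsframe_index-th JavaScript frame (full-codegen or
           // interpreted) among the translated frames; other frame kinds are skipped.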
    114   TranslatedState::iterator frame_it = translated_values.end();
    115   int counter = jsframe_index;
    116   for (auto it = translated_values.begin(); it != translated_values.end();
    117        it++) {
    118     if (it->kind() == TranslatedFrame::kFunction ||
    119         it->kind() == TranslatedFrame::kInterpretedFunction) {
    120       if (counter == 0) {
    121         frame_it = it;
    122         break;
    123       }
    124       counter--;
    125     }
    126   }
    127   CHECK(frame_it != translated_values.end());
    128 
    129   DeoptimizedFrameInfo* info =
    130       new DeoptimizedFrameInfo(&translated_values, frame_it, isolate);
    131 
    132   return info;
    133 }
    134 
    135 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
    136                                                 int count,
    137                                                 BailoutType type) {
    138   TableEntryGenerator generator(masm, type, count);
    139   generator.Generate();
    140 }
    141 
    142 void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    143     Context* context, OptimizedFunctionVisitor* visitor) {
    144   DisallowHeapAllocation no_allocation;
    145 
    146   CHECK(context->IsNativeContext());
    147 
    148   visitor->EnterContext(context);
    149 
    150   // Visit the list of optimized functions, removing elements that
    151   // no longer refer to optimized code.
    152   JSFunction* prev = NULL;
    153   Object* element = context->OptimizedFunctionsListHead();
    154   Isolate* isolate = context->GetIsolate();
    155   while (!element->IsUndefined(isolate)) {
    156     JSFunction* function = JSFunction::cast(element);
    157     Object* next = function->next_function_link();
    158     if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
    159         (visitor->VisitFunction(function),
    160          function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
    161       // The function no longer refers to optimized code, or the visitor
     162       // changed the code it refers to so that it is no longer optimized code.
    163       // Remove the function from this list.
    164       if (prev != NULL) {
    165         prev->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
    166       } else {
    167         context->SetOptimizedFunctionsListHead(next);
    168       }
    169       // The visitor should not alter the link directly.
    170       CHECK_EQ(function->next_function_link(), next);
    171       // Set the next function link to undefined to indicate it is no longer
    172       // in the optimized functions list.
    173       function->set_next_function_link(context->GetHeap()->undefined_value(),
    174                                        SKIP_WRITE_BARRIER);
    175     } else {
    176       // The visitor should not alter the link directly.
    177       CHECK_EQ(function->next_function_link(), next);
     178       // Preserve this element.
    179       prev = function;
    180     }
    181     element = next;
    182   }
    183 
    184   visitor->LeaveContext(context);
    185 }
    186 
    187 
    188 void Deoptimizer::VisitAllOptimizedFunctions(
    189     Isolate* isolate,
    190     OptimizedFunctionVisitor* visitor) {
    191   DisallowHeapAllocation no_allocation;
    192 
    193   // Run through the list of all native contexts.
    194   Object* context = isolate->heap()->native_contexts_list();
    195   while (!context->IsUndefined(isolate)) {
    196     VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
    197     context = Context::cast(context)->next_context_link();
    198   }
    199 }
    200 
    201 
    202 // Unlink functions referring to code marked for deoptimization, then move
    203 // marked code from the optimized code list to the deoptimized code list,
    204 // and patch code for lazy deopt.
    205 void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
    206   DisallowHeapAllocation no_allocation;
    207 
    208   // A "closure" that unlinks optimized code that is going to be
    209   // deoptimized from the functions that refer to it.
    210   class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
    211    public:
    212     virtual void EnterContext(Context* context) { }  // Don't care.
    213     virtual void LeaveContext(Context* context)  { }  // Don't care.
    214     virtual void VisitFunction(JSFunction* function) {
    215       Code* code = function->code();
    216       if (!code->marked_for_deoptimization()) return;
    217 
    218       // Unlink this function and evict from optimized code map.
    219       SharedFunctionInfo* shared = function->shared();
    220       function->set_code(shared->code());
    221 
    222       if (FLAG_trace_deopt) {
    223         CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
    224         PrintF(scope.file(), "[deoptimizer unlinked: ");
    225         function->PrintName(scope.file());
    226         PrintF(scope.file(),
    227                " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
    228       }
    229     }
    230   };
    231 
    232   // Unlink all functions that refer to marked code.
    233   SelectedCodeUnlinker unlinker;
    234   VisitAllOptimizedFunctionsForContext(context, &unlinker);
    235 
    236   Isolate* isolate = context->GetHeap()->isolate();
    237 #ifdef DEBUG
    238   Code* topmost_optimized_code = NULL;
    239   bool safe_to_deopt_topmost_optimized_code = false;
    240   // Make sure all activations of optimized code can deopt at their current PC.
    241   // The topmost optimized code has special handling because it cannot be
    242   // deoptimized due to weak object dependency.
    243   for (StackFrameIterator it(isolate, isolate->thread_local_top());
    244        !it.done(); it.Advance()) {
    245     StackFrame::Type type = it.frame()->type();
    246     if (type == StackFrame::OPTIMIZED) {
    247       Code* code = it.frame()->LookupCode();
    248       JSFunction* function =
    249           static_cast<OptimizedFrame*>(it.frame())->function();
    250       if (FLAG_trace_deopt) {
    251         CodeTracer::Scope scope(isolate->GetCodeTracer());
    252         PrintF(scope.file(), "[deoptimizer found activation of function: ");
    253         function->PrintName(scope.file());
    254         PrintF(scope.file(),
    255                " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
    256       }
    257       SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
    258       int deopt_index = safepoint.deoptimization_index();
     259       // Turbofan deopt is checked when we are patching addresses on the stack.
    260       bool turbofanned =
    261           code->is_turbofanned() && function->shared()->asm_function();
    262       bool safe_to_deopt =
    263           deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
    264       bool builtin = code->kind() == Code::BUILTIN;
    265       CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned ||
    266             builtin);
    267       if (topmost_optimized_code == NULL) {
    268         topmost_optimized_code = code;
    269         safe_to_deopt_topmost_optimized_code = safe_to_deopt;
    270       }
    271     }
    272   }
    273 #endif
    274 
    275   // Move marked code from the optimized code list to the deoptimized
    276   // code list, collecting them into a ZoneList.
    277   Zone zone(isolate->allocator(), ZONE_NAME);
    278   ZoneList<Code*> codes(10, &zone);
    279 
    280   // Walk over all optimized code objects in this native context.
    281   Code* prev = NULL;
    282   Object* element = context->OptimizedCodeListHead();
    283   while (!element->IsUndefined(isolate)) {
    284     Code* code = Code::cast(element);
    285     CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    286     Object* next = code->next_code_link();
    287 
    288     if (code->marked_for_deoptimization()) {
    289       // Put the code into the list for later patching.
    290       codes.Add(code, &zone);
    291 
    292       if (prev != NULL) {
    293         // Skip this code in the optimized code list.
    294         prev->set_next_code_link(next);
    295       } else {
    296         // There was no previous node, the next node is the new head.
    297         context->SetOptimizedCodeListHead(next);
    298       }
    299 
    300       // Move the code to the _deoptimized_ code list.
    301       code->set_next_code_link(context->DeoptimizedCodeListHead());
    302       context->SetDeoptimizedCodeListHead(code);
    303     } else {
    304       // Not marked; preserve this element.
    305       prev = code;
    306     }
    307     element = next;
    308   }
    309 
    310   // We need a handle scope only because of the macro assembler,
    311   // which is used in code patching in EnsureCodeForDeoptimizationEntry.
    312   HandleScope scope(isolate);
    313 
    314   // Now patch all the codes for deoptimization.
    315   for (int i = 0; i < codes.length(); i++) {
    316 #ifdef DEBUG
    317     if (codes[i] == topmost_optimized_code) {
    318       DCHECK(safe_to_deopt_topmost_optimized_code);
    319     }
    320 #endif
    321     // It is finally time to die, code object.
    322 
    323     // Remove the code from optimized code map.
    324     DeoptimizationInputData* deopt_data =
    325         DeoptimizationInputData::cast(codes[i]->deoptimization_data());
    326     SharedFunctionInfo* shared =
    327         SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo());
    328     shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");
    329 
    330     // Do platform-specific patching to force any activations to lazy deopt.
    331     PatchCodeForDeoptimization(isolate, codes[i]);
    332 
    333     // We might be in the middle of incremental marking with compaction.
    334     // Tell collector to treat this code object in a special way and
    335     // ignore all slots that might have been recorded on it.
    336     isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
    337   }
    338 }
    339 
    340 
    341 void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
    342   RuntimeCallTimerScope runtimeTimer(isolate,
    343                                      &RuntimeCallStats::DeoptimizeCode);
    344   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
    345   TRACE_EVENT0("v8", "V8.DeoptimizeCode");
    346   if (FLAG_trace_deopt) {
    347     CodeTracer::Scope scope(isolate->GetCodeTracer());
    348     PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
    349   }
    350   DisallowHeapAllocation no_allocation;
    351   // For all contexts, mark all code, then deoptimize.
    352   Object* context = isolate->heap()->native_contexts_list();
    353   while (!context->IsUndefined(isolate)) {
    354     Context* native_context = Context::cast(context);
    355     MarkAllCodeForContext(native_context);
    356     DeoptimizeMarkedCodeForContext(native_context);
    357     context = native_context->next_context_link();
    358   }
    359 }
    360 
    361 
    362 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
    363   RuntimeCallTimerScope runtimeTimer(isolate,
    364                                      &RuntimeCallStats::DeoptimizeCode);
    365   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
    366   TRACE_EVENT0("v8", "V8.DeoptimizeCode");
    367   if (FLAG_trace_deopt) {
    368     CodeTracer::Scope scope(isolate->GetCodeTracer());
    369     PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
    370   }
    371   DisallowHeapAllocation no_allocation;
    372   // For all contexts, deoptimize code already marked.
    373   Object* context = isolate->heap()->native_contexts_list();
    374   while (!context->IsUndefined(isolate)) {
    375     Context* native_context = Context::cast(context);
    376     DeoptimizeMarkedCodeForContext(native_context);
    377     context = native_context->next_context_link();
    378   }
    379 }
    380 
    381 
    382 void Deoptimizer::MarkAllCodeForContext(Context* context) {
    383   Object* element = context->OptimizedCodeListHead();
    384   Isolate* isolate = context->GetIsolate();
    385   while (!element->IsUndefined(isolate)) {
    386     Code* code = Code::cast(element);
    387     CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    388     code->set_marked_for_deoptimization(true);
    389     element = code->next_code_link();
    390   }
    391 }
    392 
    393 void Deoptimizer::DeoptimizeFunction(JSFunction* function, Code* code) {
    394   Isolate* isolate = function->GetIsolate();
    395   RuntimeCallTimerScope runtimeTimer(isolate,
    396                                      &RuntimeCallStats::DeoptimizeCode);
    397   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
    398   TRACE_EVENT0("v8", "V8.DeoptimizeCode");
    399   if (code == nullptr) code = function->code();
    400   if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    401     // Mark the code for deoptimization and unlink any functions that also
    402     // refer to that code. The code cannot be shared across native contexts,
    403     // so we only need to search one.
    404     code->set_marked_for_deoptimization(true);
    405     DeoptimizeMarkedCodeForContext(function->context()->native_context());
    406   }
    407 }
    408 
    409 
    410 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
    411   deoptimizer->DoComputeOutputFrames();
    412 }
    413 
    414 bool Deoptimizer::TraceEnabledFor(StackFrame::Type frame_type) {
    415   return (frame_type == StackFrame::STUB) ? FLAG_trace_stub_failures
    416                                           : FLAG_trace_deopt;
    417 }
    418 
    419 
    420 const char* Deoptimizer::MessageFor(BailoutType type) {
    421   switch (type) {
    422     case EAGER: return "eager";
    423     case SOFT: return "soft";
    424     case LAZY: return "lazy";
    425   }
    426   FATAL("Unsupported deopt type");
    427   return NULL;
    428 }
    429 
    430 Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
    431                          BailoutType type, unsigned bailout_id, Address from,
    432                          int fp_to_sp_delta)
    433     : isolate_(isolate),
    434       function_(function),
    435       bailout_id_(bailout_id),
    436       bailout_type_(type),
    437       from_(from),
    438       fp_to_sp_delta_(fp_to_sp_delta),
    439       deoptimizing_throw_(false),
    440       catch_handler_data_(-1),
    441       catch_handler_pc_offset_(-1),
    442       input_(nullptr),
    443       output_count_(0),
    444       jsframe_count_(0),
    445       output_(nullptr),
    446       caller_frame_top_(0),
    447       caller_fp_(0),
    448       caller_pc_(0),
    449       caller_constant_pool_(0),
    450       input_frame_context_(0),
    451       stack_fp_(0),
    452       trace_scope_(nullptr) {
    453   if (isolate->deoptimizer_lazy_throw()) {
    454     isolate->set_deoptimizer_lazy_throw(false);
    455     deoptimizing_throw_ = true;
    456   }
    457 
    458   // For COMPILED_STUBs called from builtins, the function pointer is a SMI
    459   // indicating an internal frame.
    460   if (function->IsSmi()) {
    461     function = nullptr;
    462   }
    463   DCHECK(from != nullptr);
    464   if (function != nullptr && function->IsOptimized()) {
    465     function->shared()->increment_deopt_count();
    466     if (bailout_type_ == Deoptimizer::SOFT) {
    467       isolate->counters()->soft_deopts_executed()->Increment();
    468       // Soft deopts shouldn't count against the overall re-optimization count
    469       // that can eventually lead to disabling optimization for a function.
    470       int opt_count = function->shared()->opt_count();
    471       if (opt_count > 0) opt_count--;
    472       function->shared()->set_opt_count(opt_count);
    473     }
    474   }
    475   compiled_code_ = FindOptimizedCode(function);
    476 #if DEBUG
    477   DCHECK(compiled_code_ != NULL);
    478   if (type == EAGER || type == SOFT || type == LAZY) {
    479     DCHECK(compiled_code_->kind() != Code::FUNCTION);
    480   }
    481 #endif
    482 
    483   StackFrame::Type frame_type = function == NULL
    484       ? StackFrame::STUB
    485       : StackFrame::JAVA_SCRIPT;
    486   trace_scope_ = TraceEnabledFor(frame_type)
    487                      ? new CodeTracer::Scope(isolate->GetCodeTracer())
    488                      : NULL;
    489 #ifdef DEBUG
    490   CHECK(AllowHeapAllocation::IsAllowed());
    491   disallow_heap_allocation_ = new DisallowHeapAllocation();
    492 #endif  // DEBUG
    493   if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    494     PROFILE(isolate_, CodeDeoptEvent(compiled_code_, from_, fp_to_sp_delta_));
    495   }
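           // Allocate the input frame description; the parameter count includes the
           // implicit receiver, hence the "+ 1".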
    496   unsigned size = ComputeInputFrameSize();
    497   int parameter_count =
    498       function == nullptr
    499           ? 0
    500           : (function->shared()->internal_formal_parameter_count() + 1);
    501   input_ = new (size) FrameDescription(size, parameter_count);
    502   input_->SetFrameType(frame_type);
    503 }
    504 
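         // Returns the code being deoptimized: prefer a hit on the native context's
         // deoptimized code list; otherwise fall back to whatever code object contains
         // the from_ address.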
    505 Code* Deoptimizer::FindOptimizedCode(JSFunction* function) {
    506   Code* compiled_code = FindDeoptimizingCode(from_);
    507   return (compiled_code == NULL)
    508              ? static_cast<Code*>(isolate_->FindCodeObject(from_))
    509              : compiled_code;
    510 }
    511 
    512 
    513 void Deoptimizer::PrintFunctionName() {
    514   if (function_ != nullptr && function_->IsJSFunction()) {
    515     function_->ShortPrint(trace_scope_->file());
    516   } else {
    517     PrintF(trace_scope_->file(),
    518            "%s", Code::Kind2String(compiled_code_->kind()));
    519   }
    520 }
    521 
    522 
    523 Deoptimizer::~Deoptimizer() {
    524   DCHECK(input_ == NULL && output_ == NULL);
    525   DCHECK(disallow_heap_allocation_ == NULL);
    526   delete trace_scope_;
    527 }
    528 
    529 
    530 void Deoptimizer::DeleteFrameDescriptions() {
    531   delete input_;
    532   for (int i = 0; i < output_count_; ++i) {
    533     if (output_[i] != input_) delete output_[i];
    534   }
    535   delete[] output_;
    536   input_ = NULL;
    537   output_ = NULL;
    538 #ifdef DEBUG
    539   CHECK(!AllowHeapAllocation::IsAllowed());
    540   CHECK(disallow_heap_allocation_ != NULL);
    541   delete disallow_heap_allocation_;
    542   disallow_heap_allocation_ = NULL;
    543 #endif  // DEBUG
    544 }
    545 
    546 
    547 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
    548                                             int id,
    549                                             BailoutType type,
    550                                             GetEntryMode mode) {
    551   CHECK_GE(id, 0);
    552   if (id >= kMaxNumberOfEntries) return NULL;
    553   if (mode == ENSURE_ENTRY_CODE) {
    554     EnsureCodeForDeoptimizationEntry(isolate, type, id);
    555   } else {
    556     CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
    557   }
    558   DeoptimizerData* data = isolate->deoptimizer_data();
    559   CHECK_LE(type, kLastBailoutType);
    560   MemoryChunk* base = data->deopt_entry_code_[type];
    561   return base->area_start() + (id * table_entry_size_);
    562 }
    563 
    564 
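         // Map an address inside the deopt entry table back to its entry index, or
         // return kNotDeoptimizationEntry if the address is outside the table for the
         // given bailout type.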
    565 int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
    566                                      Address addr,
    567                                      BailoutType type) {
    568   DeoptimizerData* data = isolate->deoptimizer_data();
    569   MemoryChunk* base = data->deopt_entry_code_[type];
    570   Address start = base->area_start();
    571   if (addr < start ||
    572       addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    573     return kNotDeoptimizationEntry;
    574   }
    575   DCHECK_EQ(0,
    576             static_cast<int>(addr - start) % table_entry_size_);
    577   return static_cast<int>(addr - start) / table_entry_size_;
    578 }
    579 
    580 
    581 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
    582                                BailoutId id,
    583                                SharedFunctionInfo* shared) {
    584   // TODO(kasperl): For now, we do a simple linear search for the PC
    585   // offset associated with the given node id. This should probably be
    586   // changed to a binary search.
    587   int length = data->DeoptPoints();
    588   for (int i = 0; i < length; i++) {
    589     if (data->AstId(i) == id) {
    590       return data->PcAndState(i)->value();
    591     }
    592   }
    593   OFStream os(stderr);
    594   os << "[couldn't find pc offset for node=" << id.ToInt() << "]\n"
    595      << "[method: " << shared->DebugName()->ToCString().get() << "]\n"
    596      << "[source:\n" << SourceCodeOf(shared) << "\n]" << std::endl;
    597 
    598   shared->GetHeap()->isolate()->PushStackTraceAndDie(0xfefefefe, data, shared,
    599                                                      0xfefefeff);
    600   FATAL("unable to find pc offset during deoptimization");
    601   return -1;
    602 }
    603 
    604 
    605 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
    606   int length = 0;
    607   // Count all entries in the deoptimizing code list of every context.
    608   Object* context = isolate->heap()->native_contexts_list();
    609   while (!context->IsUndefined(isolate)) {
    610     Context* native_context = Context::cast(context);
    611     Object* element = native_context->DeoptimizedCodeListHead();
    612     while (!element->IsUndefined(isolate)) {
    613       Code* code = Code::cast(element);
    614       DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
    615       length++;
    616       element = code->next_code_link();
    617     }
    618     context = Context::cast(context)->next_context_link();
    619   }
    620   return length;
    621 }
    622 
    623 namespace {
    624 
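         // Find a catch handler covering the given translated frame. Only interpreted
         // frames are searched for a handler range; full-codegen frames are expected
         // to have no range entries. Returns the handler offset and stores its extra
         // data in *data_out, or returns -1 if there is no handler.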
    625 int LookupCatchHandler(TranslatedFrame* translated_frame, int* data_out) {
    626   switch (translated_frame->kind()) {
    627     case TranslatedFrame::kFunction: {
    628 #ifdef DEBUG
    629       JSFunction* function =
    630           JSFunction::cast(translated_frame->begin()->GetRawValue());
    631       Code* non_optimized_code = function->shared()->code();
    632       HandlerTable* table =
    633           HandlerTable::cast(non_optimized_code->handler_table());
    634       DCHECK_EQ(0, table->NumberOfRangeEntries());
    635 #endif
    636       break;
    637     }
    638     case TranslatedFrame::kInterpretedFunction: {
    639       int bytecode_offset = translated_frame->node_id().ToInt();
    640       JSFunction* function =
    641           JSFunction::cast(translated_frame->begin()->GetRawValue());
    642       BytecodeArray* bytecode = function->shared()->bytecode_array();
    643       HandlerTable* table = HandlerTable::cast(bytecode->handler_table());
    644       return table->LookupRange(bytecode_offset, data_out, nullptr);
    645     }
    646     default:
    647       break;
    648   }
    649   return -1;
    650 }
    651 
    652 }  // namespace
    653 
    654 // We rely on this function not causing a GC.  It is called from generated code
    655 // without having a real stack frame in place.
    656 void Deoptimizer::DoComputeOutputFrames() {
    657   base::ElapsedTimer timer;
    658 
    659   // Determine basic deoptimization information.  The optimized frame is
    660   // described by the input data.
    661   DeoptimizationInputData* input_data =
    662       DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
    663 
    664   {
    665     // Read caller's PC, caller's FP and caller's constant pool values
    666     // from input frame. Compute caller's frame top address.
    667 
    668     Register fp_reg = JavaScriptFrame::fp_register();
    669     stack_fp_ = input_->GetRegister(fp_reg.code());
    670 
    671     caller_frame_top_ = stack_fp_ + ComputeInputFrameAboveFpFixedSize();
    672 
    673     Address fp_address = input_->GetFramePointerAddress();
    674     caller_fp_ = Memory::intptr_at(fp_address);
    675     caller_pc_ =
    676         Memory::intptr_at(fp_address + CommonFrameConstants::kCallerPCOffset);
    677     input_frame_context_ = Memory::intptr_at(
    678         fp_address + CommonFrameConstants::kContextOrFrameTypeOffset);
    679 
    680     if (FLAG_enable_embedded_constant_pool) {
    681       caller_constant_pool_ = Memory::intptr_at(
    682           fp_address + CommonFrameConstants::kConstantPoolOffset);
    683     }
    684   }
    685 
    686   if (trace_scope_ != NULL) {
    687     timer.Start();
    688     PrintF(trace_scope_->file(), "[deoptimizing (DEOPT %s): begin ",
    689            MessageFor(bailout_type_));
    690     PrintFunctionName();
    691     PrintF(trace_scope_->file(),
    692            " (opt #%d) @%d, FP to SP delta: %d, caller sp: 0x%08" V8PRIxPTR
    693            "]\n",
    694            input_data->OptimizationId()->value(), bailout_id_, fp_to_sp_delta_,
    695            caller_frame_top_);
    696     if (bailout_type_ == EAGER || bailout_type_ == SOFT ||
    697         (compiled_code_->is_hydrogen_stub())) {
    698       compiled_code_->PrintDeoptLocation(trace_scope_->file(), from_);
    699     }
    700   }
    701 
    702   BailoutId node_id = input_data->AstId(bailout_id_);
    703   ByteArray* translations = input_data->TranslationByteArray();
    704   unsigned translation_index =
    705       input_data->TranslationIndex(bailout_id_)->value();
    706 
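           // Build the translated state for this deopt point by walking the serialized
           // translation, using the literal array and the register values captured in
           // the input frame.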
    707   TranslationIterator state_iterator(translations, translation_index);
    708   translated_state_.Init(
    709       input_->GetFramePointerAddress(), &state_iterator,
    710       input_data->LiteralArray(), input_->GetRegisterValues(),
    711       trace_scope_ == nullptr ? nullptr : trace_scope_->file());
    712 
    713   // Do the input frame to output frame(s) translation.
    714   size_t count = translated_state_.frames().size();
    715   // If we are supposed to go to the catch handler, find the catching frame
     716   // for the catch and make sure we only deoptimize up to that frame.
    717   if (deoptimizing_throw_) {
    718     size_t catch_handler_frame_index = count;
    719     for (size_t i = count; i-- > 0;) {
    720       catch_handler_pc_offset_ = LookupCatchHandler(
    721           &(translated_state_.frames()[i]), &catch_handler_data_);
    722       if (catch_handler_pc_offset_ >= 0) {
    723         catch_handler_frame_index = i;
    724         break;
    725       }
    726     }
    727     CHECK_LT(catch_handler_frame_index, count);
    728     count = catch_handler_frame_index + 1;
    729   }
    730 
    731   DCHECK(output_ == NULL);
    732   output_ = new FrameDescription*[count];
    733   for (size_t i = 0; i < count; ++i) {
    734     output_[i] = NULL;
    735   }
    736   output_count_ = static_cast<int>(count);
    737 
    738   // Translate each output frame.
    739   int frame_index = 0;  // output_frame_index
    740   for (size_t i = 0; i < count; ++i, ++frame_index) {
    741     // Read the ast node id, function, and frame height for this output frame.
    742     TranslatedFrame* translated_frame = &(translated_state_.frames()[i]);
    743     switch (translated_frame->kind()) {
    744       case TranslatedFrame::kFunction:
    745         DoComputeJSFrame(translated_frame, frame_index,
    746                          deoptimizing_throw_ && i == count - 1);
    747         jsframe_count_++;
    748         break;
    749       case TranslatedFrame::kInterpretedFunction:
    750         DoComputeInterpretedFrame(translated_frame, frame_index,
    751                                   deoptimizing_throw_ && i == count - 1);
    752         jsframe_count_++;
    753         break;
    754       case TranslatedFrame::kArgumentsAdaptor:
    755         DoComputeArgumentsAdaptorFrame(translated_frame, frame_index);
    756         break;
    757       case TranslatedFrame::kTailCallerFunction:
    758         DoComputeTailCallerFrame(translated_frame, frame_index);
    759         // Tail caller frame translations do not produce output frames.
    760         frame_index--;
    761         output_count_--;
    762         break;
    763       case TranslatedFrame::kConstructStub:
    764         DoComputeConstructStubFrame(translated_frame, frame_index);
    765         break;
    766       case TranslatedFrame::kGetter:
    767         DoComputeAccessorStubFrame(translated_frame, frame_index, false);
    768         break;
    769       case TranslatedFrame::kSetter:
    770         DoComputeAccessorStubFrame(translated_frame, frame_index, true);
    771         break;
    772       case TranslatedFrame::kCompiledStub:
    773         DoComputeCompiledStubFrame(translated_frame, frame_index);
    774         break;
    775       case TranslatedFrame::kInvalid:
    776         FATAL("invalid frame");
    777         break;
    778     }
    779   }
    780 
    781   // Print some helpful diagnostic information.
    782   if (trace_scope_ != NULL) {
    783     double ms = timer.Elapsed().InMillisecondsF();
    784     int index = output_count_ - 1;  // Index of the topmost frame.
    785     PrintF(trace_scope_->file(), "[deoptimizing (%s): end ",
    786            MessageFor(bailout_type_));
    787     PrintFunctionName();
    788     PrintF(trace_scope_->file(),
    789            " @%d => node=%d, pc=0x%08" V8PRIxPTR ", caller sp=0x%08" V8PRIxPTR
    790            ", state=%s, took %0.3f ms]\n",
    791            bailout_id_, node_id.ToInt(), output_[index]->GetPc(),
    792            caller_frame_top_, BailoutStateToString(static_cast<BailoutState>(
    793                                   output_[index]->GetState()->value())),
    794            ms);
    795   }
    796 }
    797 
    798 void Deoptimizer::DoComputeJSFrame(TranslatedFrame* translated_frame,
    799                                    int frame_index, bool goto_catch_handler) {
    800   SharedFunctionInfo* shared = translated_frame->raw_shared_info();
    801 
    802   TranslatedFrame::iterator value_iterator = translated_frame->begin();
    803   bool is_bottommost = (0 == frame_index);
    804   bool is_topmost = (output_count_ - 1 == frame_index);
    805   int input_index = 0;
    806 
    807   BailoutId node_id = translated_frame->node_id();
    808   unsigned height =
    809       translated_frame->height() - 1;  // Do not count the context.
    810   unsigned height_in_bytes = height * kPointerSize;
    811   if (goto_catch_handler) {
    812     // Take the stack height from the handler table.
    813     height = catch_handler_data_;
    814     // We also make space for the exception itself.
    815     height_in_bytes = (height + 1) * kPointerSize;
    816     CHECK(is_topmost);
    817   }
    818 
    819   JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
    820   value_iterator++;
    821   input_index++;
    822   if (trace_scope_ != NULL) {
    823     PrintF(trace_scope_->file(), "  translating frame ");
    824     std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
    825     PrintF(trace_scope_->file(), "%s", name.get());
    826     PrintF(trace_scope_->file(), " => node=%d, height=%d%s\n", node_id.ToInt(),
    827            height_in_bytes, goto_catch_handler ? " (throw)" : "");
    828   }
    829 
    830   // The 'fixed' part of the frame consists of the incoming parameters and
    831   // the part described by JavaScriptFrameConstants.
    832   unsigned fixed_frame_size = ComputeJavascriptFixedSize(shared);
    833   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
    834 
    835   // Allocate and store the output frame description.
    836   int parameter_count = shared->internal_formal_parameter_count() + 1;
    837   FrameDescription* output_frame = new (output_frame_size)
    838       FrameDescription(output_frame_size, parameter_count);
    839   output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
    840 
    841   CHECK(frame_index >= 0 && frame_index < output_count_);
    842   CHECK_NULL(output_[frame_index]);
    843   output_[frame_index] = output_frame;
    844 
    845   // The top address of the frame is computed from the previous frame's top and
    846   // this frame's size.
    847   intptr_t top_address;
    848   if (is_bottommost) {
    849     top_address = caller_frame_top_ - output_frame_size;
    850   } else {
    851     top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
    852   }
    853   output_frame->SetTop(top_address);
    854 
    855   // Compute the incoming parameter translation.
    856   unsigned output_offset = output_frame_size;
    857   for (int i = 0; i < parameter_count; ++i) {
    858     output_offset -= kPointerSize;
    859     WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
    860                                  output_offset);
    861   }
    862 
    863   if (trace_scope_ != nullptr) {
    864     PrintF(trace_scope_->file(), "    -------------------------\n");
    865   }
    866 
    867   // There are no translation commands for the caller's pc and fp, the
    868   // context, and the function.  Synthesize their values and set them up
    869   // explicitly.
    870   //
    871   // The caller's pc for the bottommost output frame is the same as in the
    872   // input frame.  For all subsequent output frames, it can be read from the
    873   // previous one.  This frame's pc can be computed from the non-optimized
    874   // function code and AST id of the bailout.
    875   output_offset -= kPCOnStackSize;
    876   intptr_t value;
    877   if (is_bottommost) {
    878     value = caller_pc_;
    879   } else {
    880     value = output_[frame_index - 1]->GetPc();
    881   }
    882   output_frame->SetCallerPc(output_offset, value);
    883   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");
    884 
    885   // The caller's frame pointer for the bottommost output frame is the same
    886   // as in the input frame.  For all subsequent output frames, it can be
    887   // read from the previous one.  Also compute and set this frame's frame
    888   // pointer.
    889   output_offset -= kFPOnStackSize;
    890   if (is_bottommost) {
    891     value = caller_fp_;
    892   } else {
    893     value = output_[frame_index - 1]->GetFp();
    894   }
    895   output_frame->SetCallerFp(output_offset, value);
    896   intptr_t fp_value = top_address + output_offset;
    897   output_frame->SetFp(fp_value);
    898   if (is_topmost) {
    899     Register fp_reg = JavaScriptFrame::fp_register();
    900     output_frame->SetRegister(fp_reg.code(), fp_value);
    901   }
    902   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
    903 
    904   if (FLAG_enable_embedded_constant_pool) {
    905     // For the bottommost output frame the constant pool pointer can be gotten
    906     // from the input frame. For subsequent output frames, it can be read from
    907     // the previous frame.
    908     output_offset -= kPointerSize;
    909     if (is_bottommost) {
    910       value = caller_constant_pool_;
    911     } else {
    912       value = output_[frame_index - 1]->GetConstantPool();
    913     }
    914     output_frame->SetCallerConstantPool(output_offset, value);
    915     DebugPrintOutputSlot(value, frame_index, output_offset,
    916                          "caller's constant_pool\n");
    917   }
    918 
    919   // For the bottommost output frame the context can be gotten from the input
    920   // frame. For all subsequent output frames it can be gotten from the function
    921   // so long as we don't inline functions that need local contexts.
    922   output_offset -= kPointerSize;
    923 
    924   // When deoptimizing into a catch block, we need to take the context
    925   // from just above the top of the operand stack (we push the context
    926   // at the entry of the try block).
    927   TranslatedFrame::iterator context_pos = value_iterator;
    928   int context_input_index = input_index;
    929   if (goto_catch_handler) {
    930     for (unsigned i = 0; i < height + 1; ++i) {
    931       context_pos++;
    932       context_input_index++;
    933     }
    934   }
    935   // Read the context from the translations.
    936   Object* context = context_pos->GetRawValue();
    937   if (context->IsUndefined(isolate_)) {
    938     // If the context was optimized away, just use the context from
    939     // the activation. This should only apply to Crankshaft code.
    940     CHECK(!compiled_code_->is_turbofanned());
    941     context = is_bottommost ? reinterpret_cast<Object*>(input_frame_context_)
    942                             : function->context();
    943   }
    944   value = reinterpret_cast<intptr_t>(context);
    945   output_frame->SetContext(value);
    946   WriteValueToOutput(context, context_input_index, frame_index, output_offset,
    947                      "context    ");
    948   if (context == isolate_->heap()->arguments_marker()) {
    949     Address output_address =
    950         reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
    951         output_offset;
    952     values_to_materialize_.push_back({output_address, context_pos});
    953   }
    954   value_iterator++;
    955   input_index++;
    956 
    957   // The function was mentioned explicitly in the BEGIN_FRAME.
    958   output_offset -= kPointerSize;
    959   value = reinterpret_cast<intptr_t>(function);
    960   WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
    961 
    962   if (trace_scope_ != nullptr) {
    963     PrintF(trace_scope_->file(), "    -------------------------\n");
    964   }
    965 
    966   // Translate the rest of the frame.
    967   for (unsigned i = 0; i < height; ++i) {
    968     output_offset -= kPointerSize;
    969     WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
    970                                  output_offset);
    971   }
    972   if (goto_catch_handler) {
    973     // Write out the exception for the catch handler.
    974     output_offset -= kPointerSize;
    975     Object* exception_obj = reinterpret_cast<Object*>(
    976         input_->GetRegister(FullCodeGenerator::result_register().code()));
    977     WriteValueToOutput(exception_obj, input_index, frame_index, output_offset,
    978                        "exception   ");
    979     input_index++;
    980   }
    981   CHECK_EQ(0u, output_offset);
    982 
    983   // Update constant pool.
    984   Code* non_optimized_code = shared->code();
    985   if (FLAG_enable_embedded_constant_pool) {
    986     intptr_t constant_pool_value =
    987         reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
    988     output_frame->SetConstantPool(constant_pool_value);
    989     if (is_topmost) {
    990       Register constant_pool_reg =
    991           JavaScriptFrame::constant_pool_pointer_register();
    992       output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    993     }
    994   }
    995 
    996   // Compute this frame's PC and state.
    997   FixedArray* raw_data = non_optimized_code->deoptimization_data();
    998   DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
    999   Address start = non_optimized_code->instruction_start();
   1000   unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
   1001   unsigned pc_offset = goto_catch_handler
   1002                            ? catch_handler_pc_offset_
   1003                            : FullCodeGenerator::PcField::decode(pc_and_state);
   1004   intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
   1005   output_frame->SetPc(pc_value);
   1006 
   1007   // If we are going to the catch handler, then the exception lives in
   1008   // the accumulator.
   1009   BailoutState state =
   1010       goto_catch_handler
   1011           ? BailoutState::TOS_REGISTER
   1012           : FullCodeGenerator::BailoutStateField::decode(pc_and_state);
   1013   output_frame->SetState(Smi::FromInt(static_cast<int>(state)));
   1014 
   1015   // Clear the context register. The context might be a de-materialized object
   1016   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
   1017   // safety we use Smi(0) instead of the potential {arguments_marker} here.
   1018   if (is_topmost) {
   1019     intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
   1020     Register context_reg = JavaScriptFrame::context_register();
   1021     output_frame->SetRegister(context_reg.code(), context_value);
   1022   }
   1023 
   1024   // Set the continuation for the topmost frame.
   1025   if (is_topmost) {
   1026     Builtins* builtins = isolate_->builtins();
   1027     Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
   1028     if (bailout_type_ == LAZY) {
   1029       continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
   1030     } else if (bailout_type_ == SOFT) {
   1031       continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
   1032     } else {
   1033       CHECK_EQ(bailout_type_, EAGER);
   1034     }
   1035     output_frame->SetContinuation(
   1036         reinterpret_cast<intptr_t>(continuation->entry()));
   1037   }
   1038 }
   1039 
   1040 void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
   1041                                             int frame_index,
   1042                                             bool goto_catch_handler) {
   1043   SharedFunctionInfo* shared = translated_frame->raw_shared_info();
   1044 
   1045   TranslatedFrame::iterator value_iterator = translated_frame->begin();
   1046   bool is_bottommost = (0 == frame_index);
   1047   bool is_topmost = (output_count_ - 1 == frame_index);
   1048   int input_index = 0;
   1049 
   1050   int bytecode_offset = translated_frame->node_id().ToInt();
   1051   unsigned height = translated_frame->height();
   1052   unsigned height_in_bytes = height * kPointerSize;
   1053 
    1054   // All translations for interpreted frames contain the accumulator and hence
   1055   // are assumed to be in bailout state {BailoutState::TOS_REGISTER}. However
   1056   // such a state is only supported for the topmost frame. We need to skip
   1057   // pushing the accumulator for any non-topmost frame.
   1058   if (!is_topmost) height_in_bytes -= kPointerSize;
   1059 
   1060   JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
   1061   value_iterator++;
   1062   input_index++;
   1063   if (trace_scope_ != NULL) {
   1064     PrintF(trace_scope_->file(), "  translating interpreted frame ");
   1065     std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
   1066     PrintF(trace_scope_->file(), "%s", name.get());
   1067     PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d%s\n",
   1068            bytecode_offset, height_in_bytes,
   1069            goto_catch_handler ? " (throw)" : "");
   1070   }
   1071   if (goto_catch_handler) {
   1072     bytecode_offset = catch_handler_pc_offset_;
   1073   }
   1074 
   1075   // The 'fixed' part of the frame consists of the incoming parameters and
   1076   // the part described by InterpreterFrameConstants.
   1077   unsigned fixed_frame_size = ComputeInterpretedFixedSize(shared);
   1078   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
   1079 
   1080   // Allocate and store the output frame description.
   1081   int parameter_count = shared->internal_formal_parameter_count() + 1;
   1082   FrameDescription* output_frame = new (output_frame_size)
   1083       FrameDescription(output_frame_size, parameter_count);
   1084   output_frame->SetFrameType(StackFrame::INTERPRETED);
   1085 
   1086   CHECK(frame_index >= 0 && frame_index < output_count_);
   1087   CHECK_NULL(output_[frame_index]);
   1088   output_[frame_index] = output_frame;
   1089 
   1090   // The top address of the frame is computed from the previous frame's top and
   1091   // this frame's size.
   1092   intptr_t top_address;
   1093   if (is_bottommost) {
   1094     top_address = caller_frame_top_ - output_frame_size;
   1095   } else {
   1096     top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1097   }
   1098   output_frame->SetTop(top_address);
   1099 
   1100   // Compute the incoming parameter translation.
   1101   unsigned output_offset = output_frame_size;
   1102   for (int i = 0; i < parameter_count; ++i) {
   1103     output_offset -= kPointerSize;
   1104     WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
   1105                                  output_offset);
   1106   }
   1107 
   1108   if (trace_scope_ != nullptr) {
   1109     PrintF(trace_scope_->file(), "    -------------------------\n");
   1110   }
   1111 
   1112   // There are no translation commands for the caller's pc and fp, the
   1113   // context, the function, new.target and the bytecode offset.  Synthesize
   1114   // their values and set them up
   1115   // explicitly.
   1116   //
   1117   // The caller's pc for the bottommost output frame is the same as in the
   1118   // input frame.  For all subsequent output frames, it can be read from the
   1119   // previous one.  This frame's pc can be computed from the non-optimized
   1120   // function code and AST id of the bailout.
   1121   output_offset -= kPCOnStackSize;
   1122   intptr_t value;
   1123   if (is_bottommost) {
   1124     value = caller_pc_;
   1125   } else {
   1126     value = output_[frame_index - 1]->GetPc();
   1127   }
   1128   output_frame->SetCallerPc(output_offset, value);
   1129   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");
   1130 
   1131   // The caller's frame pointer for the bottommost output frame is the same
   1132   // as in the input frame.  For all subsequent output frames, it can be
   1133   // read from the previous one.  Also compute and set this frame's frame
   1134   // pointer.
   1135   output_offset -= kFPOnStackSize;
   1136   if (is_bottommost) {
   1137     value = caller_fp_;
   1138   } else {
   1139     value = output_[frame_index - 1]->GetFp();
   1140   }
   1141   output_frame->SetCallerFp(output_offset, value);
   1142   intptr_t fp_value = top_address + output_offset;
   1143   output_frame->SetFp(fp_value);
   1144   if (is_topmost) {
   1145     Register fp_reg = InterpretedFrame::fp_register();
   1146     output_frame->SetRegister(fp_reg.code(), fp_value);
   1147   }
   1148   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
   1149 
   1150   if (FLAG_enable_embedded_constant_pool) {
   1151     // For the bottommost output frame the constant pool pointer can be gotten
   1152     // from the input frame. For subsequent output frames, it can be read from
   1153     // the previous frame.
   1154     output_offset -= kPointerSize;
   1155     if (is_bottommost) {
   1156       value = caller_constant_pool_;
   1157     } else {
   1158       value = output_[frame_index - 1]->GetConstantPool();
   1159     }
   1160     output_frame->SetCallerConstantPool(output_offset, value);
   1161     DebugPrintOutputSlot(value, frame_index, output_offset,
   1162                          "caller's constant_pool\n");
   1163   }
   1164 
   1165   // For the bottommost output frame the context can be gotten from the input
   1166   // frame. For all subsequent output frames it can be gotten from the function
   1167   // so long as we don't inline functions that need local contexts.
   1168   output_offset -= kPointerSize;
   1169 
   1170   // When deoptimizing into a catch block, we need to take the context
   1171   // from a register that was specified in the handler table.
   1172   TranslatedFrame::iterator context_pos = value_iterator;
   1173   int context_input_index = input_index;
   1174   if (goto_catch_handler) {
   1175     // Skip to the translated value of the register specified
   1176     // in the handler table.
   1177     for (int i = 0; i < catch_handler_data_ + 1; ++i) {
   1178       context_pos++;
   1179       context_input_index++;
   1180     }
   1181   }
   1182   // Read the context from the translations.
   1183   Object* context = context_pos->GetRawValue();
   1184   value = reinterpret_cast<intptr_t>(context);
   1185   output_frame->SetContext(value);
   1186   WriteValueToOutput(context, context_input_index, frame_index, output_offset,
   1187                      "context    ");
   1188   if (context == isolate_->heap()->arguments_marker()) {
   1189     Address output_address =
   1190         reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
   1191         output_offset;
   1192     values_to_materialize_.push_back({output_address, context_pos});
   1193   }
   1194   value_iterator++;
   1195   input_index++;
   1196 
   1197   // The function was mentioned explicitly in the BEGIN_FRAME.
   1198   output_offset -= kPointerSize;
   1199   value = reinterpret_cast<intptr_t>(function);
   1200   WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
   1201 
    1202   // The new.target slot is only used during function activation, which is
   1203   // before the first deopt point, so should never be needed. Just set it to
   1204   // undefined.
   1205   output_offset -= kPointerSize;
   1206   Object* new_target = isolate_->heap()->undefined_value();
   1207   WriteValueToOutput(new_target, 0, frame_index, output_offset, "new_target  ");
   1208 
   1209   // Set the bytecode array pointer.
   1210   output_offset -= kPointerSize;
   1211   Object* bytecode_array = shared->HasDebugInfo()
   1212                                ? shared->GetDebugInfo()->DebugBytecodeArray()
   1213                                : shared->bytecode_array();
   1214   WriteValueToOutput(bytecode_array, 0, frame_index, output_offset,
   1215                      "bytecode array ");
   1216 
   1217   // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
   1218   output_offset -= kPointerSize;
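             // The stored value is the offset from the start of the BytecodeArray
             // object (header included, heap tag stripped), encoded as a Smi.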
   1219   int raw_bytecode_offset =
   1220       BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset;
   1221   Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
   1222   WriteValueToOutput(smi_bytecode_offset, 0, frame_index, output_offset,
   1223                      "bytecode offset ");
   1224 
   1225   if (trace_scope_ != nullptr) {
   1226     PrintF(trace_scope_->file(), "    -------------------------\n");
   1227   }
   1228 
   1229   // Translate the rest of the interpreter registers in the frame.
   1230   for (unsigned i = 0; i < height - 1; ++i) {
   1231     output_offset -= kPointerSize;
   1232     WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
   1233                                  output_offset);
   1234   }
   1235 
   1236   // Translate the accumulator register (depending on frame position).
   1237   if (is_topmost) {
   1238     // For topmost frame, put the accumulator on the stack. The bailout state
   1239     // for interpreted frames is always set to {BailoutState::TOS_REGISTER} and
   1240     // the {NotifyDeoptimized} builtin pops it off the topmost frame (possibly
   1241     // after materialization).
   1242     output_offset -= kPointerSize;
   1243     if (goto_catch_handler) {
   1244       // If we are lazy deopting to a catch handler, we set the accumulator to
   1245       // the exception (which lives in the result register).
   1246       intptr_t accumulator_value =
   1247           input_->GetRegister(FullCodeGenerator::result_register().code());
   1248       WriteValueToOutput(reinterpret_cast<Object*>(accumulator_value), 0,
   1249                          frame_index, output_offset, "accumulator ");
   1250       value_iterator++;
   1251     } else {
   1252       WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
   1253                                    output_offset, "accumulator ");
   1254     }
   1255   } else {
   1256     // For non-topmost frames, skip the accumulator translation. For those
   1257     // frames, the return value from the callee will become the accumulator.
   1258     value_iterator++;
   1259     input_index++;
   1260   }
   1261   CHECK_EQ(0u, output_offset);
   1262 
   1263   // Compute this frame's PC and state. The PC will be a special builtin that
   1264   // continues the bytecode dispatch. Note that non-topmost and lazy-style
   1265   // bailout handlers also advance the bytecode offset before dispatch, hence
   1266   // simulating what normal handlers do upon completion of the operation.
   1267   Builtins* builtins = isolate_->builtins();
   1268   Code* dispatch_builtin =
   1269       (!is_topmost || (bailout_type_ == LAZY)) && !goto_catch_handler
   1270           ? builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance)
   1271           : builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
   1272   output_frame->SetPc(reinterpret_cast<intptr_t>(dispatch_builtin->entry()));
   1273   // Restore accumulator (TOS) register.
   1274   output_frame->SetState(
   1275       Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));
   1276 
   1277   // Update constant pool.
   1278   if (FLAG_enable_embedded_constant_pool) {
   1279     intptr_t constant_pool_value =
   1280         reinterpret_cast<intptr_t>(dispatch_builtin->constant_pool());
   1281     output_frame->SetConstantPool(constant_pool_value);
   1282     if (is_topmost) {
   1283       Register constant_pool_reg =
   1284           InterpretedFrame::constant_pool_pointer_register();
   1285       output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
   1286     }
   1287   }
   1288 
   1289   // Clear the context register. The context might be a de-materialized object
   1290   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
   1291   // safety we use Smi(0) instead of the potential {arguments_marker} here.
   1292   if (is_topmost) {
   1293     intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
   1294     Register context_reg = JavaScriptFrame::context_register();
   1295     output_frame->SetRegister(context_reg.code(), context_value);
   1296   }
   1297 
   1298   // Set the continuation for the topmost frame.
   1299   if (is_topmost) {
   1300     Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
   1301     if (bailout_type_ == LAZY) {
   1302       continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
   1303     } else if (bailout_type_ == SOFT) {
   1304       continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
   1305     } else {
   1306       CHECK_EQ(bailout_type_, EAGER);
   1307     }
   1308     output_frame->SetContinuation(
   1309         reinterpret_cast<intptr_t>(continuation->entry()));
   1310   }
   1311 }
   1312 
   1313 void Deoptimizer::DoComputeArgumentsAdaptorFrame(
   1314     TranslatedFrame* translated_frame, int frame_index) {
   1315   TranslatedFrame::iterator value_iterator = translated_frame->begin();
   1316   bool is_bottommost = (0 == frame_index);
   1317   int input_index = 0;
   1318 
   1319   unsigned height = translated_frame->height();
   1320   unsigned height_in_bytes = height * kPointerSize;
   1321   JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
   1322   value_iterator++;
   1323   input_index++;
   1324   if (trace_scope_ != NULL) {
   1325     PrintF(trace_scope_->file(),
   1326            "  translating arguments adaptor => height=%d\n", height_in_bytes);
   1327   }
   1328 
   1329   unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFixedFrameSize;
   1330   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
   1331 
   1332   // Allocate and store the output frame description.
   1333   int parameter_count = height;
   1334   FrameDescription* output_frame = new (output_frame_size)
   1335       FrameDescription(output_frame_size, parameter_count);
   1336   output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
   1337 
   1338   // An arguments adaptor frame cannot be the topmost frame.
   1339   CHECK(frame_index < output_count_ - 1);
   1340   CHECK(output_[frame_index] == NULL);
   1341   output_[frame_index] = output_frame;
   1342 
   1343   // The top address of the frame is computed from the previous frame's top and
   1344   // this frame's size.
   1345   intptr_t top_address;
   1346   if (is_bottommost) {
   1347     top_address = caller_frame_top_ - output_frame_size;
   1348   } else {
   1349     top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1350   }
   1351   output_frame->SetTop(top_address);
   1352 
   1353   // Compute the incoming parameter translation.
   1354   unsigned output_offset = output_frame_size;
   1355   for (int i = 0; i < parameter_count; ++i) {
   1356     output_offset -= kPointerSize;
   1357     WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
   1358                                  output_offset);
   1359   }
   1360 
   1361   // Read caller's PC from the previous frame.
   1362   output_offset -= kPCOnStackSize;
   1363   intptr_t value;
   1364   if (is_bottommost) {
   1365     value = caller_pc_;
   1366   } else {
   1367     value = output_[frame_index - 1]->GetPc();
   1368   }
   1369   output_frame->SetCallerPc(output_offset, value);
   1370   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");
   1371 
   1372   // Read caller's FP from the previous frame, and set this frame's FP.
   1373   output_offset -= kFPOnStackSize;
   1374   if (is_bottommost) {
   1375     value = caller_fp_;
   1376   } else {
   1377     value = output_[frame_index - 1]->GetFp();
   1378   }
   1379   output_frame->SetCallerFp(output_offset, value);
   1380   intptr_t fp_value = top_address + output_offset;
   1381   output_frame->SetFp(fp_value);
   1382   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
   1383 
   1384   if (FLAG_enable_embedded_constant_pool) {
   1385     // Read the caller's constant pool from the previous frame.
   1386     output_offset -= kPointerSize;
   1387     if (is_bottommost) {
   1388       value = caller_constant_pool_;
   1389     } else {
   1390       value = output_[frame_index - 1]->GetConstantPool();
   1391     }
   1392     output_frame->SetCallerConstantPool(output_offset, value);
   1393     DebugPrintOutputSlot(value, frame_index, output_offset,
   1394                          "caller's constant_pool\n");
   1395   }
   1396 
   1397   // A marker value is used in place of the context.
   1398   output_offset -= kPointerSize;
   1399   intptr_t context = StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR);
   1400   output_frame->SetFrameSlot(output_offset, context);
   1401   DebugPrintOutputSlot(context, frame_index, output_offset,
   1402                        "context (adaptor sentinel)\n");
   1403 
   1404   // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
   1405   output_offset -= kPointerSize;
   1406   value = reinterpret_cast<intptr_t>(function);
   1407   WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
   1408 
   1409   // Number of incoming arguments.
   1410   output_offset -= kPointerSize;
   1411   value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
   1412   output_frame->SetFrameSlot(output_offset, value);
   1413   DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
   1414   if (trace_scope_ != nullptr) {
   1415     PrintF(trace_scope_->file(), "(%d)\n", height - 1);
   1416   }
   1417 
   1418   DCHECK(0 == output_offset);
   1419 
   1420   Builtins* builtins = isolate_->builtins();
   1421   Code* adaptor_trampoline =
   1422       builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
   1423   intptr_t pc_value = reinterpret_cast<intptr_t>(
   1424       adaptor_trampoline->instruction_start() +
   1425       isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
   1426   output_frame->SetPc(pc_value);
   1427   if (FLAG_enable_embedded_constant_pool) {
   1428     intptr_t constant_pool_value =
   1429         reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
   1430     output_frame->SetConstantPool(constant_pool_value);
   1431   }
   1432 }
   1433 
   1434 void Deoptimizer::DoComputeTailCallerFrame(TranslatedFrame* translated_frame,
   1435                                            int frame_index) {
   1436   SharedFunctionInfo* shared = translated_frame->raw_shared_info();
   1437 
   1438   bool is_bottommost = (0 == frame_index);
   1439   // Tail caller frame can't be topmost.
   1440   CHECK_NE(output_count_ - 1, frame_index);
   1441 
   1442   if (trace_scope_ != NULL) {
   1443     PrintF(trace_scope_->file(), "  translating tail caller frame ");
   1444     std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
   1445     PrintF(trace_scope_->file(), "%s\n", name.get());
   1446   }
   1447 
   1448   if (!is_bottommost) return;
   1449 
   1450   // Drop the arguments adaptor frame below the current frame if it exists.
   1451   Address fp_address = input_->GetFramePointerAddress();
   1452   Address adaptor_fp_address =
   1453       Memory::Address_at(fp_address + CommonFrameConstants::kCallerFPOffset);
   1454 
   1455   if (StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR) !=
   1456       Memory::intptr_at(adaptor_fp_address +
   1457                         CommonFrameConstants::kContextOrFrameTypeOffset)) {
   1458     return;
   1459   }
   1460 
   1461   int caller_params_count =
   1462       Smi::cast(
   1463           Memory::Object_at(adaptor_fp_address +
   1464                             ArgumentsAdaptorFrameConstants::kLengthOffset))
   1465           ->value();
   1466 
   1467   int callee_params_count =
   1468       function_->shared()->internal_formal_parameter_count();
   1469 
   1470   // Neither the caller's nor the callee's parameter count includes the receiver.
   1471   int offset = (caller_params_count - callee_params_count) * kPointerSize;
   1472   intptr_t new_stack_fp =
   1473       reinterpret_cast<intptr_t>(adaptor_fp_address) + offset;
   1474 
   1475   intptr_t new_caller_frame_top = new_stack_fp +
   1476                                   (callee_params_count + 1) * kPointerSize +
   1477                                   CommonFrameConstants::kFixedFrameSizeAboveFp;
   1478 
   1479   intptr_t adaptor_caller_pc = Memory::intptr_at(
   1480       adaptor_fp_address + CommonFrameConstants::kCallerPCOffset);
   1481   intptr_t adaptor_caller_fp = Memory::intptr_at(
   1482       adaptor_fp_address + CommonFrameConstants::kCallerFPOffset);
   1483 
   1484   if (trace_scope_ != NULL) {
   1485     PrintF(trace_scope_->file(),
   1486            "    dropping caller arguments adaptor frame: offset=%d, "
   1487            "fp: 0x%08" V8PRIxPTR " -> 0x%08" V8PRIxPTR
   1488            ", "
   1489            "caller sp: 0x%08" V8PRIxPTR " -> 0x%08" V8PRIxPTR "\n",
   1490            offset, stack_fp_, new_stack_fp, caller_frame_top_,
   1491            new_caller_frame_top);
   1492   }
   1493   caller_frame_top_ = new_caller_frame_top;
   1494   caller_fp_ = adaptor_caller_fp;
   1495   caller_pc_ = adaptor_caller_pc;
   1496 }
   1497 
   1498 void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
   1499                                               int frame_index) {
   1500   TranslatedFrame::iterator value_iterator = translated_frame->begin();
   1501   bool is_topmost = (output_count_ - 1 == frame_index);
   1502   // The construct frame could become topmost only if we inlined a constructor
   1503   // call which does a tail call (otherwise the tail callee's frame would be
   1504   // the topmost one). So it could only be the LAZY case.
   1505   CHECK(!is_topmost || bailout_type_ == LAZY);
   1506   int input_index = 0;
   1507 
   1508   Builtins* builtins = isolate_->builtins();
   1509   Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
   1510   BailoutId bailout_id = translated_frame->node_id();
   1511   unsigned height = translated_frame->height();
   1512   unsigned height_in_bytes = height * kPointerSize;
   1513 
   1514   // If the construct frame is topmost, we must ensure that the value of the
   1515   // result register is preserved during continuation execution.
   1516   // We do this here by "pushing" the result of the constructor function to the
   1517   // top of the reconstructed stack and then using the
   1518   // BailoutState::TOS_REGISTER machinery.
   1519   if (is_topmost) {
   1520     height_in_bytes += kPointerSize;
   1521   }
   1522 
   1523   JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
   1524   value_iterator++;
   1525   input_index++;
   1526   if (trace_scope_ != NULL) {
   1527     PrintF(trace_scope_->file(),
   1528            "  translating construct stub => bailout_id=%d (%s), height=%d\n",
   1529            bailout_id.ToInt(),
   1530            bailout_id == BailoutId::ConstructStubCreate() ? "create" : "invoke",
   1531            height_in_bytes);
   1532   }
   1533 
   1534   unsigned fixed_frame_size = ConstructFrameConstants::kFixedFrameSize;
   1535   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
   1536 
   1537   // Allocate and store the output frame description.
   1538   FrameDescription* output_frame =
   1539       new (output_frame_size) FrameDescription(output_frame_size);
   1540   output_frame->SetFrameType(StackFrame::CONSTRUCT);
   1541 
   1542   // A construct stub frame cannot be the bottommost one.
   1543   DCHECK(frame_index > 0 && frame_index < output_count_);
   1544   DCHECK(output_[frame_index] == NULL);
   1545   output_[frame_index] = output_frame;
   1546 
   1547   // The top address of the frame is computed from the previous frame's top and
   1548   // this frame's size.
   1549   intptr_t top_address;
   1550   top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1551   output_frame->SetTop(top_address);
   1552 
   1553   // Compute the incoming parameter translation.
   1554   int parameter_count = height;
   1555   unsigned output_offset = output_frame_size;
   1556   for (int i = 0; i < parameter_count; ++i) {
   1557     output_offset -= kPointerSize;
   1558     // The allocated receiver of a construct stub frame is passed as the
   1559     // receiver parameter through the translation. It might encode a captured
   1560     // object; in that case, override the slot address for the captured object.
   1561     WriteTranslatedValueToOutput(
   1562         &value_iterator, &input_index, frame_index, output_offset, nullptr,
   1563         (i == 0) ? reinterpret_cast<Address>(top_address) : nullptr);
   1564   }
   1565 
   1566   // Read caller's PC from the previous frame.
   1567   output_offset -= kPCOnStackSize;
   1568   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   1569   output_frame->SetCallerPc(output_offset, callers_pc);
   1570   DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");
   1571 
   1572   // Read caller's FP from the previous frame, and set this frame's FP.
   1573   output_offset -= kFPOnStackSize;
   1574   intptr_t value = output_[frame_index - 1]->GetFp();
   1575   output_frame->SetCallerFp(output_offset, value);
   1576   intptr_t fp_value = top_address + output_offset;
   1577   output_frame->SetFp(fp_value);
   1578   if (is_topmost) {
   1579     Register fp_reg = JavaScriptFrame::fp_register();
   1580     output_frame->SetRegister(fp_reg.code(), fp_value);
   1581   }
   1582   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
   1583 
   1584   if (FLAG_enable_embedded_constant_pool) {
   1585     // Read the caller's constant pool from the previous frame.
   1586     output_offset -= kPointerSize;
   1587     value = output_[frame_index - 1]->GetConstantPool();
   1588     output_frame->SetCallerConstantPool(output_offset, value);
   1589     DebugPrintOutputSlot(value, frame_index, output_offset,
   1590                          "caller's constant_pool\n");
   1591   }
   1592 
   1593   // A marker value is used to mark the frame.
   1594   output_offset -= kPointerSize;
   1595   value = StackFrame::TypeToMarker(StackFrame::CONSTRUCT);
   1596   output_frame->SetFrameSlot(output_offset, value);
   1597   DebugPrintOutputSlot(value, frame_index, output_offset,
   1598                        "typed frame marker\n");
   1599 
   1600   // The context can be gotten from the previous frame.
   1601   output_offset -= kPointerSize;
   1602   value = output_[frame_index - 1]->GetContext();
   1603   output_frame->SetFrameSlot(output_offset, value);
   1604   DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");
   1605 
   1606   // Number of incoming arguments.
   1607   output_offset -= kPointerSize;
   1608   value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
   1609   output_frame->SetFrameSlot(output_offset, value);
   1610   DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
   1611   if (trace_scope_ != nullptr) {
   1612     PrintF(trace_scope_->file(), "(%d)\n", height - 1);
   1613   }
   1614 
   1615   if (bailout_id == BailoutId::ConstructStubCreate()) {
   1616     // The function was mentioned explicitly in the CONSTRUCT_STUB_FRAME.
   1617     output_offset -= kPointerSize;
   1618     value = reinterpret_cast<intptr_t>(function);
   1619     WriteValueToOutput(function, 0, frame_index, output_offset, "function ");
   1620   } else {
   1621     DCHECK(bailout_id == BailoutId::ConstructStubInvoke());
   1622     // The newly allocated object was passed as receiver in the artificial
   1623     // constructor stub environment created by HEnvironment::CopyForInlining().
   1624     output_offset -= kPointerSize;
   1625     value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
   1626     output_frame->SetFrameSlot(output_offset, value);
   1627     DebugPrintOutputSlot(value, frame_index, output_offset,
   1628                          "allocated receiver\n");
   1629   }
   1630 
   1631   if (is_topmost) {
   1632     // Ensure the result is restored back when we return to the stub.
   1633     output_offset -= kPointerSize;
   1634     Register result_reg = FullCodeGenerator::result_register();
   1635     value = input_->GetRegister(result_reg.code());
   1636     output_frame->SetFrameSlot(output_offset, value);
   1637     DebugPrintOutputSlot(value, frame_index, output_offset,
   1638                          "constructor result\n");
   1639 
   1640     output_frame->SetState(
   1641         Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));
   1642   }
   1643 
   1644   CHECK_EQ(0u, output_offset);
   1645 
   1646   // Compute this frame's PC.
   1647   DCHECK(bailout_id.IsValidForConstructStub());
   1648   Address start = construct_stub->instruction_start();
   1649   int pc_offset =
   1650       bailout_id == BailoutId::ConstructStubCreate()
   1651           ? isolate_->heap()->construct_stub_create_deopt_pc_offset()->value()
   1652           : isolate_->heap()->construct_stub_invoke_deopt_pc_offset()->value();
   1653   intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
   1654   output_frame->SetPc(pc_value);
   1655 
   1656   // Update constant pool.
   1657   if (FLAG_enable_embedded_constant_pool) {
   1658     intptr_t constant_pool_value =
   1659         reinterpret_cast<intptr_t>(construct_stub->constant_pool());
   1660     output_frame->SetConstantPool(constant_pool_value);
   1661     if (is_topmost) {
   1662       Register constant_pool_reg =
   1663           JavaScriptFrame::constant_pool_pointer_register();
   1664       output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
   1665     }
   1666   }
   1667 
   1668   // Clear the context register. The context might be a de-materialized object
   1669   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
   1670   // safety we use Smi(0) instead of the potential {arguments_marker} here.
   1671   if (is_topmost) {
   1672     intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
   1673     Register context_reg = JavaScriptFrame::context_register();
   1674     output_frame->SetRegister(context_reg.code(), context_value);
   1675   }
   1676 
   1677   // Set the continuation for the topmost frame.
   1678   if (is_topmost) {
   1679     Builtins* builtins = isolate_->builtins();
   1680     DCHECK_EQ(LAZY, bailout_type_);
   1681     Code* continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
   1682     output_frame->SetContinuation(
   1683         reinterpret_cast<intptr_t>(continuation->entry()));
   1684   }
   1685 }
   1686 
   1687 void Deoptimizer::DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
   1688                                              int frame_index,
   1689                                              bool is_setter_stub_frame) {
   1690   TranslatedFrame::iterator value_iterator = translated_frame->begin();
   1691   bool is_topmost = (output_count_ - 1 == frame_index);
   1692   // The accessor frame could become topmost only if we inlined an accessor
   1693   // call which does a tail call (otherwise the tail callee's frame would be
   1694   // the topmost one). So it could only be the LAZY case.
   1695   CHECK(!is_topmost || bailout_type_ == LAZY);
   1696   int input_index = 0;
   1697 
   1698   // Skip accessor.
   1699   value_iterator++;
   1700   input_index++;
   1701   // The receiver (and the implicit return value, if any) are expected in
   1702   // registers by the LoadIC/StoreIC, so they don't belong to the output stack
   1703   // frame. This means that we have to use a height of 0.
   1704   unsigned height = 0;
   1705   unsigned height_in_bytes = height * kPointerSize;
   1706 
   1707   // If the accessor frame is topmost, we must ensure that the value of the
   1708   // result register is preserved during continuation execution.
   1709   // We do this here by "pushing" the result of the accessor function to the
   1710   // top of the reconstructed stack and then using the
   1711   // BailoutState::TOS_REGISTER machinery.
   1712   // We don't need to restore the result in case of a setter call because we
   1713   // have to return the stored value but not the result of the setter function.
   1714   bool should_preserve_result = is_topmost && !is_setter_stub_frame;
   1715   if (should_preserve_result) {
   1716     height_in_bytes += kPointerSize;
   1717   }
   1718 
   1719   const char* kind = is_setter_stub_frame ? "setter" : "getter";
   1720   if (trace_scope_ != NULL) {
   1721     PrintF(trace_scope_->file(),
   1722            "  translating %s stub => height=%u\n", kind, height_in_bytes);
   1723   }
   1724 
   1725   // We need 1 stack entry for the return address and enough entries for the
   1726   // StackFrame::INTERNAL (FP, frame type, context, code object, and constant
   1727   // pool if enabled; see MacroAssembler::EnterFrame).
   1728   // For a setter stub frame we need one additional entry for the implicit
   1729   // return value, see StoreStubCompiler::CompileStoreViaSetter.
   1730   unsigned fixed_frame_entries =
   1731       (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
   1732       (is_setter_stub_frame ? 1 : 0);
   1733   unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
   1734   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
   1735 
   1736   // Allocate and store the output frame description.
   1737   FrameDescription* output_frame =
   1738       new (output_frame_size) FrameDescription(output_frame_size);
   1739   output_frame->SetFrameType(StackFrame::INTERNAL);
   1740 
   1741   // A frame for an accessor stub cannot be the bottommost one.
   1742   CHECK(frame_index > 0 && frame_index < output_count_);
   1743   CHECK_NULL(output_[frame_index]);
   1744   output_[frame_index] = output_frame;
   1745 
   1746   // The top address of the frame is computed from the previous frame's top and
   1747   // this frame's size.
   1748   intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1749   output_frame->SetTop(top_address);
   1750 
   1751   unsigned output_offset = output_frame_size;
   1752 
   1753   // Read caller's PC from the previous frame.
   1754   output_offset -= kPCOnStackSize;
   1755   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   1756   output_frame->SetCallerPc(output_offset, callers_pc);
   1757   DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");
   1758 
   1759   // Read caller's FP from the previous frame, and set this frame's FP.
   1760   output_offset -= kFPOnStackSize;
   1761   intptr_t value = output_[frame_index - 1]->GetFp();
   1762   output_frame->SetCallerFp(output_offset, value);
   1763   intptr_t fp_value = top_address + output_offset;
   1764   output_frame->SetFp(fp_value);
   1765   if (is_topmost) {
   1766     Register fp_reg = JavaScriptFrame::fp_register();
   1767     output_frame->SetRegister(fp_reg.code(), fp_value);
   1768   }
   1769   DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
   1770 
   1771   if (FLAG_enable_embedded_constant_pool) {
   1772     // Read the caller's constant pool from the previous frame.
   1773     output_offset -= kPointerSize;
   1774     value = output_[frame_index - 1]->GetConstantPool();
   1775     output_frame->SetCallerConstantPool(output_offset, value);
   1776     DebugPrintOutputSlot(value, frame_index, output_offset,
   1777                          "caller's constant_pool\n");
   1778   }
   1779 
   1780   // Set the frame type.
   1781   output_offset -= kPointerSize;
   1782   value = StackFrame::TypeToMarker(StackFrame::INTERNAL);
   1783   output_frame->SetFrameSlot(output_offset, value);
   1784   DebugPrintOutputSlot(value, frame_index, output_offset, "frame type ");
   1785   if (trace_scope_ != nullptr) {
   1786     PrintF(trace_scope_->file(), "(%s sentinel)\n", kind);
   1787   }
   1788 
   1789   // Get Code object from accessor stub.
   1790   output_offset -= kPointerSize;
   1791   Builtins::Name name = is_setter_stub_frame ?
   1792       Builtins::kStoreIC_Setter_ForDeopt :
   1793       Builtins::kLoadIC_Getter_ForDeopt;
   1794   Code* accessor_stub = isolate_->builtins()->builtin(name);
   1795   value = reinterpret_cast<intptr_t>(accessor_stub);
   1796   output_frame->SetFrameSlot(output_offset, value);
   1797   DebugPrintOutputSlot(value, frame_index, output_offset, "code object\n");
   1798 
   1799   // The context can be gotten from the previous frame.
   1800   output_offset -= kPointerSize;
   1801   value = output_[frame_index - 1]->GetContext();
   1802   output_frame->SetFrameSlot(output_offset, value);
   1803   DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");
   1804 
   1805   // Skip receiver.
   1806   value_iterator++;
   1807   input_index++;
   1808 
   1809   if (is_setter_stub_frame) {
   1810     // The implicit return value was part of the artificial setter stub
   1811     // environment.
   1812     output_offset -= kPointerSize;
   1813     WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
   1814                                  output_offset);
   1815   }
   1816 
   1817   if (should_preserve_result) {
   1818     // Ensure the result is restored back when we return to the stub.
   1819     output_offset -= kPointerSize;
   1820     Register result_reg = FullCodeGenerator::result_register();
   1821     value = input_->GetRegister(result_reg.code());
   1822     output_frame->SetFrameSlot(output_offset, value);
   1823     DebugPrintOutputSlot(value, frame_index, output_offset,
   1824                          "accessor result\n");
   1825 
   1826     output_frame->SetState(
   1827         Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));
   1828   } else {
   1829     output_frame->SetState(
   1830         Smi::FromInt(static_cast<int>(BailoutState::NO_REGISTERS)));
   1831   }
   1832 
   1833   CHECK_EQ(0u, output_offset);
   1834 
   1835   Smi* offset = is_setter_stub_frame ?
   1836       isolate_->heap()->setter_stub_deopt_pc_offset() :
   1837       isolate_->heap()->getter_stub_deopt_pc_offset();
   1838   intptr_t pc = reinterpret_cast<intptr_t>(
   1839       accessor_stub->instruction_start() + offset->value());
   1840   output_frame->SetPc(pc);
   1841 
   1842   // Update constant pool.
   1843   if (FLAG_enable_embedded_constant_pool) {
   1844     intptr_t constant_pool_value =
   1845         reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
   1846     output_frame->SetConstantPool(constant_pool_value);
   1847     if (is_topmost) {
   1848       Register constant_pool_reg =
   1849           JavaScriptFrame::constant_pool_pointer_register();
   1850       output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
   1851     }
   1852   }
   1853 
   1854   // Clear the context register. The context might be a de-materialized object
   1855   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
   1856   // safety we use Smi(0) instead of the potential {arguments_marker} here.
   1857   if (is_topmost) {
   1858     intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
   1859     Register context_reg = JavaScriptFrame::context_register();
   1860     output_frame->SetRegister(context_reg.code(), context_value);
   1861   }
   1862 
   1863   // Set the continuation for the topmost frame.
   1864   if (is_topmost) {
   1865     Builtins* builtins = isolate_->builtins();
   1866     DCHECK_EQ(LAZY, bailout_type_);
   1867     Code* continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
   1868     output_frame->SetContinuation(
   1869         reinterpret_cast<intptr_t>(continuation->entry()));
   1870   }
   1871 }
   1872 
   1873 void Deoptimizer::DoComputeCompiledStubFrame(TranslatedFrame* translated_frame,
   1874                                              int frame_index) {
   1875   //
   1876   //               FROM                                  TO
   1877   //    |          ....           |          |          ....           |
   1878   //    +-------------------------+          +-------------------------+
   1879   //    | JSFunction continuation |          | JSFunction continuation |
   1880   //    +-------------------------+          +-------------------------+
   1881   // |  |    saved frame (FP)     |          |    saved frame (FP)     |
   1882   // |  +=========================+<-fpreg   +=========================+<-fpreg
   1883   // |  |constant pool (if ool_cp)|          |constant pool (if ool_cp)|
   1884   // |  +-------------------------+          +-------------------------+
   1885   // |  |   JSFunction context    |          |   JSFunction context    |
   1886   // v  +-------------------------+          +-------------------------+
   1887   //    |   COMPILED_STUB marker  |          |   STUB_FAILURE marker   |
   1888   //    +-------------------------+          +-------------------------+
   1889   //    |                         |          |  caller args.arguments_ |
   1890   //    | ...                     |          +-------------------------+
   1891   //    |                         |          |  caller args.length_    |
   1892   //    |-------------------------|<-spreg   +-------------------------+
   1893   //                                         |  caller args pointer    |
   1894   //                                         +-------------------------+
   1895   //                                         |  caller stack param 1   |
   1896   //      parameters in registers            +-------------------------+
   1897   //       and spilled to stack              |           ....          |
   1898   //                                         +-------------------------+
   1899   //                                         |  caller stack param n   |
   1900   //                                         +-------------------------+<-spreg
   1901   //                                         reg = number of parameters
   1902   //                                         reg = failure handler address
   1903   //                                         reg = saved frame
   1904   //                                         reg = JSFunction context
   1905   //
   1906   // Caller stack params contain the register parameters to the stub first,
   1907   // and then, if the descriptor specifies a constant number of stack
   1908   // parameters, the stack parameters as well.
   1909 
   1910   TranslatedFrame::iterator value_iterator = translated_frame->begin();
   1911   int input_index = 0;
   1912 
   1913   CHECK(compiled_code_->is_hydrogen_stub());
   1914   int major_key = CodeStub::GetMajorKey(compiled_code_);
   1915   CodeStubDescriptor descriptor(isolate_, compiled_code_->stub_key());
   1916 
   1917   // The output frame must have room for all pushed register parameters
   1918   // and the standard stack frame slots.  Include space for an argument
   1919   // object to the callee and optionally the space to pass the argument
   1920   // object to the stub failure handler.
   1921   int param_count = descriptor.GetRegisterParameterCount();
   1922   int stack_param_count = descriptor.GetStackParameterCount();
   1923   // The translated frame contains all of the register parameters
   1924   // plus the context.
   1925   CHECK_EQ(translated_frame->height(), param_count + 1);
   1926   CHECK_GE(param_count, 0);
   1927 
   1928   int height_in_bytes = kPointerSize * (param_count + stack_param_count);
   1929   int fixed_frame_size = StubFailureTrampolineFrameConstants::kFixedFrameSize;
   1930   int output_frame_size = height_in_bytes + fixed_frame_size;
   1931   if (trace_scope_ != NULL) {
   1932     PrintF(trace_scope_->file(),
   1933            "  translating %s => StubFailureTrampolineStub, height=%d\n",
   1934            CodeStub::MajorName(static_cast<CodeStub::Major>(major_key)),
   1935            height_in_bytes);
   1936   }
   1937 
   1938   // The stub failure trampoline is a single frame.
   1939   FrameDescription* output_frame =
   1940       new (output_frame_size) FrameDescription(output_frame_size);
   1941   output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
   1942   CHECK_EQ(frame_index, 0);
   1943   output_[frame_index] = output_frame;
   1944 
   1945   // The top address of the frame is computed from the previous frame's top and
   1946   // this frame's size.
   1947   intptr_t top_address = caller_frame_top_ - output_frame_size;
   1948   output_frame->SetTop(top_address);
   1949 
   1950   // Set caller's PC (JSFunction continuation).
   1951   unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
   1952   intptr_t value = caller_pc_;
   1953   output_frame->SetCallerPc(output_frame_offset, value);
   1954   DebugPrintOutputSlot(value, frame_index, output_frame_offset,
   1955                        "caller's pc\n");
   1956 
   1957   // Read caller's FP from the input frame, and set this frame's FP.
   1958   value = caller_fp_;
   1959   output_frame_offset -= kFPOnStackSize;
   1960   output_frame->SetCallerFp(output_frame_offset, value);
   1961   intptr_t frame_ptr = top_address + output_frame_offset;
   1962   Register fp_reg = StubFailureTrampolineFrame::fp_register();
   1963   output_frame->SetRegister(fp_reg.code(), frame_ptr);
   1964   output_frame->SetFp(frame_ptr);
   1965   DebugPrintOutputSlot(value, frame_index, output_frame_offset,
   1966                        "caller's fp\n");
   1967 
   1968   if (FLAG_enable_embedded_constant_pool) {
   1969     // Read the caller's constant pool from the input frame.
   1970     value = caller_constant_pool_;
   1971     output_frame_offset -= kPointerSize;
   1972     output_frame->SetCallerConstantPool(output_frame_offset, value);
   1973     DebugPrintOutputSlot(value, frame_index, output_frame_offset,
   1974                          "caller's constant_pool\n");
   1975   }
   1976 
   1977   // The marker for the typed stack frame.
   1978   output_frame_offset -= kPointerSize;
   1979   value = StackFrame::TypeToMarker(StackFrame::STUB_FAILURE_TRAMPOLINE);
   1980   output_frame->SetFrameSlot(output_frame_offset, value);
   1981   DebugPrintOutputSlot(value, frame_index, output_frame_offset,
   1982                        "function (stub failure sentinel)\n");
   1983 
   1984   intptr_t caller_arg_count = stack_param_count;
   1985   bool arg_count_known = !descriptor.stack_parameter_count().is_valid();
   1986 
   1987   // Build the Arguments object for the caller's parameters and a pointer to it.
   1988   output_frame_offset -= kPointerSize;
   1989   int args_arguments_offset = output_frame_offset;
   1990   intptr_t the_hole = reinterpret_cast<intptr_t>(
   1991       isolate_->heap()->the_hole_value());
   1992   if (arg_count_known) {
   1993     value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
   1994         (caller_arg_count - 1) * kPointerSize;
   1995   } else {
   1996     value = the_hole;
   1997   }
   1998 
   1999   output_frame->SetFrameSlot(args_arguments_offset, value);
   2000   DebugPrintOutputSlot(
   2001       value, frame_index, args_arguments_offset,
   2002       arg_count_known ? "args.arguments\n" : "args.arguments (the hole)\n");
   2003 
   2004   output_frame_offset -= kPointerSize;
   2005   int length_frame_offset = output_frame_offset;
   2006   value = arg_count_known ? caller_arg_count : the_hole;
   2007   output_frame->SetFrameSlot(length_frame_offset, value);
   2008   DebugPrintOutputSlot(
   2009       value, frame_index, length_frame_offset,
   2010       arg_count_known ? "args.length\n" : "args.length (the hole)\n");
   2011 
   2012   output_frame_offset -= kPointerSize;
   2013   value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
   2014       (output_frame_size - output_frame_offset) + kPointerSize;
   2015   output_frame->SetFrameSlot(output_frame_offset, value);
   2016   DebugPrintOutputSlot(value, frame_index, output_frame_offset, "args*\n");
   2017 
   2018   // Copy the register parameters to the failure frame.
   2019   int arguments_length_offset = -1;
   2020   for (int i = 0; i < param_count; ++i) {
   2021     output_frame_offset -= kPointerSize;
   2022     WriteTranslatedValueToOutput(&value_iterator, &input_index, 0,
   2023                                  output_frame_offset);
   2024 
   2025     if (!arg_count_known &&
   2026         descriptor.GetRegisterParameter(i)
   2027             .is(descriptor.stack_parameter_count())) {
   2028       arguments_length_offset = output_frame_offset;
   2029     }
   2030   }
   2031 
   2032   Object* maybe_context = value_iterator->GetRawValue();
   2033   CHECK(maybe_context->IsContext());
   2034   Register context_reg = StubFailureTrampolineFrame::context_register();
   2035   value = reinterpret_cast<intptr_t>(maybe_context);
   2036   output_frame->SetRegister(context_reg.code(), value);
   2037   ++value_iterator;
   2038 
   2039   // Copy constant stack parameters to the failure frame. If the number of stack
   2040   // parameters is not known in the descriptor, the arguments object is the way
   2041   // to access them.
   2042   for (int i = 0; i < stack_param_count; i++) {
   2043     output_frame_offset -= kPointerSize;
   2044     Object** stack_parameter = reinterpret_cast<Object**>(
   2045         frame_ptr + StandardFrameConstants::kCallerSPOffset +
   2046         (stack_param_count - i - 1) * kPointerSize);
   2047     value = reinterpret_cast<intptr_t>(*stack_parameter);
   2048     output_frame->SetFrameSlot(output_frame_offset, value);
   2049     DebugPrintOutputSlot(value, frame_index, output_frame_offset,
   2050                          "stack parameter\n");
   2051   }
   2052 
   2053   CHECK_EQ(0u, output_frame_offset);
   2054 
   2055   if (!arg_count_known) {
   2056     CHECK_GE(arguments_length_offset, 0);
   2057     // We know it's a smi because 1) the code stub guarantees the stack
   2058     // parameter count is in smi range, and 2) the parameter loop above
   2059     // translated that count to a tagged value.
   2060     Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
   2061         output_frame->GetFrameSlot(arguments_length_offset));
   2062     caller_arg_count = smi_caller_arg_count->value();
   2063     output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
   2064     DebugPrintOutputSlot(caller_arg_count, frame_index, length_frame_offset,
   2065                          "args.length\n");
   2066     value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
   2067         (caller_arg_count - 1) * kPointerSize;
   2068     output_frame->SetFrameSlot(args_arguments_offset, value);
   2069     DebugPrintOutputSlot(value, frame_index, args_arguments_offset,
   2070                          "args.arguments");
   2071   }
   2072 
   2073   // Copy the double registers from the input into the output frame.
   2074   CopyDoubleRegisters(output_frame);
   2075 
   2076   // Fill registers containing handler and number of parameters.
   2077   SetPlatformCompiledStubRegisters(output_frame, &descriptor);
   2078 
   2079   // Compute this frame's PC, state, and continuation.
   2080   Code* trampoline = NULL;
   2081   StubFunctionMode function_mode = descriptor.function_mode();
   2082   StubFailureTrampolineStub(isolate_, function_mode)
   2083       .FindCodeInCache(&trampoline);
   2084   DCHECK(trampoline != NULL);
   2085   output_frame->SetPc(reinterpret_cast<intptr_t>(
   2086       trampoline->instruction_start()));
   2087   if (FLAG_enable_embedded_constant_pool) {
   2088     Register constant_pool_reg =
   2089         StubFailureTrampolineFrame::constant_pool_pointer_register();
   2090     intptr_t constant_pool_value =
   2091         reinterpret_cast<intptr_t>(trampoline->constant_pool());
   2092     output_frame->SetConstantPool(constant_pool_value);
   2093     output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
   2094   }
   2095   output_frame->SetState(
   2096       Smi::FromInt(static_cast<int>(BailoutState::NO_REGISTERS)));
   2097   Code* notify_failure =
   2098       isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
   2099   output_frame->SetContinuation(
   2100       reinterpret_cast<intptr_t>(notify_failure->entry()));
   2101 }
   2102 
   2103 
   2104 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
   2105   // Walk to the last JavaScript output frame to find out if it has
   2106   // adapted arguments.
   2107   for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
   2108     if (frame_index != 0) it->Advance();
   2109   }
   2110   translated_state_.Prepare(it->frame()->has_adapted_arguments(),
   2111                             reinterpret_cast<Address>(stack_fp_));
   2112 
   2113   for (auto& materialization : values_to_materialize_) {
   2114     Handle<Object> value = materialization.value_->GetValue();
   2115 
   2116     if (trace_scope_ != nullptr) {
   2117       PrintF(trace_scope_->file(), "Materialization [0x%08" V8PRIxPTR "] <- 0x%08" V8PRIxPTR " ;  ",
   2118              reinterpret_cast<intptr_t>(materialization.output_slot_address_),
   2119              reinterpret_cast<intptr_t>(*value));
   2120       value->ShortPrint(trace_scope_->file());
   2121       PrintF(trace_scope_->file(), "\n");
   2122     }
   2123 
   2124     *(reinterpret_cast<intptr_t*>(materialization.output_slot_address_)) =
   2125         reinterpret_cast<intptr_t>(*value);
   2126   }
   2127 
   2128   isolate_->materialized_object_store()->Remove(
   2129       reinterpret_cast<Address>(stack_fp_));
   2130 }
   2131 
   2132 
   2133 void Deoptimizer::WriteTranslatedValueToOutput(
   2134     TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
   2135     unsigned output_offset, const char* debug_hint_string,
   2136     Address output_address_for_materialization) {
   2137   Object* value = (*iterator)->GetRawValue();
   2138 
   2139   WriteValueToOutput(value, *input_index, frame_index, output_offset,
   2140                      debug_hint_string);
   2141 
   2142   if (value == isolate_->heap()->arguments_marker()) {
   2143     Address output_address =
   2144         reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
   2145         output_offset;
   2146     if (output_address_for_materialization == nullptr) {
   2147       output_address_for_materialization = output_address;
   2148     }
   2149     values_to_materialize_.push_back(
   2150         {output_address_for_materialization, *iterator});
   2151   }
   2152 
   2153   (*iterator)++;
   2154   (*input_index)++;
   2155 }
   2156 
   2157 
   2158 void Deoptimizer::WriteValueToOutput(Object* value, int input_index,
   2159                                      int frame_index, unsigned output_offset,
   2160                                      const char* debug_hint_string) {
   2161   output_[frame_index]->SetFrameSlot(output_offset,
   2162                                      reinterpret_cast<intptr_t>(value));
   2163 
   2164   if (trace_scope_ != nullptr) {
   2165     DebugPrintOutputSlot(reinterpret_cast<intptr_t>(value), frame_index,
   2166                          output_offset, debug_hint_string);
   2167     value->ShortPrint(trace_scope_->file());
   2168     PrintF(trace_scope_->file(), "  (input #%d)\n", input_index);
   2169   }
   2170 }
   2171 
   2172 
   2173 void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index,
   2174                                        unsigned output_offset,
   2175                                        const char* debug_hint_string) {
   2176   if (trace_scope_ != nullptr) {
   2177     Address output_address =
   2178         reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
   2179         output_offset;
   2180     PrintF(trace_scope_->file(),
   2181            "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ;  %s",
   2182            reinterpret_cast<intptr_t>(output_address), output_offset, value,
   2183            debug_hint_string == nullptr ? "" : debug_hint_string);
   2184   }
   2185 }
   2186 
   2187 unsigned Deoptimizer::ComputeInputFrameAboveFpFixedSize() const {
   2188   unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
   2189   if (!function_->IsSmi()) {
   2190     fixed_size += ComputeIncomingArgumentSize(function_->shared());
   2191   }
   2192   return fixed_size;
   2193 }
   2194 
   2195 unsigned Deoptimizer::ComputeInputFrameSize() const {
   2196   // The fp-to-sp delta already takes the context, constant pool pointer and the
   2197   // function into account so we have to avoid double counting them.
   2198   unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
   2199   unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
   2200   if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
   2201     unsigned stack_slots = compiled_code_->stack_slots();
   2202     unsigned outgoing_size =
   2203         ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
   2204     CHECK_EQ(fixed_size_above_fp + (stack_slots * kPointerSize) -
   2205                  CommonFrameConstants::kFixedFrameSizeAboveFp + outgoing_size,
   2206              result);
   2207   }
   2208   return result;
   2209 }
   2210 
   2211 // static
   2212 unsigned Deoptimizer::ComputeJavascriptFixedSize(SharedFunctionInfo* shared) {
   2213   // The fixed part of the frame consists of the return address, frame
   2214   // pointer, function, context, and all the incoming arguments.
   2215   return ComputeIncomingArgumentSize(shared) +
   2216          StandardFrameConstants::kFixedFrameSize;
   2217 }
   2218 
   2219 // static
   2220 unsigned Deoptimizer::ComputeInterpretedFixedSize(SharedFunctionInfo* shared) {
   2221   // The fixed part of the frame consists of the return address, frame
   2222   // pointer, function, context, new.target, bytecode array, bytecode offset
   2223   // and all the incoming arguments.
   2224   return ComputeIncomingArgumentSize(shared) +
   2225          InterpreterFrameConstants::kFixedFrameSize;
   2226 }
   2227 
   2228 // static
   2229 unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo* shared) {
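  // For example, assuming a 64-bit target where kPointerSize is 8, a function
  // declared with two formal parameters reserves (2 + 1) * 8 = 24 bytes here;
  // the extra slot accounts for the implicit receiver.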
   2230   return (shared->internal_formal_parameter_count() + 1) * kPointerSize;
   2231 }
   2232 
   2233 
   2234 // static
   2235 unsigned Deoptimizer::ComputeOutgoingArgumentSize(Code* code,
   2236                                                   unsigned bailout_id) {
   2237   DeoptimizationInputData* data =
   2238       DeoptimizationInputData::cast(code->deoptimization_data());
   2239   unsigned height = data->ArgumentsStackHeight(bailout_id)->value();
   2240   return height * kPointerSize;
   2241 }
   2242 
   2243 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
   2244                                                    BailoutType type,
   2245                                                    int max_entry_id) {
   2246   // We cannot run this if the serializer is enabled because this will
   2247   // cause us to emit relocation information for the external
   2248   // references. This is fine because the deoptimizer's code section
   2249   // isn't meant to be serialized at all.
   2250   CHECK(type == EAGER || type == SOFT || type == LAZY);
   2251   DeoptimizerData* data = isolate->deoptimizer_data();
   2252   int entry_count = data->deopt_entry_code_entries_[type];
   2253   if (max_entry_id < entry_count) return;
   2254   entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
   2255   while (max_entry_id >= entry_count) entry_count *= 2;
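  // Illustrative growth example (the actual constants are declared on the
  // Deoptimizer class): if kMinNumberOfEntries were 16 and max_entry_id were
  // 40, entry_count would grow 16 -> 32 -> 64 before the table is regenerated.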
   2256   CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);
   2257 
   2258   MacroAssembler masm(isolate, NULL, 16 * KB, CodeObjectRequired::kYes);
   2259   masm.set_emit_debug_code(false);
   2260   GenerateDeoptimizationEntries(&masm, entry_count, type);
   2261   CodeDesc desc;
   2262   masm.GetCode(&desc);
   2263   DCHECK(!RelocInfo::RequiresRelocation(desc));
   2264 
   2265   MemoryChunk* chunk = data->deopt_entry_code_[type];
   2266   CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
   2267         desc.instr_size);
   2268   if (!chunk->CommitArea(desc.instr_size)) {
   2269     V8::FatalProcessOutOfMemory(
   2270         "Deoptimizer::EnsureCodeForDeoptimizationEntry");
   2271   }
   2272   CopyBytes(chunk->area_start(), desc.buffer,
   2273             static_cast<size_t>(desc.instr_size));
   2274   Assembler::FlushICache(isolate, chunk->area_start(), desc.instr_size);
   2275 
   2276   data->deopt_entry_code_entries_[type] = entry_count;
   2277 }
   2278 
   2279 FrameDescription::FrameDescription(uint32_t frame_size, int parameter_count)
   2280     : frame_size_(frame_size),
   2281       parameter_count_(parameter_count),
   2282       top_(kZapUint32),
   2283       pc_(kZapUint32),
   2284       fp_(kZapUint32),
   2285       context_(kZapUint32),
   2286       constant_pool_(kZapUint32) {
   2287   // Zap all the registers.
   2288   for (int r = 0; r < Register::kNumRegisters; r++) {
   2289     // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
   2290     // isn't used before the next safepoint, the GC will try to scan it as a
   2291     // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
   2292     SetRegister(r, kZapUint32);
   2293   }
   2294 
   2295   // Zap all the slots.
   2296   for (unsigned o = 0; o < frame_size; o += kPointerSize) {
   2297     SetFrameSlot(o, kZapUint32);
   2298   }
   2299 }
   2300 
   2301 void TranslationBuffer::Add(int32_t value) {
   2302   // This wouldn't handle kMinInt correctly if it ever encountered it.
   2303   DCHECK(value != kMinInt);
   2304   // Encode the sign bit in the least significant bit.
   2305   bool is_negative = (value < 0);
   2306   uint32_t bits = ((is_negative ? -value : value) << 1) |
   2307       static_cast<int32_t>(is_negative);
   2308   // Encode the individual bytes using the least significant bit of
   2309   // each byte to indicate whether or not more bytes follow.
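  // For example, Add(200) maps the value to bits = (200 << 1) | 0 = 400 and
  // emits the byte sequence {0x21, 0x06}: the first byte is
  // ((400 << 1) & 0xFF) | 1 (low bit set because more bytes follow), and the
  // second is ((400 >> 7) << 1) | 0.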
   2310   do {
   2311     uint32_t next = bits >> 7;
   2312     contents_.push_back(((bits << 1) & 0xFF) | (next != 0));
   2313     bits = next;
   2314   } while (bits != 0);
   2315 }
   2316 
   2317 
   2318 int32_t TranslationIterator::Next() {
   2319   // Run through the bytes until we reach one with a least significant
   2320   // bit of zero (marks the end).
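  // Continuing the example from TranslationBuffer::Add, the bytes {0x21, 0x06}
  // contribute (0x21 >> 1) << 0 = 16 and (0x06 >> 1) << 7 = 384, so
  // bits = 400; the sign bit (400 & 1) is 0 and the decoded value is
  // 400 >> 1 = 200.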
   2321   uint32_t bits = 0;
   2322   for (int i = 0; true; i += 7) {
   2323     DCHECK(HasNext());
   2324     uint8_t next = buffer_->get(index_++);
   2325     bits |= (next >> 1) << i;
   2326     if ((next & 1) == 0) break;
   2327   }
   2328   // The bits encode the sign in the least significant bit.
   2329   bool is_negative = (bits & 1) == 1;
   2330   int32_t result = bits >> 1;
   2331   return is_negative ? -result : result;
   2332 }
   2333 
   2334 
   2335 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
   2336   Handle<ByteArray> result = factory->NewByteArray(CurrentIndex(), TENURED);
   2337   contents_.CopyTo(result->GetDataStartAddress());
   2338   return result;
   2339 }
   2340 
   2341 void Translation::BeginConstructStubFrame(BailoutId bailout_id, int literal_id,
   2342                                           unsigned height) {
   2343   buffer_->Add(CONSTRUCT_STUB_FRAME);
   2344   buffer_->Add(bailout_id.ToInt());
   2345   buffer_->Add(literal_id);
   2346   buffer_->Add(height);
   2347 }
   2348 
   2349 
   2350 void Translation::BeginGetterStubFrame(int literal_id) {
   2351   buffer_->Add(GETTER_STUB_FRAME);
   2352   buffer_->Add(literal_id);
   2353 }
   2354 
   2355 
   2356 void Translation::BeginSetterStubFrame(int literal_id) {
   2357   buffer_->Add(SETTER_STUB_FRAME);
   2358   buffer_->Add(literal_id);
   2359 }
   2360 
   2361 
   2362 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
   2363   buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
   2364   buffer_->Add(literal_id);
   2365   buffer_->Add(height);
   2366 }
   2367 
   2368 void Translation::BeginTailCallerFrame(int literal_id) {
   2369   buffer_->Add(TAIL_CALLER_FRAME);
   2370   buffer_->Add(literal_id);
   2371 }
   2372 
   2373 void Translation::BeginJSFrame(BailoutId node_id, int literal_id,
   2374                                unsigned height) {
   2375   buffer_->Add(JS_FRAME);
   2376   buffer_->Add(node_id.ToInt());
   2377   buffer_->Add(literal_id);
   2378   buffer_->Add(height);
   2379 }
   2380 
   2381 
   2382 void Translation::BeginInterpretedFrame(BailoutId bytecode_offset,
   2383                                         int literal_id, unsigned height) {
   2384   buffer_->Add(INTERPRETED_FRAME);
   2385   buffer_->Add(bytecode_offset.ToInt());
   2386   buffer_->Add(literal_id);
   2387   buffer_->Add(height);
   2388 }
   2389 
   2390 
   2391 void Translation::BeginCompiledStubFrame(int height) {
   2392   buffer_->Add(COMPILED_STUB_FRAME);
   2393   buffer_->Add(height);
   2394 }
   2395 
   2396 
   2397 void Translation::BeginArgumentsObject(int args_length) {
   2398   buffer_->Add(ARGUMENTS_OBJECT);
   2399   buffer_->Add(args_length);
   2400 }
   2401 
   2402 
   2403 void Translation::BeginCapturedObject(int length) {
   2404   buffer_->Add(CAPTURED_OBJECT);
   2405   buffer_->Add(length);
   2406 }
   2407 
   2408 
   2409 void Translation::DuplicateObject(int object_index) {
   2410   buffer_->Add(DUPLICATED_OBJECT);
   2411   buffer_->Add(object_index);
   2412 }
   2413 
   2414 
   2415 void Translation::StoreRegister(Register reg) {
   2416   buffer_->Add(REGISTER);
   2417   buffer_->Add(reg.code());
   2418 }
   2419 
   2420 
   2421 void Translation::StoreInt32Register(Register reg) {
   2422   buffer_->Add(INT32_REGISTER);
   2423   buffer_->Add(reg.code());
   2424 }
   2425 
   2426 
   2427 void Translation::StoreUint32Register(Register reg) {
   2428   buffer_->Add(UINT32_REGISTER);
   2429   buffer_->Add(reg.code());
   2430 }
   2431 
   2432 
   2433 void Translation::StoreBoolRegister(Register reg) {
   2434   buffer_->Add(BOOL_REGISTER);
   2435   buffer_->Add(reg.code());
   2436 }
   2437 
   2438 void Translation::StoreFloatRegister(FloatRegister reg) {
   2439   buffer_->Add(FLOAT_REGISTER);
   2440   buffer_->Add(reg.code());
   2441 }
   2442 
   2443 void Translation::StoreDoubleRegister(DoubleRegister reg) {
   2444   buffer_->Add(DOUBLE_REGISTER);
   2445   buffer_->Add(reg.code());
   2446 }
   2447 
   2448 
   2449 void Translation::StoreStackSlot(int index) {
   2450   buffer_->Add(STACK_SLOT);
   2451   buffer_->Add(index);
   2452 }
   2453 
   2454 
   2455 void Translation::StoreInt32StackSlot(int index) {
   2456   buffer_->Add(INT32_STACK_SLOT);
   2457   buffer_->Add(index);
   2458 }
   2459 
   2460 
   2461 void Translation::StoreUint32StackSlot(int index) {
   2462   buffer_->Add(UINT32_STACK_SLOT);
   2463   buffer_->Add(index);
   2464 }
   2465 
   2466 
   2467 void Translation::StoreBoolStackSlot(int index) {
   2468   buffer_->Add(BOOL_STACK_SLOT);
   2469   buffer_->Add(index);
   2470 }
   2471 
   2472 void Translation::StoreFloatStackSlot(int index) {
   2473   buffer_->Add(FLOAT_STACK_SLOT);
   2474   buffer_->Add(index);
   2475 }
   2476 
   2477 void Translation::StoreDoubleStackSlot(int index) {
   2478   buffer_->Add(DOUBLE_STACK_SLOT);
   2479   buffer_->Add(index);
   2480 }
   2481 
   2482 
   2483 void Translation::StoreLiteral(int literal_id) {
   2484   buffer_->Add(LITERAL);
   2485   buffer_->Add(literal_id);
   2486 }
   2487 
   2488 
   2489 void Translation::StoreArgumentsObject(bool args_known,
   2490                                        int args_index,
   2491                                        int args_length) {
   2492   buffer_->Add(ARGUMENTS_OBJECT);
   2493   buffer_->Add(args_known);
   2494   buffer_->Add(args_index);
   2495   buffer_->Add(args_length);
   2496 }
   2497 
   2498 
   2499 void Translation::StoreJSFrameFunction() {
   2500   StoreStackSlot((StandardFrameConstants::kCallerPCOffset -
   2501                   StandardFrameConstants::kFunctionOffset) /
   2502                  kPointerSize);
   2503 }
   2504 
   2505 int Translation::NumberOfOperandsFor(Opcode opcode) {
   2506   switch (opcode) {
   2507     case GETTER_STUB_FRAME:
   2508     case SETTER_STUB_FRAME:
   2509     case DUPLICATED_OBJECT:
   2510     case ARGUMENTS_OBJECT:
   2511     case CAPTURED_OBJECT:
   2512     case REGISTER:
   2513     case INT32_REGISTER:
   2514     case UINT32_REGISTER:
   2515     case BOOL_REGISTER:
   2516     case FLOAT_REGISTER:
   2517     case DOUBLE_REGISTER:
   2518     case STACK_SLOT:
   2519     case INT32_STACK_SLOT:
   2520     case UINT32_STACK_SLOT:
   2521     case BOOL_STACK_SLOT:
   2522     case FLOAT_STACK_SLOT:
   2523     case DOUBLE_STACK_SLOT:
   2524     case LITERAL:
   2525     case COMPILED_STUB_FRAME:
   2526     case TAIL_CALLER_FRAME:
   2527       return 1;
   2528     case BEGIN:
   2529     case ARGUMENTS_ADAPTOR_FRAME:
   2530       return 2;
   2531     case JS_FRAME:
   2532     case INTERPRETED_FRAME:
   2533     case CONSTRUCT_STUB_FRAME:
   2534       return 3;
   2535   }
   2536   FATAL("Unexpected translation type");
   2537   return -1;
   2538 }
   2539 
   2540 
   2541 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
   2542 
   2543 const char* Translation::StringFor(Opcode opcode) {
   2544 #define TRANSLATION_OPCODE_CASE(item)   case item: return #item;
   2545   switch (opcode) {
   2546     TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
   2547   }
   2548 #undef TRANSLATION_OPCODE_CASE
   2549   UNREACHABLE();
   2550   return "";
   2551 }
   2552 
   2553 #endif
   2554 
   2555 
   2556 Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
   2557   int index = StackIdToIndex(fp);
   2558   if (index == -1) {
   2559     return Handle<FixedArray>::null();
   2560   }
   2561   Handle<FixedArray> array = GetStackEntries();
   2562   CHECK_GT(array->length(), index);
   2563   return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate()));
   2564 }
   2565 
   2566 
   2567 void MaterializedObjectStore::Set(Address fp,
   2568                                   Handle<FixedArray> materialized_objects) {
   2569   int index = StackIdToIndex(fp);
   2570   if (index == -1) {
   2571     index = frame_fps_.length();
   2572     frame_fps_.Add(fp);
   2573   }
   2574 
   2575   Handle<FixedArray> array = EnsureStackEntries(index + 1);
   2576   array->set(index, *materialized_objects);
   2577 }
   2578 
   2579 
   2580 bool MaterializedObjectStore::Remove(Address fp) {
   2581   int index = StackIdToIndex(fp);
   2582   if (index == -1) {
   2583     return false;
   2584   }
   2585   CHECK_GE(index, 0);
   2586 
   2587   frame_fps_.Remove(index);
   2588   FixedArray* array = isolate()->heap()->materialized_objects();
   2589   CHECK_LT(index, array->length());
   2590   for (int i = index; i < frame_fps_.length(); i++) {
   2591     array->set(i, array->get(i + 1));
   2592   }
   2593   array->set(frame_fps_.length(), isolate()->heap()->undefined_value());
   2594   return true;
   2595 }
   2596 
   2597 
   2598 int MaterializedObjectStore::StackIdToIndex(Address fp) {
   2599   for (int i = 0; i < frame_fps_.length(); i++) {
   2600     if (frame_fps_[i] == fp) {
   2601       return i;
   2602     }
   2603   }
   2604   return -1;
   2605 }
   2606 
   2607 
   2608 Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
   2609   return Handle<FixedArray>(isolate()->heap()->materialized_objects());
   2610 }
   2611 
   2612 
   2613 Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
   2614   Handle<FixedArray> array = GetStackEntries();
   2615   if (array->length() >= length) {
   2616     return array;
   2617   }
   2618 
   2619   int new_length = length > 10 ? length : 10;
   2620   if (new_length < 2 * array->length()) {
   2621     new_length = 2 * array->length();
   2622   }
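          // For example, growing an empty store to hold 3 entries allocates 10
          // slots, while growing a 12-entry store to hold 13 doubles it to 24.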
   2623 
   2624   Handle<FixedArray> new_array =
   2625       isolate()->factory()->NewFixedArray(new_length, TENURED);
   2626   for (int i = 0; i < array->length(); i++) {
   2627     new_array->set(i, array->get(i));
   2628   }
   2629   for (int i = array->length(); i < length; i++) {
   2630     new_array->set(i, isolate()->heap()->undefined_value());
   2631   }
   2632   isolate()->heap()->SetRootMaterializedObjects(*new_array);
   2633   return new_array;
   2634 }
   2635 
   2636 namespace {
   2637 
   2638 Handle<Object> GetValueForDebugger(TranslatedFrame::iterator it,
   2639                                    Isolate* isolate) {
   2640   if (it->GetRawValue() == isolate->heap()->arguments_marker()) {
   2641     if (!it->IsMaterializableByDebugger()) {
   2642       return isolate->factory()->undefined_value();
   2643     }
   2644   }
   2645   return it->GetValue();
   2646 }
   2647 
   2648 }  // namespace
   2649 
   2650 DeoptimizedFrameInfo::DeoptimizedFrameInfo(TranslatedState* state,
   2651                                            TranslatedState::iterator frame_it,
   2652                                            Isolate* isolate) {
   2653   // If the previous frame is an adaptor frame, we will take the parameters
   2654   // from there.
   2655   TranslatedState::iterator parameter_frame = frame_it;
   2656   if (parameter_frame != state->begin()) {
   2657     parameter_frame--;
   2658   }
   2659   int parameter_count;
   2660   if (parameter_frame->kind() == TranslatedFrame::kArgumentsAdaptor) {
   2661     parameter_count = parameter_frame->height() - 1;  // Ignore the receiver.
   2662   } else {
   2663     parameter_frame = frame_it;
   2664     parameter_count =
   2665         frame_it->shared_info()->internal_formal_parameter_count();
   2666   }
   2667   TranslatedFrame::iterator parameter_it = parameter_frame->begin();
   2668   parameter_it++;  // Skip the function.
   2669   parameter_it++;  // Skip the receiver.
   2670 
   2671   // Figure out whether there is a construct stub frame on top of
   2672   // the parameter frame.
   2673   has_construct_stub_ =
   2674       parameter_frame != state->begin() &&
   2675       (parameter_frame - 1)->kind() == TranslatedFrame::kConstructStub;
   2676 
   2677   if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
   2678     source_position_ = Deoptimizer::ComputeSourcePositionFromBytecodeArray(
   2679         *frame_it->shared_info(), frame_it->node_id());
   2680   } else {
   2681     DCHECK_EQ(TranslatedFrame::kFunction, frame_it->kind());
   2682     source_position_ = Deoptimizer::ComputeSourcePositionFromBaselineCode(
   2683         *frame_it->shared_info(), frame_it->node_id());
   2684   }
   2685 
   2686   TranslatedFrame::iterator value_it = frame_it->begin();
   2687   // Get the function. Note that this might materialize the function.
   2688   // In case the debugger mutates this value, we should deoptimize
   2689   // the function and remember the value in the materialized value store.
   2690   function_ = Handle<JSFunction>::cast(value_it->GetValue());
   2691 
   2692   parameters_.resize(static_cast<size_t>(parameter_count));
   2693   for (int i = 0; i < parameter_count; i++) {
   2694     Handle<Object> parameter = GetValueForDebugger(parameter_it, isolate);
   2695     SetParameter(i, parameter);
   2696     parameter_it++;
   2697   }
   2698 
   2699   // Skip the function, the receiver and the arguments.
   2700   int skip_count =
   2701       frame_it->shared_info()->internal_formal_parameter_count() + 2;
   2702   TranslatedFrame::iterator stack_it = frame_it->begin();
   2703   for (int i = 0; i < skip_count; i++) {
   2704     stack_it++;
   2705   }
   2706 
   2707   // Get the context.
   2708   context_ = GetValueForDebugger(stack_it, isolate);
   2709   stack_it++;
   2710 
   2711   // Get the expression stack.
   2712   int stack_height = frame_it->height();
   2713   if (frame_it->kind() == TranslatedFrame::kFunction ||
   2714       frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
   2715     // For full-code frames, we should not count the context.
   2716     // For interpreter frames, we should not count the accumulator.
   2717     // TODO(jarin): Clean up the indexing in translated frames.
   2718     stack_height--;
   2719   }
   2720   expression_stack_.resize(static_cast<size_t>(stack_height));
   2721   for (int i = 0; i < stack_height; i++) {
   2722     Handle<Object> expression = GetValueForDebugger(stack_it, isolate);
   2723     SetExpression(i, expression);
   2724     stack_it++;
   2725   }
   2726 
   2727   // For interpreter frames, skip the accumulator.
   2728   if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
   2729     stack_it++;
   2730   }
   2731   CHECK(stack_it == frame_it->end());
   2732 }
   2733 
   2734 
   2735 Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, Address pc) {
   2736   CHECK(code->instruction_start() <= pc && pc <= code->instruction_end());
   2737   SourcePosition last_position = SourcePosition::Unknown();
   2738   DeoptimizeReason last_reason = DeoptimizeReason::kNoReason;
   2739   int last_deopt_id = kNoDeoptimizationId;
   2740   int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
   2741              RelocInfo::ModeMask(RelocInfo::DEOPT_ID) |
   2742              RelocInfo::ModeMask(RelocInfo::DEOPT_SCRIPT_OFFSET) |
   2743              RelocInfo::ModeMask(RelocInfo::DEOPT_INLINING_ID);
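          // A DEOPT_SCRIPT_OFFSET entry is always immediately followed by its
          // DEOPT_INLINING_ID entry, so the loop below consumes them as a pair.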
   2744   for (RelocIterator it(code, mask); !it.done(); it.next()) {
   2745     RelocInfo* info = it.rinfo();
   2746     if (info->pc() >= pc) break;
   2747     if (info->rmode() == RelocInfo::DEOPT_SCRIPT_OFFSET) {
   2748       int script_offset = static_cast<int>(info->data());
   2749       it.next();
   2750       DCHECK(it.rinfo()->rmode() == RelocInfo::DEOPT_INLINING_ID);
   2751       int inlining_id = static_cast<int>(it.rinfo()->data());
   2752       last_position = SourcePosition(script_offset, inlining_id);
   2753     } else if (info->rmode() == RelocInfo::DEOPT_ID) {
   2754       last_deopt_id = static_cast<int>(info->data());
   2755     } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
   2756       last_reason = static_cast<DeoptimizeReason>(info->data());
   2757     }
   2758   }
   2759   return DeoptInfo(last_position, last_reason, last_deopt_id);
   2760 }
   2761 
   2762 
   2763 // static
   2764 int Deoptimizer::ComputeSourcePositionFromBaselineCode(
   2765     SharedFunctionInfo* shared, BailoutId node_id) {
   2766   DCHECK(shared->HasBaselineCode());
   2767   Code* code = shared->code();
   2768   FixedArray* raw_data = code->deoptimization_data();
   2769   DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
   2770   unsigned pc_and_state = Deoptimizer::GetOutputInfo(data, node_id, shared);
   2771   int code_offset =
   2772       static_cast<int>(FullCodeGenerator::PcField::decode(pc_and_state));
   2773   return AbstractCode::cast(code)->SourcePosition(code_offset);
   2774 }
   2775 
   2776 // static
   2777 int Deoptimizer::ComputeSourcePositionFromBytecodeArray(
   2778     SharedFunctionInfo* shared, BailoutId node_id) {
   2779   DCHECK(shared->HasBytecodeArray());
   2780   return AbstractCode::cast(shared->bytecode_array())
   2781       ->SourcePosition(node_id.ToInt());
   2782 }
   2783 
   2784 // static
   2785 TranslatedValue TranslatedValue::NewArgumentsObject(TranslatedState* container,
   2786                                                     int length,
   2787                                                     int object_index) {
   2788   TranslatedValue slot(container, kArgumentsObject);
   2789   slot.materialization_info_ = {object_index, length};
   2790   return slot;
   2791 }
   2792 
   2793 
   2794 // static
   2795 TranslatedValue TranslatedValue::NewDeferredObject(TranslatedState* container,
   2796                                                    int length,
   2797                                                    int object_index) {
   2798   TranslatedValue slot(container, kCapturedObject);
   2799   slot.materialization_info_ = {object_index, length};
   2800   return slot;
   2801 }
   2802 
   2803 
   2804 // static
   2805 TranslatedValue TranslatedValue::NewDuplicateObject(TranslatedState* container,
   2806                                                     int id) {
   2807   TranslatedValue slot(container, kDuplicatedObject);
   2808   slot.materialization_info_ = {id, -1};
   2809   return slot;
   2810 }
   2811 
   2812 
   2813 // static
   2814 TranslatedValue TranslatedValue::NewFloat(TranslatedState* container,
   2815                                           Float32 value) {
   2816   TranslatedValue slot(container, kFloat);
   2817   slot.float_value_ = value;
   2818   return slot;
   2819 }
   2820 
   2821 // static
   2822 TranslatedValue TranslatedValue::NewDouble(TranslatedState* container,
   2823                                            Float64 value) {
   2824   TranslatedValue slot(container, kDouble);
   2825   slot.double_value_ = value;
   2826   return slot;
   2827 }
   2828 
   2829 
   2830 // static
   2831 TranslatedValue TranslatedValue::NewInt32(TranslatedState* container,
   2832                                           int32_t value) {
   2833   TranslatedValue slot(container, kInt32);
   2834   slot.int32_value_ = value;
   2835   return slot;
   2836 }
   2837 
   2838 
   2839 // static
   2840 TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container,
   2841                                            uint32_t value) {
   2842   TranslatedValue slot(container, kUInt32);
   2843   slot.uint32_value_ = value;
   2844   return slot;
   2845 }
   2846 
   2847 
   2848 // static
   2849 TranslatedValue TranslatedValue::NewBool(TranslatedState* container,
   2850                                          uint32_t value) {
   2851   TranslatedValue slot(container, kBoolBit);
   2852   slot.uint32_value_ = value;
   2853   return slot;
   2854 }
   2855 
   2856 
   2857 // static
   2858 TranslatedValue TranslatedValue::NewTagged(TranslatedState* container,
   2859                                            Object* literal) {
   2860   TranslatedValue slot(container, kTagged);
   2861   slot.raw_literal_ = literal;
   2862   return slot;
   2863 }
   2864 
   2865 
   2866 // static
   2867 TranslatedValue TranslatedValue::NewInvalid(TranslatedState* container) {
   2868   return TranslatedValue(container, kInvalid);
   2869 }
   2870 
   2871 
   2872 Isolate* TranslatedValue::isolate() const { return container_->isolate(); }
   2873 
   2874 
   2875 Object* TranslatedValue::raw_literal() const {
   2876   DCHECK_EQ(kTagged, kind());
   2877   return raw_literal_;
   2878 }
   2879 
   2880 
   2881 int32_t TranslatedValue::int32_value() const {
   2882   DCHECK_EQ(kInt32, kind());
   2883   return int32_value_;
   2884 }
   2885 
   2886 
   2887 uint32_t TranslatedValue::uint32_value() const {
   2888   DCHECK(kind() == kUInt32 || kind() == kBoolBit);
   2889   return uint32_value_;
   2890 }
   2891 
   2892 Float32 TranslatedValue::float_value() const {
   2893   DCHECK_EQ(kFloat, kind());
   2894   return float_value_;
   2895 }
   2896 
   2897 Float64 TranslatedValue::double_value() const {
   2898   DCHECK_EQ(kDouble, kind());
   2899   return double_value_;
   2900 }
   2901 
   2902 
   2903 int TranslatedValue::object_length() const {
   2904   DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject);
   2905   return materialization_info_.length_;
   2906 }
   2907 
   2908 
   2909 int TranslatedValue::object_index() const {
   2910   DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject ||
   2911          kind() == kDuplicatedObject);
   2912   return materialization_info_.id_;
   2913 }
   2914 
   2915 
   2916 Object* TranslatedValue::GetRawValue() const {
   2917   // If we have a value, return it.
   2918   Handle<Object> result_handle;
   2919   if (value_.ToHandle(&result_handle)) {
   2920     return *result_handle;
   2921   }
   2922 
   2923   // Otherwise, do a best effort to get the value without allocation.
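          // For example, a kInt32 slot holding 42 can be returned directly as
          // Smi::FromInt(42), whereas a kUInt32 value above Smi::kMaxValue falls
          // through to the arguments marker and is materialized as a HeapNumber
          // later (see MaterializeSimple).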
   2924   switch (kind()) {
   2925     case kTagged:
   2926       return raw_literal();
   2927 
   2928     case kInt32: {
   2929       bool is_smi = Smi::IsValid(int32_value());
   2930       if (is_smi) {
   2931         return Smi::FromInt(int32_value());
   2932       }
   2933       break;
   2934     }
   2935 
   2936     case kUInt32: {
   2937       bool is_smi = (uint32_value() <= static_cast<uintptr_t>(Smi::kMaxValue));
   2938       if (is_smi) {
   2939         return Smi::FromInt(static_cast<int32_t>(uint32_value()));
   2940       }
   2941       break;
   2942     }
   2943 
   2944     case kBoolBit: {
   2945       if (uint32_value() == 0) {
   2946         return isolate()->heap()->false_value();
   2947       } else {
   2948         CHECK_EQ(1U, uint32_value());
   2949         return isolate()->heap()->true_value();
   2950       }
   2951     }
   2952 
   2953     default:
   2954       break;
   2955   }
   2956 
   2957   // If we could not get the value without allocation, return the arguments
   2958   // marker.
   2959   return isolate()->heap()->arguments_marker();
   2960 }
   2961 
   2962 
   2963 Handle<Object> TranslatedValue::GetValue() {
   2964   Handle<Object> result;
   2965   // If we already have a value, then get it.
   2966   if (value_.ToHandle(&result)) return result;
   2967 
   2968   // Otherwise we have to materialize.
   2969   switch (kind()) {
   2970     case TranslatedValue::kTagged:
   2971     case TranslatedValue::kInt32:
   2972     case TranslatedValue::kUInt32:
   2973     case TranslatedValue::kBoolBit:
   2974     case TranslatedValue::kFloat:
   2975     case TranslatedValue::kDouble: {
   2976       MaterializeSimple();
   2977       return value_.ToHandleChecked();
   2978     }
   2979 
   2980     case TranslatedValue::kArgumentsObject:
   2981     case TranslatedValue::kCapturedObject:
   2982     case TranslatedValue::kDuplicatedObject:
   2983       return container_->MaterializeObjectAt(object_index());
   2984 
   2985     case TranslatedValue::kInvalid:
   2986       FATAL("unexpected case");
   2987       return Handle<Object>::null();
   2988   }
   2989 
   2990   FATAL("internal error: value missing");
   2991   return Handle<Object>::null();
   2992 }
   2993 
   2994 
   2995 void TranslatedValue::MaterializeSimple() {
   2996   // If we already have materialized, return.
   2997   if (!value_.is_null()) return;
   2998 
   2999   Object* raw_value = GetRawValue();
   3000   if (raw_value != isolate()->heap()->arguments_marker()) {
   3001     // We can get the value without allocation, just return it here.
   3002     value_ = Handle<Object>(raw_value, isolate());
   3003     return;
   3004   }
   3005 
   3006   switch (kind()) {
   3007     case kInt32:
   3008       value_ = Handle<Object>(isolate()->factory()->NewNumber(int32_value()));
   3009       return;
   3010 
   3011     case kUInt32:
   3012       value_ = Handle<Object>(isolate()->factory()->NewNumber(uint32_value()));
   3013       return;
   3014 
   3015     case kFloat: {
   3016       double scalar_value = float_value().get_scalar();
   3017       value_ = Handle<Object>(isolate()->factory()->NewNumber(scalar_value));
   3018       return;
   3019     }
   3020 
   3021     case kDouble: {
   3022       if (double_value().is_hole_nan()) {
   3023         value_ = isolate()->factory()->hole_nan_value();
   3024         return;
   3025       }
   3026       double scalar_value = double_value().get_scalar();
   3027       value_ = Handle<Object>(isolate()->factory()->NewNumber(scalar_value));
   3028       return;
   3029     }
   3030 
   3031     case kCapturedObject:
   3032     case kDuplicatedObject:
   3033     case kArgumentsObject:
   3034     case kInvalid:
   3035     case kTagged:
   3036     case kBoolBit:
   3037       FATAL("internal error: unexpected materialization.");
   3038       break;
   3039   }
   3040 }
   3041 
   3042 
   3043 bool TranslatedValue::IsMaterializedObject() const {
   3044   switch (kind()) {
   3045     case kCapturedObject:
   3046     case kDuplicatedObject:
   3047     case kArgumentsObject:
   3048       return true;
   3049     default:
   3050       return false;
   3051   }
   3052 }
   3053 
   3054 bool TranslatedValue::IsMaterializableByDebugger() const {
   3055   // At the moment, we only allow materialization of doubles.
   3056   return (kind() == kDouble);
   3057 }
   3058 
   3059 int TranslatedValue::GetChildrenCount() const {
   3060   if (kind() == kCapturedObject || kind() == kArgumentsObject) {
   3061     return object_length();
   3062   } else {
   3063     return 0;
   3064   }
   3065 }
   3066 
   3067 
   3068 uint32_t TranslatedState::GetUInt32Slot(Address fp, int slot_offset) {
   3069   Address address = fp + slot_offset;
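          // Stack slots are pointer-sized; on big-endian 64-bit targets the
          // 32-bit payload occupies the higher-addressed half of the slot,
          // hence the kIntSize adjustment below.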
   3070 #if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
   3071   return Memory::uint32_at(address + kIntSize);
   3072 #else
   3073   return Memory::uint32_at(address);
   3074 #endif
   3075 }
   3076 
   3077 Float32 TranslatedState::GetFloatSlot(Address fp, int slot_offset) {
   3078   return Float32::FromBits(GetUInt32Slot(fp, slot_offset));
   3079 }
   3080 
   3081 Float64 TranslatedState::GetDoubleSlot(Address fp, int slot_offset) {
   3082   return Float64::FromBits(Memory::uint64_at(fp + slot_offset));
   3083 }
   3084 
   3085 void TranslatedValue::Handlify() {
   3086   if (kind() == kTagged) {
   3087     value_ = Handle<Object>(raw_literal(), isolate());
   3088     raw_literal_ = nullptr;
   3089   }
   3090 }
   3091 
   3092 
   3093 TranslatedFrame TranslatedFrame::JSFrame(BailoutId node_id,
   3094                                          SharedFunctionInfo* shared_info,
   3095                                          int height) {
   3096   TranslatedFrame frame(kFunction, shared_info->GetIsolate(), shared_info,
   3097                         height);
   3098   frame.node_id_ = node_id;
   3099   return frame;
   3100 }
   3101 
   3102 
   3103 TranslatedFrame TranslatedFrame::InterpretedFrame(
   3104     BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) {
   3105   TranslatedFrame frame(kInterpretedFunction, shared_info->GetIsolate(),
   3106                         shared_info, height);
   3107   frame.node_id_ = bytecode_offset;
   3108   return frame;
   3109 }
   3110 
   3111 
   3112 TranslatedFrame TranslatedFrame::AccessorFrame(
   3113     Kind kind, SharedFunctionInfo* shared_info) {
   3114   DCHECK(kind == kSetter || kind == kGetter);
   3115   return TranslatedFrame(kind, shared_info->GetIsolate(), shared_info);
   3116 }
   3117 
   3118 
   3119 TranslatedFrame TranslatedFrame::ArgumentsAdaptorFrame(
   3120     SharedFunctionInfo* shared_info, int height) {
   3121   return TranslatedFrame(kArgumentsAdaptor, shared_info->GetIsolate(),
   3122                          shared_info, height);
   3123 }
   3124 
   3125 TranslatedFrame TranslatedFrame::TailCallerFrame(
   3126     SharedFunctionInfo* shared_info) {
   3127   return TranslatedFrame(kTailCallerFunction, shared_info->GetIsolate(),
   3128                          shared_info, 0);
   3129 }
   3130 
   3131 TranslatedFrame TranslatedFrame::ConstructStubFrame(
   3132     BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
   3133   TranslatedFrame frame(kConstructStub, shared_info->GetIsolate(), shared_info,
   3134                         height);
   3135   frame.node_id_ = bailout_id;
   3136   return frame;
   3137 }
   3138 
   3139 
   3140 int TranslatedFrame::GetValueCount() {
   3141   switch (kind()) {
   3142     case kFunction: {
   3143       int parameter_count =
   3144           raw_shared_info_->internal_formal_parameter_count() + 1;
   3145       // + 1 for function.
   3146       return height_ + parameter_count + 1;
   3147     }
   3148 
   3149     case kInterpretedFunction: {
   3150       int parameter_count =
   3151           raw_shared_info_->internal_formal_parameter_count() + 1;
   3152       // + 2 for function and context.
   3153       return height_ + parameter_count + 2;
   3154     }
   3155 
   3156     case kGetter:
   3157       return 2;  // Function and receiver.
   3158 
   3159     case kSetter:
   3160       return 3;  // Function, receiver and the value to set.
   3161 
   3162     case kArgumentsAdaptor:
   3163     case kConstructStub:
   3164       return 1 + height_;
   3165 
   3166     case kTailCallerFunction:
   3167       return 1;  // Function.
   3168 
   3169     case kCompiledStub:
   3170       return height_;
   3171 
   3172     case kInvalid:
   3173       UNREACHABLE();
   3174       break;
   3175   }
   3176   UNREACHABLE();
   3177   return -1;
   3178 }
   3179 
   3180 
   3181 void TranslatedFrame::Handlify() {
   3182   if (raw_shared_info_ != nullptr) {
   3183     shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_);
   3184     raw_shared_info_ = nullptr;
   3185   }
   3186   for (auto& value : values_) {
   3187     value.Handlify();
   3188   }
   3189 }
   3190 
   3191 
   3192 TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
   3193     TranslationIterator* iterator, FixedArray* literal_array, Address fp,
   3194     FILE* trace_file) {
   3195   Translation::Opcode opcode =
   3196       static_cast<Translation::Opcode>(iterator->Next());
   3197   switch (opcode) {
   3198     case Translation::JS_FRAME: {
   3199       BailoutId node_id = BailoutId(iterator->Next());
   3200       SharedFunctionInfo* shared_info =
   3201           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3202       int height = iterator->Next();
   3203       if (trace_file != nullptr) {
   3204         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3205         PrintF(trace_file, "  reading input frame %s", name.get());
   3206         int arg_count = shared_info->internal_formal_parameter_count() + 1;
   3207         PrintF(trace_file, " => node=%d, args=%d, height=%d; inputs:\n",
   3208                node_id.ToInt(), arg_count, height);
   3209       }
   3210       return TranslatedFrame::JSFrame(node_id, shared_info, height);
   3211     }
   3212 
   3213     case Translation::INTERPRETED_FRAME: {
   3214       BailoutId bytecode_offset = BailoutId(iterator->Next());
   3215       SharedFunctionInfo* shared_info =
   3216           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3217       int height = iterator->Next();
   3218       if (trace_file != nullptr) {
   3219         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3220         PrintF(trace_file, "  reading input frame %s", name.get());
   3221         int arg_count = shared_info->internal_formal_parameter_count() + 1;
   3222         PrintF(trace_file,
   3223                " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
   3224                bytecode_offset.ToInt(), arg_count, height);
   3225       }
   3226       return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
   3227                                                height);
   3228     }
   3229 
   3230     case Translation::ARGUMENTS_ADAPTOR_FRAME: {
   3231       SharedFunctionInfo* shared_info =
   3232           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3233       int height = iterator->Next();
   3234       if (trace_file != nullptr) {
   3235         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3236         PrintF(trace_file, "  reading arguments adaptor frame %s", name.get());
   3237         PrintF(trace_file, " => height=%d; inputs:\n", height);
   3238       }
   3239       return TranslatedFrame::ArgumentsAdaptorFrame(shared_info, height);
   3240     }
   3241 
   3242     case Translation::TAIL_CALLER_FRAME: {
   3243       SharedFunctionInfo* shared_info =
   3244           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3245       if (trace_file != nullptr) {
   3246         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3247         PrintF(trace_file, "  reading tail caller frame marker %s\n",
   3248                name.get());
   3249       }
   3250       return TranslatedFrame::TailCallerFrame(shared_info);
   3251     }
   3252 
   3253     case Translation::CONSTRUCT_STUB_FRAME: {
   3254       BailoutId bailout_id = BailoutId(iterator->Next());
   3255       SharedFunctionInfo* shared_info =
   3256           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3257       int height = iterator->Next();
   3258       if (trace_file != nullptr) {
   3259         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3260         PrintF(trace_file, "  reading construct stub frame %s", name.get());
   3261         PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
   3262                bailout_id.ToInt(), height);
   3263       }
   3264       return TranslatedFrame::ConstructStubFrame(bailout_id, shared_info,
   3265                                                  height);
   3266     }
   3267 
   3268     case Translation::GETTER_STUB_FRAME: {
   3269       SharedFunctionInfo* shared_info =
   3270           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3271       if (trace_file != nullptr) {
   3272         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3273         PrintF(trace_file, "  reading getter frame %s; inputs:\n", name.get());
   3274       }
   3275       return TranslatedFrame::AccessorFrame(TranslatedFrame::kGetter,
   3276                                             shared_info);
   3277     }
   3278 
   3279     case Translation::SETTER_STUB_FRAME: {
   3280       SharedFunctionInfo* shared_info =
   3281           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
   3282       if (trace_file != nullptr) {
   3283         std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
   3284         PrintF(trace_file, "  reading setter frame %s; inputs:\n", name.get());
   3285       }
   3286       return TranslatedFrame::AccessorFrame(TranslatedFrame::kSetter,
   3287                                             shared_info);
   3288     }
   3289 
   3290     case Translation::COMPILED_STUB_FRAME: {
   3291       int height = iterator->Next();
   3292       if (trace_file != nullptr) {
   3293         PrintF(trace_file,
   3294                "  reading compiler stub frame => height=%d; inputs:\n", height);
   3295       }
   3296       return TranslatedFrame::CompiledStubFrame(height,
   3297                                                 literal_array->GetIsolate());
   3298     }
   3299 
   3300     case Translation::BEGIN:
   3301     case Translation::DUPLICATED_OBJECT:
   3302     case Translation::ARGUMENTS_OBJECT:
   3303     case Translation::CAPTURED_OBJECT:
   3304     case Translation::REGISTER:
   3305     case Translation::INT32_REGISTER:
   3306     case Translation::UINT32_REGISTER:
   3307     case Translation::BOOL_REGISTER:
   3308     case Translation::FLOAT_REGISTER:
   3309     case Translation::DOUBLE_REGISTER:
   3310     case Translation::STACK_SLOT:
   3311     case Translation::INT32_STACK_SLOT:
   3312     case Translation::UINT32_STACK_SLOT:
   3313     case Translation::BOOL_STACK_SLOT:
   3314     case Translation::FLOAT_STACK_SLOT:
   3315     case Translation::DOUBLE_STACK_SLOT:
   3316     case Translation::LITERAL:
   3317       break;
   3318   }
   3319   FATAL("We should never get here - unexpected deopt info.");
   3320   return TranslatedFrame::InvalidFrame();
   3321 }
   3322 
   3323 
   3324 // static
   3325 void TranslatedFrame::AdvanceIterator(
   3326     std::deque<TranslatedValue>::iterator* iter) {
   3327   int values_to_skip = 1;
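          // For example, advancing over a captured object with two leaf fields
          // consumes three values: the object marker itself plus its two
          // children.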
   3328   while (values_to_skip > 0) {
   3329     // Consume the current element.
   3330     values_to_skip--;
   3331     // Add all the children.
   3332     values_to_skip += (*iter)->GetChildrenCount();
   3333 
   3334     (*iter)++;
   3335   }
   3336 }
   3337 
   3338 
   3339 // We can't intermix stack decoding and allocations because
   3340 // the deoptimization infrastructure is not GC safe.
   3341 // Thus we build a temporary structure in malloced space.
   3342 TranslatedValue TranslatedState::CreateNextTranslatedValue(
   3343     int frame_index, int value_index, TranslationIterator* iterator,
   3344     FixedArray* literal_array, Address fp, RegisterValues* registers,
   3345     FILE* trace_file) {
   3346   disasm::NameConverter converter;
   3347 
   3348   Translation::Opcode opcode =
   3349       static_cast<Translation::Opcode>(iterator->Next());
   3350   switch (opcode) {
   3351     case Translation::BEGIN:
   3352     case Translation::JS_FRAME:
   3353     case Translation::INTERPRETED_FRAME:
   3354     case Translation::ARGUMENTS_ADAPTOR_FRAME:
   3355     case Translation::TAIL_CALLER_FRAME:
   3356     case Translation::CONSTRUCT_STUB_FRAME:
   3357     case Translation::GETTER_STUB_FRAME:
   3358     case Translation::SETTER_STUB_FRAME:
   3359     case Translation::COMPILED_STUB_FRAME:
   3360       // Peeled off before getting here.
   3361       break;
   3362 
   3363     case Translation::DUPLICATED_OBJECT: {
   3364       int object_id = iterator->Next();
   3365       if (trace_file != nullptr) {
   3366         PrintF(trace_file, "duplicated object #%d", object_id);
   3367       }
   3368       object_positions_.push_back(object_positions_[object_id]);
   3369       return TranslatedValue::NewDuplicateObject(this, object_id);
   3370     }
   3371 
   3372     case Translation::ARGUMENTS_OBJECT: {
   3373       int arg_count = iterator->Next();
   3374       int object_index = static_cast<int>(object_positions_.size());
   3375       if (trace_file != nullptr) {
   3376         PrintF(trace_file, "argumets object #%d (length = %d)", object_index,
   3377                arg_count);
   3378       }
   3379       object_positions_.push_back({frame_index, value_index});
   3380       return TranslatedValue::NewArgumentsObject(this, arg_count, object_index);
   3381     }
   3382 
   3383     case Translation::CAPTURED_OBJECT: {
   3384       int field_count = iterator->Next();
   3385       int object_index = static_cast<int>(object_positions_.size());
   3386       if (trace_file != nullptr) {
   3387         PrintF(trace_file, "captured object #%d (length = %d)", object_index,
   3388                field_count);
   3389       }
   3390       object_positions_.push_back({frame_index, value_index});
   3391       return TranslatedValue::NewDeferredObject(this, field_count,
   3392                                                 object_index);
   3393     }
   3394 
   3395     case Translation::REGISTER: {
   3396       int input_reg = iterator->Next();
   3397       if (registers == nullptr) return TranslatedValue::NewInvalid(this);
   3398       intptr_t value = registers->GetRegister(input_reg);
   3399       if (trace_file != nullptr) {
   3400         PrintF(trace_file, "0x%08" V8PRIxPTR " ; %s ", value,
   3401                converter.NameOfCPURegister(input_reg));
   3402         reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
   3403       }
   3404       return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
   3405     }
   3406 
   3407     case Translation::INT32_REGISTER: {
   3408       int input_reg = iterator->Next();
   3409       if (registers == nullptr) return TranslatedValue::NewInvalid(this);
   3410       intptr_t value = registers->GetRegister(input_reg);
   3411       if (trace_file != nullptr) {
   3412         PrintF(trace_file, "%" V8PRIdPTR " ; %s ", value,
   3413                converter.NameOfCPURegister(input_reg));
   3414       }
   3415       return TranslatedValue::NewInt32(this, static_cast<int32_t>(value));
   3416     }
   3417 
   3418     case Translation::UINT32_REGISTER: {
   3419       int input_reg = iterator->Next();
   3420       if (registers == nullptr) return TranslatedValue::NewInvalid(this);
   3421       intptr_t value = registers->GetRegister(input_reg);
   3422       if (trace_file != nullptr) {
   3423         PrintF(trace_file, "%" V8PRIuPTR " ; %s (uint)", value,
   3424                converter.NameOfCPURegister(input_reg));
   3425         reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
   3426       }
   3427       return TranslatedValue::NewUInt32(this, static_cast<uint32_t>(value));
   3428     }
   3429 
   3430     case Translation::BOOL_REGISTER: {
   3431       int input_reg = iterator->Next();
   3432       if (registers == nullptr) return TranslatedValue::NewInvalid(this);
   3433       intptr_t value = registers->GetRegister(input_reg);
   3434       if (trace_file != nullptr) {
   3435         PrintF(trace_file, "%" V8PRIdPTR " ; %s (bool)", value,
   3436                converter.NameOfCPURegister(input_reg));
   3437       }
   3438       return TranslatedValue::NewBool(this, static_cast<uint32_t>(value));
   3439     }
   3440 
   3441     case Translation::FLOAT_REGISTER: {
   3442       int input_reg = iterator->Next();
   3443       if (registers == nullptr) return TranslatedValue::NewInvalid(this);
   3444       Float32 value = registers->GetFloatRegister(input_reg);
   3445       if (trace_file != nullptr) {
   3446         PrintF(trace_file, "%e ; %s (float)", value.get_scalar(),
   3447                RegisterConfiguration::Crankshaft()->GetFloatRegisterName(
   3448                    input_reg));
   3449       }
   3450       return TranslatedValue::NewFloat(this, value);
   3451     }
   3452 
   3453     case Translation::DOUBLE_REGISTER: {
   3454       int input_reg = iterator->Next();
   3455       if (registers == nullptr) return TranslatedValue::NewInvalid(this);
   3456       Float64 value = registers->GetDoubleRegister(input_reg);
   3457       if (trace_file != nullptr) {
   3458         PrintF(trace_file, "%e ; %s (double)", value.get_scalar(),
   3459                RegisterConfiguration::Crankshaft()->GetDoubleRegisterName(
   3460                    input_reg));
   3461       }
   3462       return TranslatedValue::NewDouble(this, value);
   3463     }
   3464 
   3465     case Translation::STACK_SLOT: {
   3466       int slot_offset =
   3467           OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
   3468       intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset));
   3469       if (trace_file != nullptr) {
   3470         PrintF(trace_file, "0x%08" V8PRIxPTR " ; [fp %c %d] ", value,
   3471                slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
   3472         reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
   3473       }
   3474       return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
   3475     }
   3476 
   3477     case Translation::INT32_STACK_SLOT: {
   3478       int slot_offset =
   3479           OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
   3480       uint32_t value = GetUInt32Slot(fp, slot_offset);
   3481       if (trace_file != nullptr) {
   3482         PrintF(trace_file, "%d ; (int) [fp %c %d] ",
   3483                static_cast<int32_t>(value), slot_offset < 0 ? '-' : '+',
   3484                std::abs(slot_offset));
   3485       }
   3486       return TranslatedValue::NewInt32(this, value);
   3487     }
   3488 
   3489     case Translation::UINT32_STACK_SLOT: {
   3490       int slot_offset =
   3491           OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
   3492       uint32_t value = GetUInt32Slot(fp, slot_offset);
   3493       if (trace_file != nullptr) {
   3494         PrintF(trace_file, "%u ; (uint) [fp %c %d] ", value,
   3495                slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
   3496       }
   3497       return TranslatedValue::NewUInt32(this, value);
   3498     }
   3499 
   3500     case Translation::BOOL_STACK_SLOT: {
   3501       int slot_offset =
   3502           OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
   3503       uint32_t value = GetUInt32Slot(fp, slot_offset);
   3504       if (trace_file != nullptr) {
   3505         PrintF(trace_file, "%u ; (bool) [fp %c %d] ", value,
   3506                slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
   3507       }
   3508       return TranslatedValue::NewBool(this, value);
   3509     }
   3510 
   3511     case Translation::FLOAT_STACK_SLOT: {
   3512       int slot_offset =
   3513           OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
   3514       Float32 value = GetFloatSlot(fp, slot_offset);
   3515       if (trace_file != nullptr) {
   3516         PrintF(trace_file, "%e ; (float) [fp %c %d] ", value.get_scalar(),
   3517                slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
   3518       }
   3519       return TranslatedValue::NewFloat(this, value);
   3520     }
   3521 
   3522     case Translation::DOUBLE_STACK_SLOT: {
   3523       int slot_offset =
   3524           OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
   3525       Float64 value = GetDoubleSlot(fp, slot_offset);
   3526       if (trace_file != nullptr) {
   3527         PrintF(trace_file, "%e ; (double) [fp %c %d] ", value.get_scalar(),
   3528                slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
   3529       }
   3530       return TranslatedValue::NewDouble(this, value);
   3531     }
   3532 
   3533     case Translation::LITERAL: {
   3534       int literal_index = iterator->Next();
   3535       Object* value = literal_array->get(literal_index);
   3536       if (trace_file != nullptr) {
   3537         PrintF(trace_file, "0x%08" V8PRIxPTR " ; (literal %d) ",
   3538                reinterpret_cast<intptr_t>(value), literal_index);
   3539         reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
   3540       }
   3541 
   3542       return TranslatedValue::NewTagged(this, value);
   3543     }
   3544   }
   3545 
   3546   FATAL("We should never get here - unexpected deopt info.");
   3547   return TranslatedValue(nullptr, TranslatedValue::kInvalid);
   3548 }
   3549 
   3550 
   3551 TranslatedState::TranslatedState(JavaScriptFrame* frame)
   3552     : isolate_(nullptr),
   3553       stack_frame_pointer_(nullptr),
   3554       has_adapted_arguments_(false) {
   3555   int deopt_index = Safepoint::kNoDeoptimizationIndex;
   3556   DeoptimizationInputData* data =
   3557       static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
   3558   DCHECK(data != nullptr && deopt_index != Safepoint::kNoDeoptimizationIndex);
   3559   TranslationIterator it(data->TranslationByteArray(),
   3560                          data->TranslationIndex(deopt_index)->value());
   3561   Init(frame->fp(), &it, data->LiteralArray(), nullptr /* registers */,
   3562        nullptr /* trace file */);
   3563 }
   3564 
   3565 
   3566 TranslatedState::TranslatedState()
   3567     : isolate_(nullptr),
   3568       stack_frame_pointer_(nullptr),
   3569       has_adapted_arguments_(false) {}
   3570 
   3571 
   3572 void TranslatedState::Init(Address input_frame_pointer,
   3573                            TranslationIterator* iterator,
   3574                            FixedArray* literal_array, RegisterValues* registers,
   3575                            FILE* trace_file) {
   3576   DCHECK(frames_.empty());
   3577 
   3578   isolate_ = literal_array->GetIsolate();
   3579   // Read out the 'header' translation.
   3580   Translation::Opcode opcode =
   3581       static_cast<Translation::Opcode>(iterator->Next());
   3582   CHECK(opcode == Translation::BEGIN);
   3583 
   3584   int count = iterator->Next();
   3585   iterator->Next();  // Drop JS frames count.
   3586 
   3587   frames_.reserve(count);
   3588 
   3589   std::stack<int> nested_counts;
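          // Fields of captured/arguments objects are stored inline, right after
          // the value that introduces them; nested_counts remembers how many
          // values are still pending at each enclosing level while the children
          // are read.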
   3590 
   3591   // Read the frames.
   3592   for (int i = 0; i < count; i++) {
   3593     // Read the frame descriptor.
   3594     frames_.push_back(CreateNextTranslatedFrame(
   3595         iterator, literal_array, input_frame_pointer, trace_file));
   3596     TranslatedFrame& frame = frames_.back();
   3597 
   3598     // Read the values.
   3599     int values_to_process = frame.GetValueCount();
   3600     while (values_to_process > 0 || !nested_counts.empty()) {
   3601       if (trace_file != nullptr) {
   3602         if (nested_counts.empty()) {
   3603           // For top level values, print the value number.
   3604           PrintF(trace_file, "    %3i: ",
   3605                  frame.GetValueCount() - values_to_process);
   3606         } else {
   3607           // Take care of indenting for nested values.
   3608           PrintF(trace_file, "         ");
   3609           for (size_t j = 0; j < nested_counts.size(); j++) {
   3610             PrintF(trace_file, "  ");
   3611           }
   3612         }
   3613       }
   3614 
   3615       TranslatedValue value = CreateNextTranslatedValue(
   3616           i, static_cast<int>(frame.values_.size()), iterator, literal_array,
   3617           input_frame_pointer, registers, trace_file);
   3618       frame.Add(value);
   3619 
   3620       if (trace_file != nullptr) {
   3621         PrintF(trace_file, "\n");
   3622       }
   3623 
   3624       // Update the value count and resolve the nesting.
   3625       values_to_process--;
   3626       int children_count = value.GetChildrenCount();
   3627       if (children_count > 0) {
   3628         nested_counts.push(values_to_process);
   3629         values_to_process = children_count;
   3630       } else {
   3631         while (values_to_process == 0 && !nested_counts.empty()) {
   3632           values_to_process = nested_counts.top();
   3633           nested_counts.pop();
   3634         }
   3635       }
   3636     }
   3637   }
   3638 
   3639   CHECK(!iterator->HasNext() ||
   3640         static_cast<Translation::Opcode>(iterator->Next()) ==
   3641             Translation::BEGIN);
   3642 }
   3643 
   3644 
   3645 void TranslatedState::Prepare(bool has_adapted_arguments,
   3646                               Address stack_frame_pointer) {
   3647   for (auto& frame : frames_) frame.Handlify();
   3648 
   3649   stack_frame_pointer_ = stack_frame_pointer;
   3650   has_adapted_arguments_ = has_adapted_arguments;
   3651 
   3652   UpdateFromPreviouslyMaterializedObjects();
   3653 }
   3654 
   3655 class TranslatedState::CapturedObjectMaterializer {
   3656  public:
   3657   CapturedObjectMaterializer(TranslatedState* state, int frame_index,
   3658                              int field_count)
   3659       : state_(state), frame_index_(frame_index), field_count_(field_count) {}
   3660 
   3661   Handle<Object> FieldAt(int* value_index) {
   3662     CHECK(field_count_ > 0);
   3663     --field_count_;
   3664     return state_->MaterializeAt(frame_index_, value_index);
   3665   }
   3666 
   3667   ~CapturedObjectMaterializer() { CHECK_EQ(0, field_count_); }
   3668 
   3669  private:
   3670   TranslatedState* state_;
   3671   int frame_index_;
   3672   int field_count_;
   3673 };
   3674 
   3675 Handle<Object> TranslatedState::MaterializeCapturedObjectAt(
   3676     TranslatedValue* slot, int frame_index, int* value_index) {
   3677   int length = slot->GetChildrenCount();
   3678 
   3679   CapturedObjectMaterializer materializer(this, frame_index, length);
   3680 
   3681   Handle<Object> result;
   3682   if (slot->value_.ToHandle(&result)) {
   3683     // This has been previously materialized, return the previous value.
   3684     // We still need to skip all the nested objects.
   3685     for (int i = 0; i < length; i++) {
   3686       materializer.FieldAt(value_index);
   3687     }
   3688 
   3689     return result;
   3690   }
   3691 
   3692   Handle<Object> map_object = materializer.FieldAt(value_index);
   3693   Handle<Map> map = Map::GeneralizeAllFields(Handle<Map>::cast(map_object));
   3694   switch (map->instance_type()) {
   3695     case MUTABLE_HEAP_NUMBER_TYPE:
   3696     case HEAP_NUMBER_TYPE: {
   3697       // Reuse the HeapNumber value directly as it is already properly
   3698       // tagged and skip materializing the HeapNumber explicitly.
   3699       Handle<Object> object = materializer.FieldAt(value_index);
   3700       slot->value_ = object;
   3701       // On 32-bit architectures, there is an extra slot because the
   3702       // escape analysis calculates the number of slots as
   3703       // object-size/pointer-size. To account for this, we read out
   3704       // any extra slots.
   3705       for (int i = 0; i < length - 2; i++) {
   3706         materializer.FieldAt(value_index);
   3707       }
   3708       return object;
   3709     }
   3710     case JS_OBJECT_TYPE:
   3711     case JS_ERROR_TYPE:
   3712     case JS_ARGUMENTS_TYPE: {
   3713       Handle<JSObject> object =
   3714           isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED);
   3715       slot->value_ = object;
   3716       Handle<Object> properties = materializer.FieldAt(value_index);
   3717       Handle<Object> elements = materializer.FieldAt(value_index);
   3718       object->set_properties(FixedArray::cast(*properties));
   3719       object->set_elements(FixedArrayBase::cast(*elements));
   3720       for (int i = 0; i < length - 3; ++i) {
   3721         Handle<Object> value = materializer.FieldAt(value_index);
   3722         FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
   3723         object->FastPropertyAtPut(index, *value);
   3724       }
   3725       return object;
   3726     }
   3727     case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
   3728     case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
   3729     case JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE:
   3730     case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3731     case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3732     case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3733     case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3734     case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3735     case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3736     case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3737     case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3738     case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3739     case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3740     case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3741     case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3742     case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3743     case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3744     case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3745     case JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE:
   3746     case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
   3747     case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
   3748     case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
   3749     case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
   3750     case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
   3751     case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
   3752     case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
   3753     case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
   3754     case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
   3755     case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
   3756     case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
   3757     case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
   3758     case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
   3759     case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
   3760     case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
   3761     case JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE: {
   3762       Handle<JSArrayIterator> object = Handle<JSArrayIterator>::cast(
   3763           isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED));
   3764       slot->value_ = object;
   3765       // Initialize the index to zero to make the heap verifier happy.
   3766       object->set_index(Smi::FromInt(0));
   3767       Handle<Object> properties = materializer.FieldAt(value_index);
   3768       Handle<Object> elements = materializer.FieldAt(value_index);
   3769       Handle<Object> iterated_object = materializer.FieldAt(value_index);
   3770       Handle<Object> next_index = materializer.FieldAt(value_index);
   3771       Handle<Object> iterated_object_map = materializer.FieldAt(value_index);
   3772       object->set_properties(FixedArray::cast(*properties));
   3773       object->set_elements(FixedArrayBase::cast(*elements));
   3774       object->set_object(*iterated_object);
   3775       object->set_index(*next_index);
   3776       object->set_object_map(*iterated_object_map);
   3777       return object;
   3778     }
   3779     case JS_STRING_ITERATOR_TYPE: {
   3780       Handle<JSStringIterator> object = Handle<JSStringIterator>::cast(
   3781           isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED));
   3782       slot->value_ = object;
   3783       // Initialize the index to zero to make the heap verifier happy.
   3784       object->set_index(0);
   3785       Handle<Object> properties = materializer.FieldAt(value_index);
   3786       Handle<Object> elements = materializer.FieldAt(value_index);
   3787       Handle<Object> iterated_string = materializer.FieldAt(value_index);
   3788       Handle<Object> next_index = materializer.FieldAt(value_index);
   3789       object->set_properties(FixedArray::cast(*properties));
   3790       object->set_elements(FixedArrayBase::cast(*elements));
   3791       CHECK(iterated_string->IsString());
   3792       object->set_string(String::cast(*iterated_string));
   3793       CHECK(next_index->IsSmi());
   3794       object->set_index(Smi::cast(*next_index)->value());
   3795       return object;
   3796     }
   3797     case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE: {
   3798       Handle<JSAsyncFromSyncIterator> object =
   3799           Handle<JSAsyncFromSyncIterator>::cast(
   3800               isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED));
   3801       slot->value_ = object;
   3802       Handle<Object> properties = materializer.FieldAt(value_index);
   3803       Handle<Object> elements = materializer.FieldAt(value_index);
   3804       Handle<Object> sync_iterator = materializer.FieldAt(value_index);
   3805       object->set_properties(FixedArray::cast(*properties));
   3806       object->set_elements(FixedArrayBase::cast(*elements));
   3807       object->set_sync_iterator(JSReceiver::cast(*sync_iterator));
   3808       return object;
   3809     }
   3810     case JS_ARRAY_TYPE: {
   3811       Handle<JSArray> object = Handle<JSArray>::cast(
   3812           isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED));
   3813       slot->value_ = object;
   3814       Handle<Object> properties = materializer.FieldAt(value_index);
   3815       Handle<Object> elements = materializer.FieldAt(value_index);
   3816       Handle<Object> length = materializer.FieldAt(value_index);
   3817       object->set_properties(FixedArray::cast(*properties));
   3818       object->set_elements(FixedArrayBase::cast(*elements));
   3819       object->set_length(*length);
   3820       return object;
   3821     }
   3822     case JS_FUNCTION_TYPE: {
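               // Allocate the function with a temporary SharedFunctionInfo and
               // the CompileLazy builtin as its code, then overwrite its fields
               // with the translated values below.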
   3823       Handle<SharedFunctionInfo> temporary_shared =
   3824           isolate_->factory()->NewSharedFunctionInfo(
   3825               isolate_->factory()->empty_string(), MaybeHandle<Code>(), false);
   3826       Handle<JSFunction> object =
   3827           isolate_->factory()->NewFunctionFromSharedFunctionInfo(
   3828               map, temporary_shared, isolate_->factory()->undefined_value(),
   3829               NOT_TENURED);
   3830       slot->value_ = object;
   3831       Handle<Object> properties = materializer.FieldAt(value_index);
   3832       Handle<Object> elements = materializer.FieldAt(value_index);
   3833       Handle<Object> prototype = materializer.FieldAt(value_index);
   3834       Handle<Object> shared = materializer.FieldAt(value_index);
   3835       Handle<Object> context = materializer.FieldAt(value_index);
   3836       Handle<Object> vector_cell = materializer.FieldAt(value_index);
   3837       Handle<Object> entry = materializer.FieldAt(value_index);
   3838       Handle<Object> next_link = materializer.FieldAt(value_index);
   3839       object->ReplaceCode(*isolate_->builtins()->CompileLazy());
   3840       object->set_map(*map);
   3841       object->set_properties(FixedArray::cast(*properties));
   3842       object->set_elements(FixedArrayBase::cast(*elements));
   3843       object->set_prototype_or_initial_map(*prototype);
   3844       object->set_shared(SharedFunctionInfo::cast(*shared));
   3845       object->set_context(Context::cast(*context));
   3846       object->set_feedback_vector_cell(Cell::cast(*vector_cell));
   3847       CHECK(entry->IsNumber());  // Entry to compile lazy stub.
   3848       CHECK(next_link->IsUndefined(isolate_));
   3849       return object;
   3850     }
   3851     case CONS_STRING_TYPE: {
   3852       Handle<ConsString> object = Handle<ConsString>::cast(
   3853           isolate_->factory()
   3854               ->NewConsString(isolate_->factory()->undefined_string(),
   3855                               isolate_->factory()->undefined_string())
   3856               .ToHandleChecked());
   3857       slot->value_ = object;
   3858       Handle<Object> hash = materializer.FieldAt(value_index);
   3859       Handle<Object> length = materializer.FieldAt(value_index);
   3860       Handle<Object> first = materializer.FieldAt(value_index);
   3861       Handle<Object> second = materializer.FieldAt(value_index);
   3862       object->set_map(*map);
   3863       object->set_length(Smi::cast(*length)->value());
   3864       object->set_first(String::cast(*first));
   3865       object->set_second(String::cast(*second));
   3866       CHECK(hash->IsNumber());  // The {Name::kEmptyHashField} value.
   3867       return object;
   3868     }
   3869     case CONTEXT_EXTENSION_TYPE: {
   3870       Handle<ContextExtension> object =
   3871           isolate_->factory()->NewContextExtension(
   3872               isolate_->factory()->NewScopeInfo(1),
   3873               isolate_->factory()->undefined_value());
   3874       slot->value_ = object;
   3875       Handle<Object> scope_info = materializer.FieldAt(value_index);
   3876       Handle<Object> extension = materializer.FieldAt(value_index);
   3877       object->set_scope_info(ScopeInfo::cast(*scope_info));
   3878       object->set_extension(*extension);
   3879       return object;
   3880     }
   3881     case FIXED_ARRAY_TYPE: {
   3882       Handle<Object> lengthObject = materializer.FieldAt(value_index);
   3883       int32_t length = 0;
   3884       CHECK(lengthObject->ToInt32(&length));
   3885       Handle<FixedArray> object = isolate_->factory()->NewFixedArray(length);
   3886       // We need to set the map, because the fixed array we are
   3887       // materializing could be a context or an arguments object,
   3888       // in which case we must retain that information.
   3889       object->set_map(*map);
   3890       slot->value_ = object;
   3891       for (int i = 0; i < length; ++i) {
   3892         Handle<Object> value = materializer.FieldAt(value_index);
   3893         object->set(i, *value);
   3894       }
   3895       return object;
   3896     }
   3897     case FIXED_DOUBLE_ARRAY_TYPE: {
   3898       DCHECK_EQ(*map, isolate_->heap()->fixed_double_array_map());
   3899       Handle<Object> lengthObject = materializer.FieldAt(value_index);
   3900       int32_t length = 0;
   3901       CHECK(lengthObject->ToInt32(&length));
   3902       Handle<FixedArrayBase> object =
   3903           isolate_->factory()->NewFixedDoubleArray(length);
   3904       slot->value_ = object;
   3905       if (length > 0) {
   3906         Handle<FixedDoubleArray> double_array =
   3907             Handle<FixedDoubleArray>::cast(object);
   3908         for (int i = 0; i < length; ++i) {
   3909           Handle<Object> value = materializer.FieldAt(value_index);
   3910           CHECK(value->IsNumber());
   3911           if (value.is_identical_to(isolate_->factory()->hole_nan_value())) {
   3912             double_array->set_the_hole(isolate_, i);
   3913           } else {
   3914             double_array->set(i, value->Number());
   3915           }
   3916         }
   3917       }
   3918       return object;
   3919     }
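             // None of the remaining instance types are expected to be
             // materialized here; reaching one of these cases prints a
             // diagnostic and aborts via UNREACHABLE().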
   3920     case STRING_TYPE:
   3921     case ONE_BYTE_STRING_TYPE:
   3922     case CONS_ONE_BYTE_STRING_TYPE:
   3923     case SLICED_STRING_TYPE:
   3924     case SLICED_ONE_BYTE_STRING_TYPE:
   3925     case EXTERNAL_STRING_TYPE:
   3926     case EXTERNAL_ONE_BYTE_STRING_TYPE:
   3927     case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
   3928     case SHORT_EXTERNAL_STRING_TYPE:
   3929     case SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE:
   3930     case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
   3931     case THIN_STRING_TYPE:
   3932     case THIN_ONE_BYTE_STRING_TYPE:
   3933     case INTERNALIZED_STRING_TYPE:
   3934     case ONE_BYTE_INTERNALIZED_STRING_TYPE:
   3935     case EXTERNAL_INTERNALIZED_STRING_TYPE:
   3936     case EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
   3937     case EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
   3938     case SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE:
   3939     case SHORT_EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
   3940     case SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
   3941     case SYMBOL_TYPE:
   3942     case ODDBALL_TYPE:
   3943     case JS_GLOBAL_OBJECT_TYPE:
   3944     case JS_GLOBAL_PROXY_TYPE:
   3945     case JS_API_OBJECT_TYPE:
   3946     case JS_SPECIAL_API_OBJECT_TYPE:
   3947     case JS_VALUE_TYPE:
   3948     case JS_MESSAGE_OBJECT_TYPE:
   3949     case JS_DATE_TYPE:
   3950     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
   3951     case JS_GENERATOR_OBJECT_TYPE:
   3952     case JS_MODULE_NAMESPACE_TYPE:
   3953     case JS_ARRAY_BUFFER_TYPE:
   3954     case JS_REGEXP_TYPE:
   3955     case JS_TYPED_ARRAY_TYPE:
   3956     case JS_DATA_VIEW_TYPE:
   3957     case JS_SET_TYPE:
   3958     case JS_MAP_TYPE:
   3959     case JS_SET_ITERATOR_TYPE:
   3960     case JS_MAP_ITERATOR_TYPE:
   3961     case JS_WEAK_MAP_TYPE:
   3962     case JS_WEAK_SET_TYPE:
   3963     case JS_PROMISE_CAPABILITY_TYPE:
   3964     case JS_PROMISE_TYPE:
   3965     case JS_BOUND_FUNCTION_TYPE:
   3966     case JS_PROXY_TYPE:
   3967     case MAP_TYPE:
   3968     case ALLOCATION_SITE_TYPE:
   3969     case ACCESSOR_INFO_TYPE:
   3970     case SHARED_FUNCTION_INFO_TYPE:
   3971     case FUNCTION_TEMPLATE_INFO_TYPE:
   3972     case ACCESSOR_PAIR_TYPE:
   3973     case BYTE_ARRAY_TYPE:
   3974     case BYTECODE_ARRAY_TYPE:
   3975     case TRANSITION_ARRAY_TYPE:
   3976     case FOREIGN_TYPE:
   3977     case SCRIPT_TYPE:
   3978     case CODE_TYPE:
   3979     case PROPERTY_CELL_TYPE:
   3980     case MODULE_TYPE:
   3981     case MODULE_INFO_ENTRY_TYPE:
   3982     case FREE_SPACE_TYPE:
   3983 #define FIXED_TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
   3984   case FIXED_##TYPE##_ARRAY_TYPE:
   3985       TYPED_ARRAYS(FIXED_TYPED_ARRAY_CASE)
   3986 #undef FIXED_TYPED_ARRAY_CASE
   3987     case FILLER_TYPE:
   3988     case ACCESS_CHECK_INFO_TYPE:
   3989     case INTERCEPTOR_INFO_TYPE:
   3990     case CALL_HANDLER_INFO_TYPE:
   3991     case OBJECT_TEMPLATE_INFO_TYPE:
   3992     case ALLOCATION_MEMENTO_TYPE:
   3993     case TYPE_FEEDBACK_INFO_TYPE:
   3994     case ALIASED_ARGUMENTS_ENTRY_TYPE:
   3995     case PROMISE_RESOLVE_THENABLE_JOB_INFO_TYPE:
   3996     case PROMISE_REACTION_JOB_INFO_TYPE:
   3997     case DEBUG_INFO_TYPE:
   3998     case BREAK_POINT_INFO_TYPE:
   3999     case CELL_TYPE:
   4000     case WEAK_CELL_TYPE:
   4001     case PROTOTYPE_INFO_TYPE:
   4002     case TUPLE2_TYPE:
   4003     case TUPLE3_TYPE:
   4004     case CONSTANT_ELEMENTS_PAIR_TYPE:
   4005       OFStream os(stderr);
   4006       os << "[couldn't handle instance type " << map->instance_type() << "]"
   4007          << std::endl;
   4008       UNREACHABLE();
   4009       break;
   4010   }
   4011   UNREACHABLE();
   4012   return Handle<Object>::null();
   4013 }
   4014 
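         // Materializes the translated value at *value_index in the frame with
         // the given index, advancing *value_index past the value and any
         // nested values it consumes.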
   4015 Handle<Object> TranslatedState::MaterializeAt(int frame_index,
   4016                                               int* value_index) {
   4017   CHECK_LT(static_cast<size_t>(frame_index), frames().size());
   4018   TranslatedFrame* frame = &(frames_[frame_index]);
   4019   CHECK_LT(static_cast<size_t>(*value_index), frame->values_.size());
   4020 
   4021   TranslatedValue* slot = &(frame->values_[*value_index]);
   4022   (*value_index)++;
   4023 
   4024   switch (slot->kind()) {
   4025     case TranslatedValue::kTagged:
   4026     case TranslatedValue::kInt32:
   4027     case TranslatedValue::kUInt32:
   4028     case TranslatedValue::kBoolBit:
   4029     case TranslatedValue::kFloat:
   4030     case TranslatedValue::kDouble: {
   4031       slot->MaterializeSimple();
   4032       Handle<Object> value = slot->GetValue();
   4033       if (value->IsMutableHeapNumber()) {
   4034         HeapNumber::cast(*value)->set_map(isolate()->heap()->heap_number_map());
   4035       }
   4036       return value;
   4037     }
   4038 
   4039     case TranslatedValue::kArgumentsObject: {
   4040       int length = slot->GetChildrenCount();
   4041       Handle<JSObject> arguments;
   4042       if (GetAdaptedArguments(&arguments, frame_index)) {
   4043         // Store the materialized object and consume the nested values.
   4044         for (int i = 0; i < length; ++i) {
   4045           MaterializeAt(frame_index, value_index);
   4046         }
   4047       } else {
   4048         Handle<JSFunction> function =
   4049             Handle<JSFunction>::cast(frame->front().GetValue());
   4050         arguments = isolate_->factory()->NewArgumentsObject(function, length);
   4051         Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
   4052         DCHECK_EQ(array->length(), length);
   4053         arguments->set_elements(*array);
   4054         for (int i = 0; i < length; ++i) {
   4055           Handle<Object> value = MaterializeAt(frame_index, value_index);
   4056           array->set(i, *value);
   4057         }
   4058       }
   4059       slot->value_ = arguments;
   4060       return arguments;
   4061     }
   4062     case TranslatedValue::kCapturedObject: {
   4063       // The map must be a tagged object.
   4064       CHECK(frame->values_[*value_index].kind() == TranslatedValue::kTagged);
   4065       CHECK(frame->values_[*value_index].GetValue()->IsMap());
   4066       return MaterializeCapturedObjectAt(slot, frame_index, value_index);
   4067     }
   4068     case TranslatedValue::kDuplicatedObject: {
   4069       int object_index = slot->object_index();
   4070       TranslatedState::ObjectPosition pos = object_positions_[object_index];
   4071 
    4072       // Make sure the duplicate is referring to a previous object.
   4073       CHECK(pos.frame_index_ < frame_index ||
   4074             (pos.frame_index_ == frame_index &&
   4075              pos.value_index_ < *value_index - 1));
   4076 
   4077       Handle<Object> object =
   4078           frames_[pos.frame_index_].values_[pos.value_index_].GetValue();
   4079 
   4080       // The object should have a (non-sentinel) value.
   4081       CHECK(!object.is_null() &&
   4082             !object.is_identical_to(isolate_->factory()->arguments_marker()));
   4083 
   4084       slot->value_ = object;
   4085       return object;
   4086     }
   4087 
   4088     case TranslatedValue::kInvalid:
   4089       UNREACHABLE();
   4090       break;
   4091   }
   4092 
   4093   FATAL("We should never get here - unexpected deopt slot kind.");
   4094   return Handle<Object>::null();
   4095 }
   4096 
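         // Materializes the object with the given index in object_positions_
         // by delegating to MaterializeAt at its recorded frame and value
         // position.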
   4097 Handle<Object> TranslatedState::MaterializeObjectAt(int object_index) {
   4098   CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
   4099   TranslatedState::ObjectPosition pos = object_positions_[object_index];
   4100   return MaterializeAt(pos.frame_index_, &(pos.value_index_));
   4101 }
   4102 
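         // Tries to obtain an adapted arguments object for the frame at
         // frame_index: for the outermost frame it is read off the actual
         // stack via Accessors::FunctionGetArguments, otherwise it is rebuilt
         // from an arguments adaptor frame in the translation. Returns false
         // if no adapted arguments are available.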
   4103 bool TranslatedState::GetAdaptedArguments(Handle<JSObject>* result,
   4104                                           int frame_index) {
   4105   if (frame_index == 0) {
    4106     // Top-level frame -> we need to go to the parent frame on the stack.
   4107     if (!has_adapted_arguments_) return false;
   4108 
    4109     // This is the top-level frame, so we need to go to the stack to
    4110     // get this function's arguments. (Note that this relies on not
    4111     // inlining recursive functions!)
   4112     Handle<JSFunction> function =
   4113         Handle<JSFunction>::cast(frames_[frame_index].front().GetValue());
   4114     *result = Accessors::FunctionGetArguments(function);
   4115     return true;
   4116   } else {
   4117     TranslatedFrame* previous_frame = &(frames_[frame_index]);
   4118     if (previous_frame->kind() != TranslatedFrame::kArgumentsAdaptor) {
   4119       return false;
   4120     }
   4121     // We get the adapted arguments from the parent translation.
   4122     int length = previous_frame->height();
   4123     Handle<JSFunction> function =
   4124         Handle<JSFunction>::cast(previous_frame->front().GetValue());
   4125     Handle<JSObject> arguments =
   4126         isolate_->factory()->NewArgumentsObject(function, length);
   4127     Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
   4128     arguments->set_elements(*array);
   4129     TranslatedFrame::iterator arg_iterator = previous_frame->begin();
   4130     arg_iterator++;  // Skip function.
   4131     for (int i = 0; i < length; ++i) {
   4132       Handle<Object> value = arg_iterator->GetValue();
   4133       array->set(i, *value);
   4134       arg_iterator++;
   4135     }
   4136     CHECK(arg_iterator == previous_frame->end());
   4137     *result = arguments;
   4138     return true;
   4139   }
   4140 }
   4141 
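         // Returns the translated frame that carries the arguments for the
         // jsframe_index-th JavaScript frame: the preceding arguments adaptor
         // frame when one exists, otherwise the function frame itself. The
         // argument count is reported through *args_count.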
   4142 TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
   4143     int jsframe_index, int* args_count) {
   4144   for (size_t i = 0; i < frames_.size(); i++) {
   4145     if (frames_[i].kind() == TranslatedFrame::kFunction ||
   4146         frames_[i].kind() == TranslatedFrame::kInterpretedFunction) {
   4147       if (jsframe_index > 0) {
   4148         jsframe_index--;
   4149       } else {
    4150         // We have the JS function frame; now check whether it has an
    4151         // arguments adaptor.
   4152         if (i > 0 &&
   4153             frames_[i - 1].kind() == TranslatedFrame::kArgumentsAdaptor) {
   4154           *args_count = frames_[i - 1].height();
   4155           return &(frames_[i - 1]);
   4156         }
   4157         *args_count =
   4158             frames_[i].shared_info()->internal_formal_parameter_count() + 1;
   4159         return &(frames_[i]);
   4160       }
   4161     }
   4162   }
   4163   return nullptr;
   4164 }
   4165 
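         // Writes the objects materialized for this frame into the isolate's
         // materialized object store, keyed by the frame pointer. If this
         // created a new store entry and recorded at least one value, the
         // function is deoptimized so that the stored values take effect.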
   4166 void TranslatedState::StoreMaterializedValuesAndDeopt(JavaScriptFrame* frame) {
   4167   MaterializedObjectStore* materialized_store =
   4168       isolate_->materialized_object_store();
   4169   Handle<FixedArray> previously_materialized_objects =
   4170       materialized_store->Get(stack_frame_pointer_);
   4171 
   4172   Handle<Object> marker = isolate_->factory()->arguments_marker();
   4173 
   4174   int length = static_cast<int>(object_positions_.size());
   4175   bool new_store = false;
   4176   if (previously_materialized_objects.is_null()) {
   4177     previously_materialized_objects =
   4178         isolate_->factory()->NewFixedArray(length);
   4179     for (int i = 0; i < length; i++) {
   4180       previously_materialized_objects->set(i, *marker);
   4181     }
   4182     new_store = true;
   4183   }
   4184 
   4185   CHECK_EQ(length, previously_materialized_objects->length());
   4186 
   4187   bool value_changed = false;
   4188   for (int i = 0; i < length; i++) {
   4189     TranslatedState::ObjectPosition pos = object_positions_[i];
   4190     TranslatedValue* value_info =
   4191         &(frames_[pos.frame_index_].values_[pos.value_index_]);
   4192 
   4193     CHECK(value_info->IsMaterializedObject());
   4194 
   4195     Handle<Object> value(value_info->GetRawValue(), isolate_);
   4196 
   4197     if (!value.is_identical_to(marker)) {
   4198       if (previously_materialized_objects->get(i) == *marker) {
   4199         previously_materialized_objects->set(i, *value);
   4200         value_changed = true;
   4201       } else {
   4202         CHECK(previously_materialized_objects->get(i) == *value);
   4203       }
   4204     }
   4205   }
   4206   if (new_store && value_changed) {
   4207     materialized_store->Set(stack_frame_pointer_,
   4208                             previously_materialized_objects);
   4209     CHECK(frames_[0].kind() == TranslatedFrame::kFunction ||
   4210           frames_[0].kind() == TranslatedFrame::kInterpretedFunction ||
   4211           frames_[0].kind() == TranslatedFrame::kTailCallerFunction);
   4212     CHECK_EQ(frame->function(), frames_[0].front().GetRawValue());
   4213     Deoptimizer::DeoptimizeFunction(frame->function(), frame->LookupCode());
   4214   }
   4215 }
   4216 
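         // Injects objects that were materialized for this frame earlier (and
         // recorded in the materialized object store) back into the
         // corresponding translated values, so that re-materialization reuses
         // the same objects.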
   4217 void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
   4218   MaterializedObjectStore* materialized_store =
   4219       isolate_->materialized_object_store();
   4220   Handle<FixedArray> previously_materialized_objects =
   4221       materialized_store->Get(stack_frame_pointer_);
   4222 
   4223   // If we have no previously materialized objects, there is nothing to do.
   4224   if (previously_materialized_objects.is_null()) return;
   4225 
   4226   Handle<Object> marker = isolate_->factory()->arguments_marker();
   4227 
   4228   int length = static_cast<int>(object_positions_.size());
   4229   CHECK_EQ(length, previously_materialized_objects->length());
   4230 
   4231   for (int i = 0; i < length; i++) {
    4232     // For previously materialized objects, inject their values into
    4233     // the translated values.
   4234     if (previously_materialized_objects->get(i) != *marker) {
   4235       TranslatedState::ObjectPosition pos = object_positions_[i];
   4236       TranslatedValue* value_info =
   4237           &(frames_[pos.frame_index_].values_[pos.value_index_]);
   4238       CHECK(value_info->IsMaterializedObject());
   4239 
   4240       value_info->value_ =
   4241           Handle<Object>(previously_materialized_objects->get(i), isolate_);
   4242     }
   4243   }
   4244 }
   4245 
   4246 }  // namespace internal
   4247 }  // namespace v8
   4248