      1 // Copyright 2013 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #include "accessors.h"
     31 #include "codegen.h"
     32 #include "deoptimizer.h"
     33 #include "disasm.h"
     34 #include "full-codegen.h"
     35 #include "global-handles.h"
     36 #include "macro-assembler.h"
     37 #include "prettyprinter.h"
     38 
     39 
     40 namespace v8 {
     41 namespace internal {
     42 
     43 static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
     44   return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
     45                                   OS::CommitPageSize(),
     46 #if defined(__native_client__)
     47   // The Native Client port of V8 uses an interpreter,
     48   // so code pages don't need PROT_EXEC.
     49                                   NOT_EXECUTABLE,
     50 #else
     51                                   EXECUTABLE,
     52 #endif
     53                                   NULL);
     54 }
     55 
     56 
     57 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
     58     : allocator_(allocator),
     59       current_(NULL),
     60 #ifdef ENABLE_DEBUGGER_SUPPORT
     61       deoptimized_frame_info_(NULL),
     62 #endif
     63       deoptimizing_code_list_(NULL) {
     64   for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
     65     deopt_entry_code_entries_[i] = -1;
     66     deopt_entry_code_[i] = AllocateCodeChunk(allocator);
     67   }
     68 }
     69 
     70 
     71 DeoptimizerData::~DeoptimizerData() {
     72   for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
     73     allocator_->Free(deopt_entry_code_[i]);
     74     deopt_entry_code_[i] = NULL;
     75   }
     76 
     77   DeoptimizingCodeListNode* current = deoptimizing_code_list_;
     78   while (current != NULL) {
     79     DeoptimizingCodeListNode* prev = current;
     80     current = current->next();
     81     delete prev;
     82   }
     83   deoptimizing_code_list_ = NULL;
     84 }
     85 
     86 
     87 #ifdef ENABLE_DEBUGGER_SUPPORT
     88 void DeoptimizerData::Iterate(ObjectVisitor* v) {
     89   if (deoptimized_frame_info_ != NULL) {
     90     deoptimized_frame_info_->Iterate(v);
     91   }
     92 }
     93 #endif
     94 
     95 
     96 Code* DeoptimizerData::FindDeoptimizingCode(Address addr) {
     97   for (DeoptimizingCodeListNode* node = deoptimizing_code_list_;
     98        node != NULL;
     99        node = node->next()) {
    100     if (node->code()->contains(addr)) return *node->code();
    101   }
    102   return NULL;
    103 }
    104 
    105 
    106 void DeoptimizerData::RemoveDeoptimizingCode(Code* code) {
    107   for (DeoptimizingCodeListNode *prev = NULL, *cur = deoptimizing_code_list_;
    108        cur != NULL;
    109        prev = cur, cur = cur->next()) {
    110     if (*cur->code() == code) {
    111       if (prev == NULL) {
    112         deoptimizing_code_list_ = cur->next();
    113       } else {
    114         prev->set_next(cur->next());
    115       }
    116       delete cur;
    117       return;
    118     }
    119   }
    120   // Deoptimizing code is removed through a weak callback. Each object is
    121   // expected to be removed once and only once.
    122   UNREACHABLE();
    123 }
    124 
    125 
    126 // We rely on this function not causing a GC.  It is called from generated code
    127 // without having a real stack frame in place.
    128 Deoptimizer* Deoptimizer::New(JSFunction* function,
    129                               BailoutType type,
    130                               unsigned bailout_id,
    131                               Address from,
    132                               int fp_to_sp_delta,
    133                               Isolate* isolate) {
    134   Deoptimizer* deoptimizer = new Deoptimizer(isolate,
    135                                              function,
    136                                              type,
    137                                              bailout_id,
    138                                              from,
    139                                              fp_to_sp_delta,
    140                                              NULL);
    141   ASSERT(isolate->deoptimizer_data()->current_ == NULL);
    142   isolate->deoptimizer_data()->current_ = deoptimizer;
    143   return deoptimizer;
    144 }
    145 
    146 
    147 // No larger than 2K on all platforms
    148 static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
    149 
    150 
    151 size_t Deoptimizer::GetMaxDeoptTableSize() {
    152   int entries_size =
    153       Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
    154   int commit_page_size = static_cast<int>(OS::CommitPageSize());
    155   int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
    156                     commit_page_size) + 1;
    157   return static_cast<size_t>(commit_page_size * page_count);
    158 }
    159 
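        // Worked example (illustrative numbers only; kMaxNumberOfEntries and
        // table_entry_size_ are architecture-specific): with 16384 entries of
        // 10 bytes each, the 2 KB epilogue budget and a 4 KB commit page give
        //   entries_size = 16384 * 10                     = 163840
        //   page_count   = (2048 + 163840 - 1) / 4096 + 1 = 41
        //   max size     = 41 * 4096                      = 167936 bytes
        // i.e. the reserved table is always a whole number of commit pages.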
    160 
    161 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
    162   Deoptimizer* result = isolate->deoptimizer_data()->current_;
    163   ASSERT(result != NULL);
    164   result->DeleteFrameDescriptions();
    165   isolate->deoptimizer_data()->current_ = NULL;
    166   return result;
    167 }
    168 
    169 
    170 int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
    171   if (jsframe_index == 0) return 0;
    172 
    173   int frame_index = 0;
    174   while (jsframe_index >= 0) {
    175     FrameDescription* frame = output_[frame_index];
    176     if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
    177       jsframe_index--;
    178     }
    179     frame_index++;
    180   }
    181 
    182   return frame_index - 1;
    183 }
    184 
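        // Example: with output_ = [JAVA_SCRIPT, ARGUMENTS_ADAPTOR, JAVA_SCRIPT],
        // jsframe_index 0 maps to frame index 0 and jsframe_index 1 maps to
        // frame index 2; the adaptor frame at index 1 is skipped because it is
        // not a JS frame.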
    185 
    186 #ifdef ENABLE_DEBUGGER_SUPPORT
    187 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    188     JavaScriptFrame* frame,
    189     int jsframe_index,
    190     Isolate* isolate) {
    191   ASSERT(frame->is_optimized());
    192   ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
    193 
    194   // Get the function and code from the frame.
    195   JSFunction* function = frame->function();
    196   Code* code = frame->LookupCode();
    197 
    198   // Locate the deoptimization point in the code. As we are at a call, the
    199   // return address must be at a place in the code with deoptimization support.
    200   SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
    201   int deoptimization_index = safepoint_entry.deoptimization_index();
    202   ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex);
    203 
    204   // Always use the actual stack slots when calculating the fp-to-sp
    205   // delta, adding two slots for the function and the context.
    206   unsigned stack_slots = code->stack_slots();
    207   unsigned fp_to_sp_delta = ((stack_slots + 2) * kPointerSize);
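          // For example (assuming a 32-bit target where kPointerSize is 4), a
          // frame with 5 stack slots gives fp_to_sp_delta = (5 + 2) * 4 = 28.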
    208 
    209   Deoptimizer* deoptimizer = new Deoptimizer(isolate,
    210                                              function,
    211                                              Deoptimizer::DEBUGGER,
    212                                              deoptimization_index,
    213                                              frame->pc(),
    214                                              fp_to_sp_delta,
    215                                              code);
    216   Address tos = frame->fp() - fp_to_sp_delta;
    217   deoptimizer->FillInputFrame(tos, frame);
    218 
    219   // Calculate the output frames.
    220   Deoptimizer::ComputeOutputFrames(deoptimizer);
    221 
    222   // Create the GC safe output frame information and register it for GC
    223   // handling.
    224   ASSERT_LT(jsframe_index, deoptimizer->jsframe_count());
    225 
    226   // Convert JS frame index into frame index.
    227   int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
    228 
    229   bool has_arguments_adaptor =
    230       frame_index > 0 &&
    231       deoptimizer->output_[frame_index - 1]->GetFrameType() ==
    232       StackFrame::ARGUMENTS_ADAPTOR;
    233 
    234   int construct_offset = has_arguments_adaptor ? 2 : 1;
    235   bool has_construct_stub =
    236       frame_index >= construct_offset &&
    237       deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
    238       StackFrame::CONSTRUCT;
    239 
    240   DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
    241                                                         frame_index,
    242                                                         has_arguments_adaptor,
    243                                                         has_construct_stub);
    244   isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
    245 
    246   // Get the "simulated" top and size for the requested frame.
    247   FrameDescription* parameters_frame =
    248       deoptimizer->output_[
    249           has_arguments_adaptor ? (frame_index - 1) : frame_index];
    250 
    251   uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
    252   Address parameters_top = reinterpret_cast<Address>(
    253       parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
    254                                     parameters_size));
    255 
    256   uint32_t expressions_size = info->expression_count() * kPointerSize;
    257   Address expressions_top = reinterpret_cast<Address>(
    258       deoptimizer->output_[frame_index]->GetTop());
    259 
    260   // Done with the GC-unsafe frame descriptions. This re-enables allocation.
    261   deoptimizer->DeleteFrameDescriptions();
    262 
    263   // Allocate a heap number for the doubles belonging to this frame.
    264   deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
    265       parameters_top, parameters_size, expressions_top, expressions_size, info);
    266 
    267   // Finished using the deoptimizer instance.
    268   delete deoptimizer;
    269 
    270   return info;
    271 }
    272 
    273 
    274 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
    275                                                  Isolate* isolate) {
    276   ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
    277   delete info;
    278   isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
    279 }
    280 #endif
    281 
    282 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
    283                                                 int count,
    284                                                 BailoutType type) {
    285   TableEntryGenerator generator(masm, type, count);
    286   generator.Generate();
    287 }
    288 
    289 
    290 void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    291     Context* context, OptimizedFunctionVisitor* visitor) {
    292   Isolate* isolate = context->GetIsolate();
    293   Zone zone(isolate);
    294   DisallowHeapAllocation no_allocation;
    295 
    296   ASSERT(context->IsNativeContext());
    297 
    298   visitor->EnterContext(context);
    299 
    300   // Create a snapshot of the optimized functions list. This is needed because
    301   // visitors might remove more than one link from the list at once.
    302   ZoneList<JSFunction*> snapshot(1, &zone);
    303   Object* element = context->OptimizedFunctionsListHead();
    304   while (!element->IsUndefined()) {
    305     JSFunction* element_function = JSFunction::cast(element);
    306     snapshot.Add(element_function, &zone);
    307     element = element_function->next_function_link();
    308   }
    309 
    310   // Run through the snapshot of optimized functions and visit them.
    311   for (int i = 0; i < snapshot.length(); ++i) {
    312     visitor->VisitFunction(snapshot.at(i));
    313   }
    314 
    315   visitor->LeaveContext(context);
    316 }
    317 
    318 
    319 void Deoptimizer::VisitAllOptimizedFunctions(
    320     Isolate* isolate,
    321     OptimizedFunctionVisitor* visitor) {
    322   DisallowHeapAllocation no_allocation;
    323 
    324   // Run through all native contexts and visit their optimized functions.
    325   Object* context = isolate->heap()->native_contexts_list();
    326   while (!context->IsUndefined()) {
    327     VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
    328     context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
    329   }
    330 }
    331 
    332 
    333 // Removes the functions selected by the given filter from the optimized
    334 // function list of the given context and adds their code to the list of
    335 // code objects to be deoptimized.
    336 static void SelectCodeToDeoptimize(Context* context,
    337                                    OptimizedFunctionFilter* filter,
    338                                    ZoneList<Code*>* codes,
    339                                    Zone* zone,
    340                                    Object* undefined) {
    341   DisallowHeapAllocation no_allocation;
    342   Object* current = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
    343   Object* remainder_head = undefined;
    344   Object* remainder_tail = undefined;
    345 
    346   // TODO(titzer): rewrite to not modify unselected functions.
    347   while (current != undefined) {
    348     JSFunction* function = JSFunction::cast(current);
    349     current = function->next_function_link();
    350     if (filter->TakeFunction(function)) {
    351       // Extract this function from the context's list and remember the code.
    352       Code* code = function->code();
    353       ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
    354       if (code->marked_for_deoptimization()) {
    355         ASSERT(codes->Contains(code));
    356       } else {
    357         code->set_marked_for_deoptimization(true);
    358         codes->Add(code, zone);
    359       }
    360       SharedFunctionInfo* shared = function->shared();
    361       // Replace the function's code with the shared code.
    362       function->set_code(shared->code());
    363       // Evict the code from the optimized code map.
    364       shared->EvictFromOptimizedCodeMap(code, "deoptimized function");
    365       // Remove the function from the optimized functions list.
    366       function->set_next_function_link(undefined);
    367 
    368       if (FLAG_trace_deopt) {
    369         PrintF("[forced deoptimization: ");
    370         function->PrintName();
    371         PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
    372       }
    373     } else {
    374       // Don't select this function; link it back into the list.
    375       if (remainder_head == undefined) {
    376         remainder_head = function;
    377       } else {
    378         JSFunction::cast(remainder_tail)->set_next_function_link(function);
    379       }
    380       remainder_tail = function;
    381     }
    382   }
    383   if (remainder_tail != undefined) {
    384     JSFunction::cast(remainder_tail)->set_next_function_link(undefined);
    385   }
    386   context->set(Context::OPTIMIZED_FUNCTIONS_LIST, remainder_head);
    387 }
    388 
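        // Sketch of the splitting above: given an optimized functions list
        // [f1, f2, f3] and a filter that takes only f2, f2's code is marked and
        // appended to |codes|, f2 falls back to its shared (unoptimized) code,
        // and the context's list is relinked as [f1, f3].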
    389 
    390 class DeoptimizeAllFilter : public OptimizedFunctionFilter {
    391  public:
    392   virtual bool TakeFunction(JSFunction* function) {
    393     return true;
    394   }
    395 };
    396 
    397 
    398 class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter {
    399  public:
    400   explicit DeoptimizeWithMatchingCodeFilter(Code* code) : code_(code) {}
    401   virtual bool TakeFunction(JSFunction* function) {
    402     return function->code() == code_;
    403   }
    404  private:
    405   Code* code_;
    406 };
    407 
    408 
    409 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
    410  public:
    411   virtual bool TakeFunction(JSFunction* function) {
    412     return function->code()->marked_for_deoptimization();
    413   }
    414 };
    415 
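        // A filter is simply a predicate over optimized functions. A custom
        // filter (hypothetical sketch, not used anywhere in V8) would follow
        // the same pattern as the filters in this file, e.g.:
        //
        //   class DeoptimizeBySharedFilter : public OptimizedFunctionFilter {
        //    public:
        //     explicit DeoptimizeBySharedFilter(SharedFunctionInfo* shared)
        //         : shared_(shared) {}
        //     virtual bool TakeFunction(JSFunction* function) {
        //       return function->shared() == shared_;
        //     }
        //    private:
        //     SharedFunctionInfo* shared_;
        //   };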
    416 
    417 void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
    418   DisallowHeapAllocation no_allocation;
    419 
    420   if (FLAG_trace_deopt) {
    421     PrintF("[deoptimize all contexts]\n");
    422   }
    423 
    424   DeoptimizeAllFilter filter;
    425   DeoptimizeAllFunctionsWith(isolate, &filter);
    426 }
    427 
    428 
    429 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
    430   DisallowHeapAllocation no_allocation;
    431   DeoptimizeAllFilter filter;
    432   if (object->IsJSGlobalProxy()) {
    433     Object* proto = object->GetPrototype();
    434     ASSERT(proto->IsJSGlobalObject());
    435     DeoptimizeAllFunctionsForContext(
    436         GlobalObject::cast(proto)->native_context(), &filter);
    437   } else if (object->IsGlobalObject()) {
    438     DeoptimizeAllFunctionsForContext(
    439         GlobalObject::cast(object)->native_context(), &filter);
    440   }
    441 }
    442 
    443 
    444 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
    445   Code* code = function->code();
    446   if (code->kind() != Code::OPTIMIZED_FUNCTION) return;
    447   DeoptimizeWithMatchingCodeFilter filter(code);
    448   DeoptimizeAllFunctionsForContext(
    449       function->context()->native_context(), &filter);
    450 }
    451 
    452 
    453 void Deoptimizer::DeoptimizeAllFunctionsForContext(
    454     Context* context, OptimizedFunctionFilter* filter) {
    455   ASSERT(context->IsNativeContext());
    456   Isolate* isolate = context->GetIsolate();
    457   Object* undefined = isolate->heap()->undefined_value();
    458   Zone zone(isolate);
    459   ZoneList<Code*> codes(4, &zone);
    460   SelectCodeToDeoptimize(context, filter, &codes, &zone, undefined);
    461   for (int i = 0; i < codes.length(); i++) {
    462     DeoptimizeCode(isolate, codes.at(i));
    463   }
    464 }
    465 
    466 
    467 void Deoptimizer::DeoptimizeAllFunctionsWith(Isolate* isolate,
    468                                              OptimizedFunctionFilter* filter) {
    469   DisallowHeapAllocation no_allocation;
    470 
    471   // Run through the list of all native contexts and deoptimize.
    472   Object* context = isolate->heap()->native_contexts_list();
    473   while (!context->IsUndefined()) {
    474     DeoptimizeAllFunctionsForContext(Context::cast(context), filter);
    475     context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
    476   }
    477 }
    478 
    479 
    480 void Deoptimizer::DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes) {
    481   if (codes->length() == 0) return;  // Nothing to do.
    482 
    483   // Mark the code; any functions referring to this code will be selected.
    484   for (int i = 0; i < codes->length(); i++) {
    485     ASSERT(!codes->at(i)->marked_for_deoptimization());
    486     codes->at(i)->set_marked_for_deoptimization(true);
    487   }
    488 
    489   // For all contexts, remove optimized functions that refer to the selected
    490   // code from the optimized function lists.
    491   Object* undefined = isolate->heap()->undefined_value();
    492   Zone zone(isolate);
    493   Object* list = isolate->heap()->native_contexts_list();
    494   DeoptimizeMarkedCodeFilter filter;
    495   while (!list->IsUndefined()) {
    496     Context* context = Context::cast(list);
    497     // Note that selecting code unlinks the functions that refer to it.
    498     SelectCodeToDeoptimize(context, &filter, codes, &zone, undefined);
    499     list = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
    500   }
    501 
    502   // Now deoptimize all the code.
    503   for (int i = 0; i < codes->length(); i++) {
    504     DeoptimizeCode(isolate, codes->at(i));
    505   }
    506 }
    507 
    508 
    509 void Deoptimizer::DeoptimizeCode(Isolate* isolate, Code* code) {
    510   HandleScope scope(isolate);
    511   DisallowHeapAllocation nha;
    512 
    513   // Do platform-specific patching of the optimized code.
    514   PatchCodeForDeoptimization(isolate, code);
    515 
    516   // Add the deoptimizing code to the list.
    517   DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
    518   DeoptimizerData* data = isolate->deoptimizer_data();
    519   node->set_next(data->deoptimizing_code_list_);
    520   data->deoptimizing_code_list_ = node;
    521 
    522   // We might be in the middle of incremental marking with compaction.
    523   // Tell the collector to treat this code object specially and to
    524   // ignore all slots that might have been recorded on it.
    525   isolate->heap()->mark_compact_collector()->InvalidateCode(code);
    526 }
    527 
    528 
    529 void Deoptimizer::HandleWeakDeoptimizedCode(v8::Isolate* isolate,
    530                                             v8::Persistent<v8::Value>* obj,
    531                                             void* parameter) {
    532   DeoptimizingCodeListNode* node =
    533       reinterpret_cast<DeoptimizingCodeListNode*>(parameter);
    534   DeoptimizerData* data =
    535       reinterpret_cast<Isolate*>(isolate)->deoptimizer_data();
    536   data->RemoveDeoptimizingCode(*node->code());
    537 #ifdef DEBUG
    538   for (DeoptimizingCodeListNode* current = data->deoptimizing_code_list_;
    539        current != NULL;
    540        current = current->next()) {
    541     ASSERT(current != node);
    542   }
    543 #endif
    544 }
    545 
    546 
    547 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
    548   deoptimizer->DoComputeOutputFrames();
    549 }
    550 
    551 
    552 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
    553                                   StackFrame::Type frame_type) {
    554   switch (deopt_type) {
    555     case EAGER:
    556     case SOFT:
    557     case LAZY:
    558     case DEBUGGER:
    559       return (frame_type == StackFrame::STUB)
    560           ? FLAG_trace_stub_failures
    561           : FLAG_trace_deopt;
    562     case OSR:
    563       return FLAG_trace_osr;
    564   }
    565   UNREACHABLE();
    566   return false;
    567 }
    568 
    569 
    570 const char* Deoptimizer::MessageFor(BailoutType type) {
    571   switch (type) {
    572     case EAGER: return "eager";
    573     case SOFT: return "soft";
    574     case LAZY: return "lazy";
    575     case DEBUGGER: return "debugger";
    576     case OSR: return "OSR";
    577   }
    578   UNREACHABLE();
    579   return NULL;
    580 }
    581 
    582 
    583 Deoptimizer::Deoptimizer(Isolate* isolate,
    584                          JSFunction* function,
    585                          BailoutType type,
    586                          unsigned bailout_id,
    587                          Address from,
    588                          int fp_to_sp_delta,
    589                          Code* optimized_code)
    590     : isolate_(isolate),
    591       function_(function),
    592       bailout_id_(bailout_id),
    593       bailout_type_(type),
    594       from_(from),
    595       fp_to_sp_delta_(fp_to_sp_delta),
    596       has_alignment_padding_(0),
    597       input_(NULL),
    598       output_count_(0),
    599       jsframe_count_(0),
    600       output_(NULL),
    601       deferred_objects_tagged_values_(0),
    602       deferred_objects_double_values_(0),
    603       deferred_objects_(0),
    604       deferred_heap_numbers_(0),
    605       jsframe_functions_(0),
    606       jsframe_has_adapted_arguments_(0),
    607       materialized_values_(NULL),
    608       materialized_objects_(NULL),
    609       materialization_value_index_(0),
    610       materialization_object_index_(0),
    611       trace_(false) {
    612   // For COMPILED_STUBs called from builtins, the function pointer is a SMI
    613   // indicating an internal frame.
    614   if (function->IsSmi()) {
    615     function = NULL;
    616   }
    617   ASSERT(from != NULL);
    618   if (function != NULL && function->IsOptimized()) {
    619     function->shared()->increment_deopt_count();
    620     if (bailout_type_ == Deoptimizer::SOFT) {
    621       isolate->counters()->soft_deopts_executed()->Increment();
    622       // Soft deopts shouldn't count against the overall re-optimization count
    623       // that can eventually lead to disabling optimization for a function.
    624       int opt_count = function->shared()->opt_count();
    625       if (opt_count > 0) opt_count--;
    626       function->shared()->set_opt_count(opt_count);
    627     }
    628   }
    629   compiled_code_ = FindOptimizedCode(function, optimized_code);
    630   StackFrame::Type frame_type = function == NULL
    631       ? StackFrame::STUB
    632       : StackFrame::JAVA_SCRIPT;
    633   trace_ = TraceEnabledFor(type, frame_type);
    634 #ifdef DEBUG
    635   CHECK(AllowHeapAllocation::IsAllowed());
    636   disallow_heap_allocation_ = new DisallowHeapAllocation();
    637 #endif  // DEBUG
    638   unsigned size = ComputeInputFrameSize();
    639   input_ = new(size) FrameDescription(size, function);
    640   input_->SetFrameType(frame_type);
    641 }
    642 
    643 
    644 Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
    645                                      Code* optimized_code) {
    646   switch (bailout_type_) {
    647     case Deoptimizer::SOFT:
    648     case Deoptimizer::EAGER:
    649     case Deoptimizer::LAZY: {
    650       Code* compiled_code =
    651           isolate_->deoptimizer_data()->FindDeoptimizingCode(from_);
    652       return (compiled_code == NULL)
    653           ? static_cast<Code*>(isolate_->FindCodeObject(from_))
    654           : compiled_code;
    655     }
    656     case Deoptimizer::OSR: {
    657       // The function has already been optimized and we're transitioning
    658       // from the unoptimized shared version to the optimized one in the
    659       // function. The return address (from_) points to unoptimized code.
    660       Code* compiled_code = function->code();
    661       ASSERT(compiled_code->kind() == Code::OPTIMIZED_FUNCTION);
    662       ASSERT(!compiled_code->contains(from_));
    663       return compiled_code;
    664     }
    665     case Deoptimizer::DEBUGGER:
    666       ASSERT(optimized_code->contains(from_));
    667       return optimized_code;
    668   }
    669   UNREACHABLE();
    670   return NULL;
    671 }
    672 
    673 
    674 void Deoptimizer::PrintFunctionName() {
    675   if (function_->IsJSFunction()) {
    676     function_->PrintName();
    677   } else {
    678     PrintF("%s", Code::Kind2String(compiled_code_->kind()));
    679   }
    680 }
    681 
    682 
    683 Deoptimizer::~Deoptimizer() {
    684   ASSERT(input_ == NULL && output_ == NULL);
    685   ASSERT(disallow_heap_allocation_ == NULL);
    686 }
    687 
    688 
    689 void Deoptimizer::DeleteFrameDescriptions() {
    690   delete input_;
    691   for (int i = 0; i < output_count_; ++i) {
    692     if (output_[i] != input_) delete output_[i];
    693   }
    694   delete[] output_;
    695   input_ = NULL;
    696   output_ = NULL;
    697 #ifdef DEBUG
    698   CHECK(!AllowHeapAllocation::IsAllowed());
    699   CHECK(disallow_heap_allocation_ != NULL);
    700   delete disallow_heap_allocation_;
    701   disallow_heap_allocation_ = NULL;
    702 #endif  // DEBUG
    703 }
    704 
    705 
    706 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
    707                                             int id,
    708                                             BailoutType type,
    709                                             GetEntryMode mode) {
    710   ASSERT(id >= 0);
    711   if (id >= kMaxNumberOfEntries) return NULL;
    712   if (mode == ENSURE_ENTRY_CODE) {
    713     EnsureCodeForDeoptimizationEntry(isolate, type, id);
    714   } else {
    715     ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
    716   }
    717   DeoptimizerData* data = isolate->deoptimizer_data();
    718   ASSERT(type < kBailoutTypesWithCodeEntry);
    719   MemoryChunk* base = data->deopt_entry_code_[type];
    720   return base->area_start() + (id * table_entry_size_);
    721 }
    722 
    723 
    724 int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
    725                                      Address addr,
    726                                      BailoutType type) {
    727   DeoptimizerData* data = isolate->deoptimizer_data();
    728   MemoryChunk* base = data->deopt_entry_code_[type];
    729   Address start = base->area_start();
    730   if (base == NULL ||
    731       addr < start ||
    732       addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    733     return kNotDeoptimizationEntry;
    734   }
    735   ASSERT_EQ(0,
    736             static_cast<int>(addr - start) % table_entry_size_);
    737   return static_cast<int>(addr - start) / table_entry_size_;
    738 }
    739 
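        // The entry id and its address are related by simple arithmetic
        // (illustrative numbers; table_entry_size_ is architecture-specific):
        //   address = base->area_start() + id * table_entry_size_
        //   id      = (address - base->area_start()) / table_entry_size_
        // e.g. with table_entry_size_ == 10, entry 3 starts 30 bytes past the
        // chunk's area start and maps back to id 3.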
    740 
    741 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
    742                                BailoutId id,
    743                                SharedFunctionInfo* shared) {
    744   // TODO(kasperl): For now, we do a simple linear search for the PC
    745   // offset associated with the given node id. This should probably be
    746   // changed to a binary search.
    747   int length = data->DeoptPoints();
    748   for (int i = 0; i < length; i++) {
    749     if (data->AstId(i) == id) {
    750       return data->PcAndState(i)->value();
    751     }
    752   }
    753   PrintF("[couldn't find pc offset for node=%d]\n", id.ToInt());
    754   PrintF("[method: %s]\n", *shared->DebugName()->ToCString());
    755   // Print the source code if available.
    756   HeapStringAllocator string_allocator;
    757   StringStream stream(&string_allocator);
    758   shared->SourceCodePrint(&stream, -1);
    759   PrintF("[source:\n%s\n]", *stream.ToCString());
    760 
    761   FATAL("unable to find pc offset during deoptimization");
    762   return -1;
    763 }
    764 
    765 
    766 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
    767   int length = 0;
    768   DeoptimizingCodeListNode* node =
    769       isolate->deoptimizer_data()->deoptimizing_code_list_;
    770   while (node != NULL) {
    771     length++;
    772     node = node->next();
    773   }
    774   return length;
    775 }
    776 
    777 
    778 // We rely on this function not causing a GC.  It is called from generated code
    779 // without having a real stack frame in place.
    780 void Deoptimizer::DoComputeOutputFrames() {
    781   if (bailout_type_ == OSR) {
    782     DoComputeOsrOutputFrame();
    783     return;
    784   }
    785 
    786   // Print some helpful diagnostic information.
    787   int64_t start = OS::Ticks();
    788   if (FLAG_log_timer_events &&
    789       compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    790     LOG(isolate(), CodeDeoptEvent(compiled_code_));
    791   }
    792   if (trace_) {
    793     PrintF("[deoptimizing (DEOPT %s): begin 0x%08" V8PRIxPTR " ",
    794            MessageFor(bailout_type_),
    795            reinterpret_cast<intptr_t>(function_));
    796     PrintFunctionName();
    797     PrintF(" @%d, FP to SP delta: %d]\n", bailout_id_, fp_to_sp_delta_);
    798     if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
    799       compiled_code_->PrintDeoptLocation(bailout_id_);
    800     }
    801   }
    802 
    803   // Determine basic deoptimization information.  The optimized frame is
    804   // described by the input data.
    805   DeoptimizationInputData* input_data =
    806       DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
    807   BailoutId node_id = input_data->AstId(bailout_id_);
    808   ByteArray* translations = input_data->TranslationByteArray();
    809   unsigned translation_index =
    810       input_data->TranslationIndex(bailout_id_)->value();
    811 
    812   // Do the input frame to output frame(s) translation.
    813   TranslationIterator iterator(translations, translation_index);
    814   Translation::Opcode opcode =
    815       static_cast<Translation::Opcode>(iterator.Next());
    816   ASSERT(Translation::BEGIN == opcode);
    817   USE(opcode);
    818   // Read the number of output frames and allocate an array for their
    819   // descriptions.
    820   int count = iterator.Next();
    821   iterator.Next();  // Drop JS frames count.
    822   ASSERT(output_ == NULL);
    823   output_ = new FrameDescription*[count];
    824   for (int i = 0; i < count; ++i) {
    825     output_[i] = NULL;
    826   }
    827   output_count_ = count;
    828 
    829   // Translate each output frame.
    830   for (int i = 0; i < count; ++i) {
    831     // Read the ast node id, function, and frame height for this output frame.
    832     Translation::Opcode opcode =
    833         static_cast<Translation::Opcode>(iterator.Next());
    834     switch (opcode) {
    835       case Translation::JS_FRAME:
    836         DoComputeJSFrame(&iterator, i);
    837         jsframe_count_++;
    838         break;
    839       case Translation::ARGUMENTS_ADAPTOR_FRAME:
    840         DoComputeArgumentsAdaptorFrame(&iterator, i);
    841         break;
    842       case Translation::CONSTRUCT_STUB_FRAME:
    843         DoComputeConstructStubFrame(&iterator, i);
    844         break;
    845       case Translation::GETTER_STUB_FRAME:
    846         DoComputeAccessorStubFrame(&iterator, i, false);
    847         break;
    848       case Translation::SETTER_STUB_FRAME:
    849         DoComputeAccessorStubFrame(&iterator, i, true);
    850         break;
    851       case Translation::COMPILED_STUB_FRAME:
    852         DoComputeCompiledStubFrame(&iterator, i);
    853         break;
    854       case Translation::BEGIN:
    855       case Translation::REGISTER:
    856       case Translation::INT32_REGISTER:
    857       case Translation::UINT32_REGISTER:
    858       case Translation::DOUBLE_REGISTER:
    859       case Translation::STACK_SLOT:
    860       case Translation::INT32_STACK_SLOT:
    861       case Translation::UINT32_STACK_SLOT:
    862       case Translation::DOUBLE_STACK_SLOT:
    863       case Translation::LITERAL:
    864       case Translation::ARGUMENTS_OBJECT:
    865       default:
    866         UNREACHABLE();
    867         break;
    868     }
    869   }
    870 
    871   // Print some helpful diagnostic information.
    872   if (trace_) {
    873     double ms = static_cast<double>(OS::Ticks() - start) / 1000;
    874     int index = output_count_ - 1;  // Index of the topmost frame.
    875     JSFunction* function = output_[index]->GetFunction();
    876     PrintF("[deoptimizing (%s): end 0x%08" V8PRIxPTR " ",
    877            MessageFor(bailout_type_),
    878            reinterpret_cast<intptr_t>(function));
    879     PrintFunctionName();
    880     PrintF(" @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
    881            " took %0.3f ms]\n",
    882            bailout_id_,
    883            node_id.ToInt(),
    884            output_[index]->GetPc(),
    885            FullCodeGenerator::State2String(
    886                static_cast<FullCodeGenerator::State>(
    887                    output_[index]->GetState()->value())),
    888            has_alignment_padding_ ? "with padding" : "no padding",
    889            ms);
    890   }
    891 }
    892 
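        // Schematically, the translation stream consumed above looks like
        // (opcode sketch only, not a byte-level format):
        //   BEGIN frame-count js-frame-count
        //   JS_FRAME ast-id function height
        //     ... one translation command per slot ...
        //   ARGUMENTS_ADAPTOR_FRAME function height
        //     ... one translation command per slot ...
        // with one frame record per entry written into output_.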
    893 
    894 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
    895                                    int frame_index) {
    896   BailoutId node_id = BailoutId(iterator->Next());
    897   JSFunction* function;
    898   if (frame_index != 0) {
    899     function = JSFunction::cast(ComputeLiteral(iterator->Next()));
    900   } else {
    901     int closure_id = iterator->Next();
    902     USE(closure_id);
    903     ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
    904     function = function_;
    905   }
    906   unsigned height = iterator->Next();
    907   unsigned height_in_bytes = height * kPointerSize;
    908   if (trace_) {
    909     PrintF("  translating ");
    910     function->PrintName();
    911     PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
    912   }
    913 
    914   // The 'fixed' part of the frame consists of the incoming parameters and
    915   // the part described by JavaScriptFrameConstants.
    916   unsigned fixed_frame_size = ComputeFixedSize(function);
    917   unsigned input_frame_size = input_->GetFrameSize();
    918   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
    919 
    920   // Allocate and store the output frame description.
    921   FrameDescription* output_frame =
    922       new(output_frame_size) FrameDescription(output_frame_size, function);
    923   output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
    924 
    925   bool is_bottommost = (0 == frame_index);
    926   bool is_topmost = (output_count_ - 1 == frame_index);
    927   ASSERT(frame_index >= 0 && frame_index < output_count_);
    928   ASSERT(output_[frame_index] == NULL);
    929   output_[frame_index] = output_frame;
    930 
    931   // The top address for the bottommost output frame can be computed from
    932   // the input frame pointer and the output frame's height.  For all
    933   // subsequent output frames, it can be computed from the previous one's
    934   // top address and the current frame's size.
    935   Register fp_reg = JavaScriptFrame::fp_register();
    936   intptr_t top_address;
    937   if (is_bottommost) {
    938     // Determine whether the input frame contains alignment padding.
    939     has_alignment_padding_ = HasAlignmentPadding(function) ? 1 : 0;
    940     // If the optimized frame had alignment padding, adjust the frame pointer
    941     // to point to the new position of the old frame pointer after the
    942     // padding is removed. Subtract 2 * kPointerSize for the context and
    943     // function slots.
    944     top_address = input_->GetRegister(fp_reg.code()) - (2 * kPointerSize) -
    945         height_in_bytes + has_alignment_padding_ * kPointerSize;
    946   } else {
    947     top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
    948   }
    949   output_frame->SetTop(top_address);
    950 
    951   // Compute the incoming parameter translation.
    952   int parameter_count = function->shared()->formal_parameter_count() + 1;
    953   unsigned output_offset = output_frame_size;
    954   unsigned input_offset = input_frame_size;
    955   for (int i = 0; i < parameter_count; ++i) {
    956     output_offset -= kPointerSize;
    957     DoTranslateCommand(iterator, frame_index, output_offset);
    958   }
    959   input_offset -= (parameter_count * kPointerSize);
    960 
    961   // There are no translation commands for the caller's pc and fp, the
    962   // context, and the function.  Synthesize their values and set them up
    963   // explicitly.
    964   //
    965   // The caller's pc for the bottommost output frame is the same as in the
    966   // input frame.  For all subsequent output frames, it can be read from the
    967   // previous one.  This frame's pc can be computed from the non-optimized
    968   // function code and AST id of the bailout.
    969   output_offset -= kPCOnStackSize;
    970   input_offset -= kPCOnStackSize;
    971   intptr_t value;
    972   if (is_bottommost) {
    973     value = input_->GetFrameSlot(input_offset);
    974   } else {
    975     value = output_[frame_index - 1]->GetPc();
    976   }
    977   output_frame->SetCallerPc(output_offset, value);
    978   if (trace_) {
    979     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
    980            V8PRIxPTR  " ; caller's pc\n",
    981            top_address + output_offset, output_offset, value);
    982   }
    983 
    984   // The caller's frame pointer for the bottommost output frame is the same
    985   // as in the input frame.  For all subsequent output frames, it can be
    986   // read from the previous one.  Also compute and set this frame's frame
    987   // pointer.
    988   output_offset -= kFPOnStackSize;
    989   input_offset -= kFPOnStackSize;
    990   if (is_bottommost) {
    991     value = input_->GetFrameSlot(input_offset);
    992   } else {
    993     value = output_[frame_index - 1]->GetFp();
    994   }
    995   output_frame->SetCallerFp(output_offset, value);
    996   intptr_t fp_value = top_address + output_offset;
    997   ASSERT(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
    998       has_alignment_padding_ * kPointerSize) == fp_value);
    999   output_frame->SetFp(fp_value);
   1000   if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
   1001   if (trace_) {
   1002     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1003            V8PRIxPTR " ; caller's fp\n",
   1004            fp_value, output_offset, value);
   1005   }
   1006   ASSERT(!is_bottommost || !has_alignment_padding_ ||
   1007          (fp_value & kPointerSize) != 0);
   1008 
   1009   // For the bottommost output frame the context is read from the input
   1010   // frame. For all subsequent output frames it is taken from the function,
   1011   // so long as we don't inline functions that need local contexts.
   1012   Register context_reg = JavaScriptFrame::context_register();
   1013   output_offset -= kPointerSize;
   1014   input_offset -= kPointerSize;
   1015   if (is_bottommost) {
   1016     value = input_->GetFrameSlot(input_offset);
   1017   } else {
   1018     value = reinterpret_cast<intptr_t>(function->context());
   1019   }
   1020   output_frame->SetFrameSlot(output_offset, value);
   1021   output_frame->SetContext(value);
   1022   if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
   1023   if (trace_) {
   1024     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1025            V8PRIxPTR "; context\n",
   1026            top_address + output_offset, output_offset, value);
   1027   }
   1028 
   1029   // The function was mentioned explicitly in the BEGIN_FRAME.
   1030   output_offset -= kPointerSize;
   1031   input_offset -= kPointerSize;
   1032   value = reinterpret_cast<intptr_t>(function);
   1033   // The function for the bottommost output frame should also agree with the
   1034   // input frame.
   1035   ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
   1036   output_frame->SetFrameSlot(output_offset, value);
   1037   if (trace_) {
   1038     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1039            V8PRIxPTR "; function\n",
   1040            top_address + output_offset, output_offset, value);
   1041   }
   1042 
   1043   // Translate the rest of the frame.
   1044   for (unsigned i = 0; i < height; ++i) {
   1045     output_offset -= kPointerSize;
   1046     DoTranslateCommand(iterator, frame_index, output_offset);
   1047   }
   1048   ASSERT(0 == output_offset);
   1049 
   1050   // Compute this frame's PC, state, and continuation.
   1051   Code* non_optimized_code = function->shared()->code();
   1052   FixedArray* raw_data = non_optimized_code->deoptimization_data();
   1053   DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
   1054   Address start = non_optimized_code->instruction_start();
   1055   unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
   1056   unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
   1057   intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
   1058   output_frame->SetPc(pc_value);
   1059 
   1060   FullCodeGenerator::State state =
   1061       FullCodeGenerator::StateField::decode(pc_and_state);
   1062   output_frame->SetState(Smi::FromInt(state));
   1063 
   1064   // Set the continuation for the topmost frame.
   1065   if (is_topmost && bailout_type_ != DEBUGGER) {
   1066     Builtins* builtins = isolate_->builtins();
   1067     Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
   1068     if (bailout_type_ == LAZY) {
   1069       continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
   1070     } else if (bailout_type_ == SOFT) {
   1071       continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
   1072     } else {
   1073       ASSERT(bailout_type_ == EAGER);
   1074     }
   1075     output_frame->SetContinuation(
   1076         reinterpret_cast<intptr_t>(continuation->entry()));
   1077   }
   1078 }
   1079 
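        // Layout of the JS frame built above, from high to low addresses, one
        // pointer-sized slot per line (mirrors the writes in the code above):
        //   incoming parameters (including the receiver)
        //   caller's pc
        //   caller's fp            <-- this frame's fp
        //   context
        //   function
        //   locals and expression stack (height slots)
        //                          <-- this frame's top (output_offset == 0)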
   1080 
   1081 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
   1082                                                  int frame_index) {
   1083   JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
   1084   unsigned height = iterator->Next();
   1085   unsigned height_in_bytes = height * kPointerSize;
   1086   if (trace_) {
   1087     PrintF("  translating arguments adaptor => height=%d\n", height_in_bytes);
   1088   }
   1089 
   1090   unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
   1091   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
   1092 
   1093   // Allocate and store the output frame description.
   1094   FrameDescription* output_frame =
   1095       new(output_frame_size) FrameDescription(output_frame_size, function);
   1096   output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
   1097 
   1098   // Arguments adaptor can not be topmost or bottommost.
   1099   // An arguments adaptor frame cannot be topmost or bottommost.
   1100   ASSERT(output_[frame_index] == NULL);
   1101   output_[frame_index] = output_frame;
   1102 
   1103   // The top address of the frame is computed from the previous
   1104   // frame's top and this frame's size.
   1105   intptr_t top_address;
   1106   top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1107   output_frame->SetTop(top_address);
   1108 
   1109   // Compute the incoming parameter translation.
   1110   int parameter_count = height;
   1111   unsigned output_offset = output_frame_size;
   1112   for (int i = 0; i < parameter_count; ++i) {
   1113     output_offset -= kPointerSize;
   1114     DoTranslateCommand(iterator, frame_index, output_offset);
   1115   }
   1116 
   1117   // Read caller's PC from the previous frame.
   1118   output_offset -= kPCOnStackSize;
   1119   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   1120   output_frame->SetCallerPc(output_offset, callers_pc);
   1121   if (trace_) {
   1122     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1123            V8PRIxPTR " ; caller's pc\n",
   1124            top_address + output_offset, output_offset, callers_pc);
   1125   }
   1126 
   1127   // Read caller's FP from the previous frame, and set this frame's FP.
   1128   output_offset -= kFPOnStackSize;
   1129   intptr_t value = output_[frame_index - 1]->GetFp();
   1130   output_frame->SetCallerFp(output_offset, value);
   1131   intptr_t fp_value = top_address + output_offset;
   1132   output_frame->SetFp(fp_value);
   1133   if (trace_) {
   1134     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1135            V8PRIxPTR " ; caller's fp\n",
   1136            fp_value, output_offset, value);
   1137   }
   1138 
   1139   // A marker value is used in place of the context.
   1140   output_offset -= kPointerSize;
   1141   intptr_t context = reinterpret_cast<intptr_t>(
   1142       Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   1143   output_frame->SetFrameSlot(output_offset, context);
   1144   if (trace_) {
   1145     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1146            V8PRIxPTR " ; context (adaptor sentinel)\n",
   1147            top_address + output_offset, output_offset, context);
   1148   }
   1149 
   1150   // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
   1151   output_offset -= kPointerSize;
   1152   value = reinterpret_cast<intptr_t>(function);
   1153   output_frame->SetFrameSlot(output_offset, value);
   1154   if (trace_) {
   1155     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1156            V8PRIxPTR " ; function\n",
   1157            top_address + output_offset, output_offset, value);
   1158   }
   1159 
   1160   // Number of incoming arguments.
   1161   output_offset -= kPointerSize;
   1162   value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
   1163   output_frame->SetFrameSlot(output_offset, value);
   1164   if (trace_) {
   1165     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1166            V8PRIxPTR " ; argc (%d)\n",
   1167            top_address + output_offset, output_offset, value, height - 1);
   1168   }
   1169 
   1170   ASSERT(0 == output_offset);
   1171 
   1172   Builtins* builtins = isolate_->builtins();
   1173   Code* adaptor_trampoline =
   1174       builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
   1175   intptr_t pc_value = reinterpret_cast<intptr_t>(
   1176       adaptor_trampoline->instruction_start() +
   1177       isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
   1178   output_frame->SetPc(pc_value);
   1179 }
   1180 
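        // Layout of the adaptor frame built above, from high to low addresses:
        //   incoming arguments (height slots)
        //   caller's pc
        //   caller's fp            <-- this frame's fp
        //   ARGUMENTS_ADAPTOR sentinel (in the context slot)
        //   function
        //   argc as a Smi          <-- this frame's top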
   1181 
   1182 void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
   1183                                               int frame_index) {
   1184   Builtins* builtins = isolate_->builtins();
   1185   Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
   1186   JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
   1187   unsigned height = iterator->Next();
   1188   unsigned height_in_bytes = height * kPointerSize;
   1189   if (trace_) {
   1190     PrintF("  translating construct stub => height=%d\n", height_in_bytes);
   1191   }
   1192 
   1193   unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
   1194   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
   1195 
   1196   // Allocate and store the output frame description.
   1197   FrameDescription* output_frame =
   1198       new(output_frame_size) FrameDescription(output_frame_size, function);
   1199   output_frame->SetFrameType(StackFrame::CONSTRUCT);
   1200 
   1201   // Construct stub can not be topmost or bottommost.
   1202   // A construct stub frame cannot be topmost or bottommost.
   1203   ASSERT(output_[frame_index] == NULL);
   1204   output_[frame_index] = output_frame;
   1205 
   1206   // The top address of the frame is computed from the previous
   1207   // frame's top and this frame's size.
   1208   intptr_t top_address;
   1209   top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1210   output_frame->SetTop(top_address);
   1211 
   1212   // Compute the incoming parameter translation.
   1213   int parameter_count = height;
   1214   unsigned output_offset = output_frame_size;
   1215   for (int i = 0; i < parameter_count; ++i) {
   1216     output_offset -= kPointerSize;
   1217     int deferred_object_index = deferred_objects_.length();
   1218     DoTranslateCommand(iterator, frame_index, output_offset);
   1219     // The allocated receiver of a construct stub frame is passed as the
   1220     // receiver parameter through the translation. It might encode a
   1221     // captured object; if so, patch the slot address of that object.
   1222     if (i == 0 && deferred_objects_.length() > deferred_object_index) {
   1223       ASSERT(!deferred_objects_[deferred_object_index].is_arguments());
   1224       deferred_objects_[deferred_object_index].patch_slot_address(top_address);
   1225     }
   1226   }
   1227 
   1228   // Read caller's PC from the previous frame.
   1229   output_offset -= kPCOnStackSize;
   1230   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   1231   output_frame->SetCallerPc(output_offset, callers_pc);
   1232   if (trace_) {
   1233     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1234            V8PRIxPTR " ; caller's pc\n",
   1235            top_address + output_offset, output_offset, callers_pc);
   1236   }
   1237 
   1238   // Read caller's FP from the previous frame, and set this frame's FP.
   1239   output_offset -= kFPOnStackSize;
   1240   intptr_t value = output_[frame_index - 1]->GetFp();
   1241   output_frame->SetCallerFp(output_offset, value);
   1242   intptr_t fp_value = top_address + output_offset;
   1243   output_frame->SetFp(fp_value);
   1244   if (trace_) {
   1245     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1246            V8PRIxPTR " ; caller's fp\n",
   1247            fp_value, output_offset, value);
   1248   }
   1249 
   1250   // The context is read from the previous frame.
   1251   output_offset -= kPointerSize;
   1252   value = output_[frame_index - 1]->GetContext();
   1253   output_frame->SetFrameSlot(output_offset, value);
   1254   if (trace_) {
   1255     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1256            V8PRIxPTR " ; context\n",
   1257            top_address + output_offset, output_offset, value);
   1258   }
   1259 
   1260   // A marker value is used in place of the function.
   1261   output_offset -= kPointerSize;
   1262   value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
   1263   output_frame->SetFrameSlot(output_offset, value);
   1264   if (trace_) {
   1265     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1266            V8PRIxPTR " ; function (construct sentinel)\n",
   1267            top_address + output_offset, output_offset, value);
   1268   }
   1269 
   1270   // The output frame reflects a JSConstructStubGeneric frame.
   1271   output_offset -= kPointerSize;
   1272   value = reinterpret_cast<intptr_t>(construct_stub);
   1273   output_frame->SetFrameSlot(output_offset, value);
   1274   if (trace_) {
   1275     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1276            V8PRIxPTR " ; code object\n",
   1277            top_address + output_offset, output_offset, value);
   1278   }
   1279 
   1280   // Number of incoming arguments.
   1281   output_offset -= kPointerSize;
   1282   value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
   1283   output_frame->SetFrameSlot(output_offset, value);
   1284   if (trace_) {
   1285     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1286            V8PRIxPTR " ; argc (%d)\n",
   1287            top_address + output_offset, output_offset, value, height - 1);
   1288   }
   1289 
   1290   // Constructor function being invoked by the stub (only present on some
   1291   // architectures, indicated by kConstructorOffset).
   1292   if (ConstructFrameConstants::kConstructorOffset != kMinInt) {
   1293     output_offset -= kPointerSize;
   1294     value = reinterpret_cast<intptr_t>(function);
   1295     output_frame->SetFrameSlot(output_offset, value);
   1296     if (trace_) {
   1297       PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1298              V8PRIxPTR " ; constructor function\n",
   1299              top_address + output_offset, output_offset, value);
   1300     }
   1301   }
   1302 
   1303   // The newly allocated object was passed as receiver in the artificial
   1304   // constructor stub environment created by HEnvironment::CopyForInlining().
   1305   output_offset -= kPointerSize;
   1306   value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
   1307   output_frame->SetFrameSlot(output_offset, value);
   1308   if (trace_) {
   1309     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1310            V8PRIxPTR " ; allocated receiver\n",
   1311            top_address + output_offset, output_offset, value);
   1312   }
   1313 
   1314   ASSERT(0 == output_offset);
   1315 
   1316   intptr_t pc = reinterpret_cast<intptr_t>(
   1317       construct_stub->instruction_start() +
   1318       isolate_->heap()->construct_stub_deopt_pc_offset()->value());
   1319   output_frame->SetPc(pc);
   1320 }
   1321 
   1322 
   1323 void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
   1324                                              int frame_index,
   1325                                              bool is_setter_stub_frame) {
   1326   JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
   1327   // The receiver (and the implicit return value, if any) are expected in
   1328   // registers by the LoadIC/StoreIC, so they don't belong to the output stack
   1329   // frame. This means that we have to use a height of 0.
   1330   unsigned height = 0;
   1331   unsigned height_in_bytes = height * kPointerSize;
   1332   const char* kind = is_setter_stub_frame ? "setter" : "getter";
   1333   if (trace_) {
   1334     PrintF("  translating %s stub => height=%u\n", kind, height_in_bytes);
   1335   }
   1336 
   1337   // We need 1 stack entry for the return address + 4 stack entries from
   1338   // StackFrame::INTERNAL (FP, context, frame type, code object, see
   1339   // MacroAssembler::EnterFrame). For a setter stub frame we need one additional
   1340   // entry for the implicit return value, see
   1341   // StoreStubCompiler::CompileStoreViaSetter.
   1342   unsigned fixed_frame_entries = (kPCOnStackSize / kPointerSize) +
   1343                                  (kFPOnStackSize / kPointerSize) + 3 +
   1344                                  (is_setter_stub_frame ? 1 : 0);
   1345   unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
   1346   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
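          // For example, on a 32-bit target where kPCOnStackSize, kFPOnStackSize
          // and kPointerSize are all 4 bytes, this amounts to 5 slots (20 bytes)
          // for a getter frame and 6 slots (24 bytes) for a setter frame.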
   1347 
   1348   // Allocate and store the output frame description.
   1349   FrameDescription* output_frame =
   1350       new(output_frame_size) FrameDescription(output_frame_size, accessor);
   1351   output_frame->SetFrameType(StackFrame::INTERNAL);
   1352 
   1353   // A frame for an accessor stub cannot be the topmost or bottommost one.
   1354   ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
   1355   ASSERT(output_[frame_index] == NULL);
   1356   output_[frame_index] = output_frame;
   1357 
   1358   // The top address of the frame is computed from the previous frame's top and
   1359   // this frame's size.
   1360   intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
   1361   output_frame->SetTop(top_address);
   1362 
   1363   unsigned output_offset = output_frame_size;
   1364 
   1365   // Read caller's PC from the previous frame.
   1366   output_offset -= kPCOnStackSize;
   1367   intptr_t callers_pc = output_[frame_index - 1]->GetPc();
   1368   output_frame->SetCallerPc(output_offset, callers_pc);
   1369   if (trace_) {
   1370     PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
   1371            " ; caller's pc\n",
   1372            top_address + output_offset, output_offset, callers_pc);
   1373   }
   1374 
   1375   // Read caller's FP from the previous frame, and set this frame's FP.
   1376   output_offset -= kFPOnStackSize;
   1377   intptr_t value = output_[frame_index - 1]->GetFp();
   1378   output_frame->SetCallerFp(output_offset, value);
   1379   intptr_t fp_value = top_address + output_offset;
   1380   output_frame->SetFp(fp_value);
   1381   if (trace_) {
   1382     PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
   1383            " ; caller's fp\n",
   1384            fp_value, output_offset, value);
   1385   }
   1386 
   1387   // The context is copied from the previous frame.
   1388   output_offset -= kPointerSize;
   1389   value = output_[frame_index - 1]->GetContext();
   1390   output_frame->SetFrameSlot(output_offset, value);
   1391   if (trace_) {
   1392     PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
   1393            " ; context\n",
   1394            top_address + output_offset, output_offset, value);
   1395   }
   1396 
   1397   // A marker value is used in place of the function.
   1398   output_offset -= kPointerSize;
   1399   value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
   1400   output_frame->SetFrameSlot(output_offset, value);
   1401   if (trace_) {
   1402     PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
   1403            " ; function (%s sentinel)\n",
   1404            top_address + output_offset, output_offset, value, kind);
   1405   }
   1406 
   1407   // Get Code object from accessor stub.
   1408   output_offset -= kPointerSize;
   1409   Builtins::Name name = is_setter_stub_frame ?
   1410       Builtins::kStoreIC_Setter_ForDeopt :
   1411       Builtins::kLoadIC_Getter_ForDeopt;
   1412   Code* accessor_stub = isolate_->builtins()->builtin(name);
   1413   value = reinterpret_cast<intptr_t>(accessor_stub);
   1414   output_frame->SetFrameSlot(output_offset, value);
   1415   if (trace_) {
   1416     PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
   1417            " ; code object\n",
   1418            top_address + output_offset, output_offset, value);
   1419   }
   1420 
   1421   // Skip receiver.
   1422   Translation::Opcode opcode =
   1423       static_cast<Translation::Opcode>(iterator->Next());
   1424   iterator->Skip(Translation::NumberOfOperandsFor(opcode));
   1425 
   1426   if (is_setter_stub_frame) {
   1427     // The implicit return value was part of the artificial setter stub
   1428     // environment.
   1429     output_offset -= kPointerSize;
   1430     DoTranslateCommand(iterator, frame_index, output_offset);
   1431   }
   1432 
   1433   ASSERT(0 == output_offset);
   1434 
   1435   Smi* offset = is_setter_stub_frame ?
   1436       isolate_->heap()->setter_stub_deopt_pc_offset() :
   1437       isolate_->heap()->getter_stub_deopt_pc_offset();
   1438   intptr_t pc = reinterpret_cast<intptr_t>(
   1439       accessor_stub->instruction_start() + offset->value());
   1440   output_frame->SetPc(pc);
   1441 }
   1442 
   1443 
   1444 void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   1445                                              int frame_index) {
   1446   //
   1447   //               FROM                                  TO
   1448   //    |          ....           |          |          ....           |
   1449   //    +-------------------------+          +-------------------------+
   1450   //    | JSFunction continuation |          | JSFunction continuation |
   1451   //    +-------------------------+          +-------------------------+
   1452   // |  |    saved frame (FP)     |          |    saved frame (FP)     |
   1453   // |  +=========================+<-fpreg   +=========================+<-fpreg
   1454   // |  |   JSFunction context    |          |   JSFunction context    |
   1455   // v  +-------------------------+          +-------------------------+
   1456   //    |   COMPILED_STUB marker  |          |   STUB_FAILURE marker   |
   1457   //    +-------------------------+          +-------------------------+
   1458   //    |                         |          |  caller args.arguments_ |
   1459   //    | ...                     |          +-------------------------+
   1460   //    |                         |          |  caller args.length_    |
   1461   //    +-------------------------+<-spreg   +-------------------------+
   1462   //                                         |  caller args pointer    |
   1463   //                                         +-------------------------+
   1464   //                                         |  caller stack param 1   |
   1465   //      parameters in registers            +-------------------------+
   1466   //       and spilled to stack              |           ....          |
   1467   //                                         +-------------------------+
   1468   //                                         |  caller stack param n   |
   1469   //                                         +-------------------------+<-spreg
   1470   //                                         reg = number of parameters
   1471   //                                         reg = failure handler address
   1472   //                                         reg = saved frame
   1473   //                                         reg = JSFunction context
   1474   //
   1475 
   1476   ASSERT(compiled_code_->is_crankshafted() &&
   1477          compiled_code_->kind() != Code::OPTIMIZED_FUNCTION);
   1478   int major_key = compiled_code_->major_key();
   1479   CodeStubInterfaceDescriptor* descriptor =
   1480       isolate_->code_stub_interface_descriptor(major_key);
   1481 
   1482   // The output frame must have room for all pushed register parameters
   1483   // and the standard stack frame slots.  Include space for an Arguments
   1484   // object describing the caller's parameters and for a pointer to that
   1485   // object, which is passed on to the stub failure handler.
   1486   ASSERT(descriptor->register_param_count_ >= 0);
   1487   int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
   1488       sizeof(Arguments) + kPointerSize;
   1489   int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
   1490   int input_frame_size = input_->GetFrameSize();
   1491   int output_frame_size = height_in_bytes + fixed_frame_size;
   1492   if (trace_) {
   1493     PrintF("  translating %s => StubFailureTrampolineStub, height=%d\n",
   1494            CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
   1495            height_in_bytes);
   1496   }
   1497 
   1498   // The stub failure trampoline is a single frame.
   1499   FrameDescription* output_frame =
   1500       new(output_frame_size) FrameDescription(output_frame_size, NULL);
   1501   output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
   1502   ASSERT(frame_index == 0);
   1503   output_[frame_index] = output_frame;
   1504 
   1505   // The top address for the output frame can be computed from the input
   1506   // frame pointer and the output frame's height. Subtract space for the
   1507   // context and function slots.
   1508   Register fp_reg = StubFailureTrampolineFrame::fp_register();
   1509   intptr_t top_address = input_->GetRegister(fp_reg.code()) -
   1510       (2 * kPointerSize) - height_in_bytes;
   1511   output_frame->SetTop(top_address);
   1512 
   1513   // Read caller's PC (JSFunction continuation) from the input frame.
   1514   unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
   1515   unsigned output_frame_offset = output_frame_size - kPCOnStackSize;
   1516   intptr_t value = input_->GetFrameSlot(input_frame_offset);
   1517   output_frame->SetCallerPc(output_frame_offset, value);
   1518   if (trace_) {
   1519     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1520            V8PRIxPTR " ; caller's pc\n",
   1521            top_address + output_frame_offset, output_frame_offset, value);
   1522   }
   1523 
   1524   // Read caller's FP from the input frame, and set this frame's FP.
   1525   input_frame_offset -= kFPOnStackSize;
   1526   value = input_->GetFrameSlot(input_frame_offset);
   1527   output_frame_offset -= kFPOnStackSize;
   1528   output_frame->SetCallerFp(output_frame_offset, value);
   1529   intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
   1530   output_frame->SetRegister(fp_reg.code(), frame_ptr);
   1531   output_frame->SetFp(frame_ptr);
   1532   if (trace_) {
   1533     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1534            V8PRIxPTR " ; caller's fp\n",
   1535            top_address + output_frame_offset, output_frame_offset, value);
   1536   }
   1537 
   1538   // The context is copied from the input frame.
   1539   Register context_reg = StubFailureTrampolineFrame::context_register();
   1540   input_frame_offset -= kPointerSize;
   1541   value = input_->GetFrameSlot(input_frame_offset);
   1542   output_frame->SetRegister(context_reg.code(), value);
   1543   output_frame_offset -= kPointerSize;
   1544   output_frame->SetFrameSlot(output_frame_offset, value);
   1545   if (trace_) {
   1546     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1547            V8PRIxPTR " ; context\n",
   1548            top_address + output_frame_offset, output_frame_offset, value);
   1549   }
   1550 
   1551   // A marker value is used in place of the function.
   1552   output_frame_offset -= kPointerSize;
   1553   value = reinterpret_cast<intptr_t>(
   1554       Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
   1555   output_frame->SetFrameSlot(output_frame_offset, value);
   1556   if (trace_) {
   1557     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1558            V8PRIxPTR " ; function (stub failure sentinel)\n",
   1559            top_address + output_frame_offset, output_frame_offset, value);
   1560   }
   1561 
   1562   intptr_t caller_arg_count = 0;
   1563   bool arg_count_known = descriptor->stack_parameter_count_ == NULL;
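          // If the descriptor has no dynamic stack parameter count, the caller
          // argument count is known to be zero here; otherwise it is translated
          // into the args.length slot below and args.arguments is patched
          // afterwards.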
   1564 
   1565   // Build the Arguments object for the caller's parameters and a pointer to it.
   1566   output_frame_offset -= kPointerSize;
   1567   int args_arguments_offset = output_frame_offset;
   1568   intptr_t the_hole = reinterpret_cast<intptr_t>(
   1569       isolate_->heap()->the_hole_value());
   1570   if (arg_count_known) {
   1571     value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
   1572         (caller_arg_count - 1) * kPointerSize;
   1573   } else {
   1574     value = the_hole;
   1575   }
   1576 
   1577   output_frame->SetFrameSlot(args_arguments_offset, value);
   1578   if (trace_) {
   1579     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1580            V8PRIxPTR " ; args.arguments %s\n",
   1581            top_address + args_arguments_offset, args_arguments_offset, value,
   1582            arg_count_known ? "" : "(the hole)");
   1583   }
   1584 
   1585   output_frame_offset -= kPointerSize;
   1586   int length_frame_offset = output_frame_offset;
   1587   value = arg_count_known ? caller_arg_count : the_hole;
   1588   output_frame->SetFrameSlot(length_frame_offset, value);
   1589   if (trace_) {
   1590     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1591            V8PRIxPTR " ; args.length %s\n",
   1592            top_address + length_frame_offset, length_frame_offset, value,
   1593            arg_count_known ? "" : "(the hole)");
   1594   }
   1595 
   1596   output_frame_offset -= kPointerSize;
   1597   value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
   1598       (output_frame_size - output_frame_offset) + kPointerSize;
   1599   output_frame->SetFrameSlot(output_frame_offset, value);
   1600   if (trace_) {
   1601     PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1602            V8PRIxPTR " ; args*\n",
   1603            top_address + output_frame_offset, output_frame_offset, value);
   1604   }
   1605 
   1606   // Copy the register parameters to the failure frame.
   1607   for (int i = 0; i < descriptor->register_param_count_; ++i) {
   1608     output_frame_offset -= kPointerSize;
   1609     DoTranslateCommand(iterator, 0, output_frame_offset);
   1610   }
   1611 
   1612   if (!arg_count_known) {
   1613     DoTranslateCommand(iterator, 0, length_frame_offset,
   1614                        TRANSLATED_VALUE_IS_NATIVE);
   1615     caller_arg_count = output_frame->GetFrameSlot(length_frame_offset);
   1616     value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
   1617         (caller_arg_count - 1) * kPointerSize;
   1618     output_frame->SetFrameSlot(args_arguments_offset, value);
   1619     if (trace_) {
   1620       PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
   1621              V8PRIxPTR " ; args.arguments\n",
   1622              top_address + args_arguments_offset, args_arguments_offset, value);
   1623     }
   1624   }
   1625 
   1626   ASSERT(0 == output_frame_offset);
   1627 
   1628   // Copy the double registers from the input into the output frame.
   1629   CopyDoubleRegisters(output_frame);
   1630 
   1631   // Fill registers containing handler and number of parameters.
   1632   SetPlatformCompiledStubRegisters(output_frame, descriptor);
   1633 
   1634   // Compute this frame's PC, state, and continuation.
   1635   Code* trampoline = NULL;
   1636   StubFunctionMode function_mode = descriptor->function_mode_;
   1637   StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
   1638                                                            isolate_);
   1639   ASSERT(trampoline != NULL);
   1640   output_frame->SetPc(reinterpret_cast<intptr_t>(
   1641       trampoline->instruction_start()));
   1642   output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
   1643   Code* notify_failure =
   1644       isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
   1645   output_frame->SetContinuation(
   1646       reinterpret_cast<intptr_t>(notify_failure->entry()));
   1647 }
   1648 
   1649 
   1650 Handle<Object> Deoptimizer::MaterializeNextHeapObject() {
   1651   int object_index = materialization_object_index_++;
   1652   ObjectMaterializationDescriptor desc = deferred_objects_[object_index];
   1653   const int length = desc.object_length();
   1654 
   1655   if (desc.duplicate_object() >= 0) {
   1656     // Found a previously materialized object by de-duplication.
   1657     object_index = desc.duplicate_object();
   1658     materialized_objects_->Add(Handle<Object>());
   1659   } else if (desc.is_arguments() && ArgumentsObjectIsAdapted(object_index)) {
   1660     // Use the arguments adapter frame we just built to materialize the
   1661     // arguments object. FunctionGetArguments can't throw an exception.
   1662     Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
   1663     Handle<JSObject> arguments = Handle<JSObject>::cast(
   1664         Accessors::FunctionGetArguments(function));
   1665     materialized_objects_->Add(arguments);
   1666     materialization_value_index_ += length;
   1667   } else if (desc.is_arguments()) {
   1668     // Construct an arguments object and copy the parameters to a newly
   1669     // allocated arguments object backing store.
   1670     Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
   1671     Handle<JSObject> arguments =
   1672         isolate_->factory()->NewArgumentsObject(function, length);
   1673     Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
   1674     ASSERT(array->length() == length);
   1675     arguments->set_elements(*array);
   1676     materialized_objects_->Add(arguments);
   1677     for (int i = 0; i < length; ++i) {
   1678       Handle<Object> value = MaterializeNextValue();
   1679       array->set(i, *value);
   1680     }
   1681   } else {
   1682     // Dispatch on the instance type of the object to be materialized.
   1683     Handle<Map> map = Handle<Map>::cast(MaterializeNextValue());
   1684     switch (map->instance_type()) {
   1685       case HEAP_NUMBER_TYPE: {
   1686         Handle<HeapNumber> number =
   1687             Handle<HeapNumber>::cast(MaterializeNextValue());
   1688         materialized_objects_->Add(number);
   1689         materialization_value_index_ += kDoubleSize / kPointerSize - 1;
   1690         break;
   1691       }
   1692       case JS_OBJECT_TYPE: {
   1693         Handle<JSObject> object =
   1694             isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
   1695         materialized_objects_->Add(object);
   1696         Handle<Object> properties = MaterializeNextValue();
   1697         Handle<Object> elements = MaterializeNextValue();
   1698         object->set_properties(FixedArray::cast(*properties));
   1699         object->set_elements(FixedArray::cast(*elements));
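                // The map, properties and elements were consumed above, so only
                // length - 3 in-object fields remain to be materialized.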
   1700         for (int i = 0; i < length - 3; ++i) {
   1701           Handle<Object> value = MaterializeNextValue();
   1702           object->FastPropertyAtPut(i, *value);
   1703         }
   1704         break;
   1705       }
   1706       default:
   1707         PrintF("[couldn't handle instance type %d]\n", map->instance_type());
   1708         UNREACHABLE();
   1709     }
   1710   }
   1711 
   1712   return materialized_objects_->at(object_index);
   1713 }
   1714 
   1715 
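        // Returns the next handlified value, first materializing a nested
        // captured or arguments object when the slot holds the materialization
        // marker.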
   1716 Handle<Object> Deoptimizer::MaterializeNextValue() {
   1717   int value_index = materialization_value_index_++;
   1718   Handle<Object> value = materialized_values_->at(value_index);
   1719   if (*value == isolate_->heap()->arguments_marker()) {
   1720     value = MaterializeNextHeapObject();
   1721   }
   1722   return value;
   1723 }
   1724 
   1725 
   1726 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
   1727   ASSERT_NE(DEBUGGER, bailout_type_);
   1728 
   1729   // Walk all JavaScript output frames with the given frame iterator.
   1730   for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
   1731     if (frame_index != 0) it->Advance();
   1732     JavaScriptFrame* frame = it->frame();
   1733     jsframe_functions_.Add(handle(frame->function(), isolate_));
   1734     jsframe_has_adapted_arguments_.Add(frame->has_adapted_arguments());
   1735   }
   1736 
   1737   // Handlify all tagged object values before triggering any allocation.
   1738   List<Handle<Object> > values(deferred_objects_tagged_values_.length());
   1739   for (int i = 0; i < deferred_objects_tagged_values_.length(); ++i) {
   1740     values.Add(Handle<Object>(deferred_objects_tagged_values_[i], isolate_));
   1741   }
   1742 
   1743   // Play it safe and clear all unhandlified values before we continue.
   1744   deferred_objects_tagged_values_.Clear();
   1745 
   1746   // Materialize all heap numbers before looking at arguments, because the
   1747   // output frames need to already contain valid heap numbers when they are
   1748   // used to materialize arguments objects later on.
   1749   for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
   1750     HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
   1751     Handle<Object> num = isolate_->factory()->NewNumber(d.value());
   1752     if (trace_) {
   1753       PrintF("Materialized a new heap number %p [%e] in slot %p\n",
   1754              reinterpret_cast<void*>(*num),
   1755              d.value(),
   1756              d.slot_address());
   1757     }
   1758     Memory::Object_at(d.slot_address()) = *num;
   1759   }
   1760 
   1761   // Materialize all heap numbers required for arguments/captured objects.
   1762   for (int i = 0; i < values.length(); i++) {
   1763     if (!values.at(i)->IsTheHole()) continue;
   1764     double double_value = deferred_objects_double_values_[i];
   1765     Handle<Object> num = isolate_->factory()->NewNumber(double_value);
   1766     if (trace_) {
   1767       PrintF("Materialized a new heap number %p [%e] for object\n",
   1768              reinterpret_cast<void*>(*num), double_value);
   1769     }
   1770     values.Set(i, num);
   1771   }
   1772 
   1773   // Materialize arguments/captured objects.
   1774   if (!deferred_objects_.is_empty()) {
   1775     List<Handle<Object> > materialized_objects(deferred_objects_.length());
   1776     materialized_objects_ = &materialized_objects;
   1777     materialized_values_ = &values;
   1778 
   1779     while (materialization_object_index_ < deferred_objects_.length()) {
   1780       int object_index = materialization_object_index_;
   1781       ObjectMaterializationDescriptor descriptor =
   1782           deferred_objects_.at(object_index);
   1783 
   1784       // Find a previously materialized object by de-duplication or
   1785       // materialize a new instance of the object if necessary. Store
   1786       // the materialized object into the frame slot.
   1787       Handle<Object> object = MaterializeNextHeapObject();
   1788       Memory::Object_at(descriptor.slot_address()) = *object;
   1789       if (trace_) {
   1790         if (descriptor.is_arguments()) {
   1791           PrintF("Materialized %sarguments object of length %d for %p: ",
   1792                  ArgumentsObjectIsAdapted(object_index) ? "(adapted) " : "",
   1793                  Handle<JSObject>::cast(object)->elements()->length(),
   1794                  reinterpret_cast<void*>(descriptor.slot_address()));
   1795         } else {
   1796           PrintF("Materialized captured object of size %d for %p: ",
   1797                  Handle<HeapObject>::cast(object)->Size(),
   1798                  reinterpret_cast<void*>(descriptor.slot_address()));
   1799         }
   1800         object->ShortPrint();
   1801         PrintF("\n");
   1802       }
   1803     }
   1804 
   1805     ASSERT(materialization_object_index_ == materialized_objects_->length());
   1806     ASSERT(materialization_value_index_ == materialized_values_->length());
   1807   }
   1808 }
   1809 
   1810 
   1811 #ifdef ENABLE_DEBUGGER_SUPPORT
   1812 void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
   1813     Address parameters_top,
   1814     uint32_t parameters_size,
   1815     Address expressions_top,
   1816     uint32_t expressions_size,
   1817     DeoptimizedFrameInfo* info) {
   1818   ASSERT_EQ(DEBUGGER, bailout_type_);
   1819   Address parameters_bottom = parameters_top + parameters_size;
   1820   Address expressions_bottom = expressions_top + expressions_size;
   1821   for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
   1822     HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
   1823 
   1824     // Check whether the heap number to materialize actually belongs to
   1825     // the frame being extracted.
   1826     Address slot = d.slot_address();
   1827     if (parameters_top <= slot && slot < parameters_bottom) {
   1828       Handle<Object> num = isolate_->factory()->NewNumber(d.value());
   1829 
   1830       int index = (info->parameters_count() - 1) -
   1831           static_cast<int>(slot - parameters_top) / kPointerSize;
   1832 
   1833       if (trace_) {
   1834         PrintF("Materializing a new heap number %p [%e] in slot %p"
   1835                " for parameter slot #%d\n",
   1836                reinterpret_cast<void*>(*num),
   1837                d.value(),
   1838                d.slot_address(),
   1839                index);
   1840       }
   1841 
   1842       info->SetParameter(index, *num);
   1843     } else if (expressions_top <= slot && slot < expressions_bottom) {
   1844       Handle<Object> num = isolate_->factory()->NewNumber(d.value());
   1845 
   1846       int index = info->expression_count() - 1 -
   1847           static_cast<int>(slot - expressions_top) / kPointerSize;
   1848 
   1849       if (trace_) {
   1850         PrintF("Materializing a new heap number %p [%e] in slot %p"
   1851                " for expression slot #%d\n",
   1852                reinterpret_cast<void*>(*num),
   1853                d.value(),
   1854                d.slot_address(),
   1855                index);
   1856       }
   1857 
   1858       info->SetExpression(index, *num);
   1859     }
   1860   }
   1861 }
   1862 #endif
   1863 
   1864 
   1865 static const char* TraceValueType(bool is_smi, bool is_native = false) {
   1866   if (is_native) {
   1867     return "native";
   1868   } else if (is_smi) {
   1869     return "smi";
   1870   }
   1871 
   1872   return "heap number";
   1873 }
   1874 
   1875 
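        // Translates a single field of a deferred (arguments or captured) object.
        // The value is not written into an output frame; instead it is recorded
        // on the side, as either a tagged value or a double, and the object is
        // materialized after the output frames have been built. Nested object
        // descriptions recurse back into this function.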
   1876 void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
   1877                                     int object_index,
   1878                                     int field_index) {
   1879   disasm::NameConverter converter;
   1880   Address object_slot = deferred_objects_[object_index].slot_address();
   1881 
   1882   Translation::Opcode opcode =
   1883       static_cast<Translation::Opcode>(iterator->Next());
   1884 
   1885   switch (opcode) {
   1886     case Translation::BEGIN:
   1887     case Translation::JS_FRAME:
   1888     case Translation::ARGUMENTS_ADAPTOR_FRAME:
   1889     case Translation::CONSTRUCT_STUB_FRAME:
   1890     case Translation::GETTER_STUB_FRAME:
   1891     case Translation::SETTER_STUB_FRAME:
   1892     case Translation::COMPILED_STUB_FRAME:
   1893       UNREACHABLE();
   1894       return;
   1895 
   1896     case Translation::REGISTER: {
   1897       int input_reg = iterator->Next();
   1898       intptr_t input_value = input_->GetRegister(input_reg);
   1899       if (trace_) {
   1900         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   1901                reinterpret_cast<intptr_t>(object_slot),
   1902                field_index);
   1903         PrintF("0x%08" V8PRIxPTR " ; %s ", input_value,
   1904                converter.NameOfCPURegister(input_reg));
   1905         reinterpret_cast<Object*>(input_value)->ShortPrint();
   1906         PrintF("\n");
   1907       }
   1908       AddObjectTaggedValue(input_value);
   1909       return;
   1910     }
   1911 
   1912     case Translation::INT32_REGISTER: {
   1913       int input_reg = iterator->Next();
   1914       intptr_t value = input_->GetRegister(input_reg);
   1915       bool is_smi = Smi::IsValid(value);
   1916       if (trace_) {
   1917         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   1918                reinterpret_cast<intptr_t>(object_slot),
   1919                field_index);
   1920         PrintF("%" V8PRIdPTR " ; %s (%s)\n", value,
   1921                converter.NameOfCPURegister(input_reg),
   1922                TraceValueType(is_smi));
   1923       }
   1924       if (is_smi) {
   1925         intptr_t tagged_value =
   1926             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   1927         AddObjectTaggedValue(tagged_value);
   1928       } else {
   1929         double double_value = static_cast<double>(static_cast<int32_t>(value));
   1930         AddObjectDoubleValue(double_value);
   1931       }
   1932       return;
   1933     }
   1934 
   1935     case Translation::UINT32_REGISTER: {
   1936       int input_reg = iterator->Next();
   1937       uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
   1938       bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
   1939       if (trace_) {
   1940         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   1941                reinterpret_cast<intptr_t>(object_slot),
   1942                field_index);
   1943         PrintF("%" V8PRIuPTR " ; uint %s (%s)\n", value,
   1944                converter.NameOfCPURegister(input_reg),
   1945                TraceValueType(is_smi));
   1946       }
   1947       if (is_smi) {
   1948         intptr_t tagged_value =
   1949             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   1950         AddObjectTaggedValue(tagged_value);
   1951       } else {
   1952         double double_value = static_cast<double>(static_cast<uint32_t>(value));
   1953         AddObjectDoubleValue(double_value);
   1954       }
   1955       return;
   1956     }
   1957 
   1958     case Translation::DOUBLE_REGISTER: {
   1959       int input_reg = iterator->Next();
   1960       double value = input_->GetDoubleRegister(input_reg);
   1961       if (trace_) {
   1962         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   1963                reinterpret_cast<intptr_t>(object_slot),
   1964                field_index);
   1965         PrintF("%e ; %s\n", value,
   1966                DoubleRegister::AllocationIndexToString(input_reg));
   1967       }
   1968       AddObjectDoubleValue(value);
   1969       return;
   1970     }
   1971 
   1972     case Translation::STACK_SLOT: {
   1973       int input_slot_index = iterator->Next();
   1974       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   1975       intptr_t input_value = input_->GetFrameSlot(input_offset);
   1976       if (trace_) {
   1977         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   1978                reinterpret_cast<intptr_t>(object_slot),
   1979                field_index);
   1980         PrintF("0x%08" V8PRIxPTR " ; [sp + %d] ", input_value, input_offset);
   1981         reinterpret_cast<Object*>(input_value)->ShortPrint();
   1982         PrintF("\n");
   1983       }
   1984       AddObjectTaggedValue(input_value);
   1985       return;
   1986     }
   1987 
   1988     case Translation::INT32_STACK_SLOT: {
   1989       int input_slot_index = iterator->Next();
   1990       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   1991       intptr_t value = input_->GetFrameSlot(input_offset);
   1992       bool is_smi = Smi::IsValid(value);
   1993       if (trace_) {
   1994         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   1995                reinterpret_cast<intptr_t>(object_slot),
   1996                field_index);
   1997         PrintF("%" V8PRIdPTR " ; [sp + %d] (%s)\n",
   1998                value, input_offset, TraceValueType(is_smi));
   1999       }
   2000       if (is_smi) {
   2001         intptr_t tagged_value =
   2002             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   2003         AddObjectTaggedValue(tagged_value);
   2004       } else {
   2005         double double_value = static_cast<double>(static_cast<int32_t>(value));
   2006         AddObjectDoubleValue(double_value);
   2007       }
   2008       return;
   2009     }
   2010 
   2011     case Translation::UINT32_STACK_SLOT: {
   2012       int input_slot_index = iterator->Next();
   2013       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   2014       uintptr_t value =
   2015           static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
   2016       bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
   2017       if (trace_) {
   2018         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   2019                reinterpret_cast<intptr_t>(object_slot),
   2020                field_index);
   2021         PrintF("%" V8PRIuPTR " ; [sp + %d] (uint %s)\n",
   2022                value, input_offset, TraceValueType(is_smi));
   2023       }
   2024       if (is_smi) {
   2025         intptr_t tagged_value =
   2026             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   2027         AddObjectTaggedValue(tagged_value);
   2028       } else {
   2029         double double_value = static_cast<double>(static_cast<uint32_t>(value));
   2030         AddObjectDoubleValue(double_value);
   2031       }
   2032       return;
   2033     }
   2034 
   2035     case Translation::DOUBLE_STACK_SLOT: {
   2036       int input_slot_index = iterator->Next();
   2037       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   2038       double value = input_->GetDoubleFrameSlot(input_offset);
   2039       if (trace_) {
   2040         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   2041                reinterpret_cast<intptr_t>(object_slot),
   2042                field_index);
   2043         PrintF("%e ; [sp + %d]\n", value, input_offset);
   2044       }
   2045       AddObjectDoubleValue(value);
   2046       return;
   2047     }
   2048 
   2049     case Translation::LITERAL: {
   2050       Object* literal = ComputeLiteral(iterator->Next());
   2051       if (trace_) {
   2052         PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
   2053                reinterpret_cast<intptr_t>(object_slot),
   2054                field_index);
   2055         literal->ShortPrint();
   2056         PrintF(" ; literal\n");
   2057       }
   2058       intptr_t value = reinterpret_cast<intptr_t>(literal);
   2059       AddObjectTaggedValue(value);
   2060       return;
   2061     }
   2062 
   2063     case Translation::DUPLICATED_OBJECT: {
   2064       int object_index = iterator->Next();
   2065       if (trace_) {
   2066         PrintF("      nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
   2067                reinterpret_cast<intptr_t>(object_slot),
   2068                field_index);
   2069         isolate_->heap()->arguments_marker()->ShortPrint();
   2070         PrintF(" ; duplicate of object #%d\n", object_index);
   2071       }
   2072       // Use the materialization marker value as a sentinel and fill in
   2073       // the object after the deoptimized frame is built.
   2074       intptr_t value = reinterpret_cast<intptr_t>(
   2075           isolate_->heap()->arguments_marker());
   2076       AddObjectDuplication(0, object_index);
   2077       AddObjectTaggedValue(value);
   2078       return;
   2079     }
   2080 
   2081     case Translation::ARGUMENTS_OBJECT:
   2082     case Translation::CAPTURED_OBJECT: {
   2083       int length = iterator->Next();
   2084       bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
   2085       if (trace_) {
   2086         PrintF("      nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
   2087                reinterpret_cast<intptr_t>(object_slot),
   2088                field_index);
   2089         isolate_->heap()->arguments_marker()->ShortPrint();
   2090         PrintF(" ; object (length = %d, is_args = %d)\n", length, is_args);
   2091       }
   2092       // Use the materialization marker value as a sentinel and fill in
   2093       // the object after the deoptimized frame is built.
   2094       intptr_t value = reinterpret_cast<intptr_t>(
   2095           isolate_->heap()->arguments_marker());
   2096       AddObjectStart(0, length, is_args);
   2097       AddObjectTaggedValue(value);
   2098       // We save the object values on the side and materialize the actual
   2099       // object after the deoptimized frame is built.
   2100       int object_index = deferred_objects_.length() - 1;
   2101       for (int i = 0; i < length; i++) {
   2102         DoTranslateObject(iterator, object_index, i);
   2103       }
   2104       return;
   2105     }
   2106   }
   2107 }
   2108 
   2109 
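        // Translates one value from the optimized (input) frame into the given
        // slot of an output frame. Untagged int32/uint32/double values that must
        // end up tagged are stored on the side and a GC-safe placeholder is
        // written into the slot; the corresponding heap numbers are materialized
        // later.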
   2110 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
   2111     int frame_index,
   2112     unsigned output_offset,
   2113     DeoptimizerTranslatedValueType value_type) {
   2114   disasm::NameConverter converter;
   2115   // A GC-safe temporary placeholder that we can put in the output frame.
   2116   const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
   2117   bool is_native = value_type == TRANSLATED_VALUE_IS_NATIVE;
   2118 
   2119   Translation::Opcode opcode =
   2120       static_cast<Translation::Opcode>(iterator->Next());
   2121 
   2122   switch (opcode) {
   2123     case Translation::BEGIN:
   2124     case Translation::JS_FRAME:
   2125     case Translation::ARGUMENTS_ADAPTOR_FRAME:
   2126     case Translation::CONSTRUCT_STUB_FRAME:
   2127     case Translation::GETTER_STUB_FRAME:
   2128     case Translation::SETTER_STUB_FRAME:
   2129     case Translation::COMPILED_STUB_FRAME:
   2130       UNREACHABLE();
   2131       return;
   2132 
   2133     case Translation::REGISTER: {
   2134       int input_reg = iterator->Next();
   2135       intptr_t input_value = input_->GetRegister(input_reg);
   2136       if (trace_) {
   2137         PrintF(
   2138             "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ",
   2139             output_[frame_index]->GetTop() + output_offset,
   2140             output_offset,
   2141             input_value,
   2142             converter.NameOfCPURegister(input_reg));
   2143         reinterpret_cast<Object*>(input_value)->ShortPrint();
   2144         PrintF("\n");
   2145       }
   2146       output_[frame_index]->SetFrameSlot(output_offset, input_value);
   2147       return;
   2148     }
   2149 
   2150     case Translation::INT32_REGISTER: {
   2151       int input_reg = iterator->Next();
   2152       intptr_t value = input_->GetRegister(input_reg);
   2153       bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
   2154           Smi::IsValid(value);
   2155       if (trace_) {
   2156         PrintF(
   2157             "    0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
   2158             output_[frame_index]->GetTop() + output_offset,
   2159             output_offset,
   2160             value,
   2161             converter.NameOfCPURegister(input_reg),
   2162             TraceValueType(is_smi, is_native));
   2163       }
   2164       if (is_smi) {
   2165         intptr_t tagged_value =
   2166             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   2167         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
   2168       } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
   2169         output_[frame_index]->SetFrameSlot(output_offset, value);
   2170       } else {
   2171         // We save the untagged value on the side and store a GC-safe
   2172         // temporary placeholder in the frame.
   2173         ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
   2174         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
   2175                        static_cast<double>(static_cast<int32_t>(value)));
   2176         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
   2177       }
   2178       return;
   2179     }
   2180 
   2181     case Translation::UINT32_REGISTER: {
   2182       int input_reg = iterator->Next();
   2183       uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
   2184       bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
   2185           (value <= static_cast<uintptr_t>(Smi::kMaxValue));
   2186       if (trace_) {
   2187         PrintF(
   2188             "    0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
   2189             " ; uint %s (%s)\n",
   2190             output_[frame_index]->GetTop() + output_offset,
   2191             output_offset,
   2192             value,
   2193             converter.NameOfCPURegister(input_reg),
   2194             TraceValueType(is_smi, is_native));
   2195       }
   2196       if (is_smi) {
   2197         intptr_t tagged_value =
   2198             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   2199         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
   2200       } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
   2201         output_[frame_index]->SetFrameSlot(output_offset, value);
   2202       } else {
   2203         // We save the untagged value on the side and store a GC-safe
   2204         // temporary placeholder in the frame.
   2205         ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
   2206         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
   2207                        static_cast<double>(static_cast<uint32_t>(value)));
   2208         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
   2209       }
   2210       return;
   2211     }
   2212 
   2213     case Translation::DOUBLE_REGISTER: {
   2214       int input_reg = iterator->Next();
   2215       double value = input_->GetDoubleRegister(input_reg);
   2216       if (trace_) {
   2217         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n",
   2218                output_[frame_index]->GetTop() + output_offset,
   2219                output_offset,
   2220                value,
   2221                DoubleRegister::AllocationIndexToString(input_reg));
   2222       }
   2223       // We save the untagged value on the side and store a GC-safe
   2224       // temporary placeholder in the frame.
   2225       AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
   2226       output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
   2227       return;
   2228     }
   2229 
   2230     case Translation::STACK_SLOT: {
   2231       int input_slot_index = iterator->Next();
   2232       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   2233       intptr_t input_value = input_->GetFrameSlot(input_offset);
   2234       if (trace_) {
   2235         PrintF("    0x%08" V8PRIxPTR ": ",
   2236                output_[frame_index]->GetTop() + output_offset);
   2237         PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
   2238                output_offset,
   2239                input_value,
   2240                input_offset);
   2241         reinterpret_cast<Object*>(input_value)->ShortPrint();
   2242         PrintF("\n");
   2243       }
   2244       output_[frame_index]->SetFrameSlot(output_offset, input_value);
   2245       return;
   2246     }
   2247 
   2248     case Translation::INT32_STACK_SLOT: {
   2249       int input_slot_index = iterator->Next();
   2250       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   2251       intptr_t value = input_->GetFrameSlot(input_offset);
   2252       bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
   2253           Smi::IsValid(value);
   2254       if (trace_) {
   2255         PrintF("    0x%08" V8PRIxPTR ": ",
   2256                output_[frame_index]->GetTop() + output_offset);
   2257         PrintF("[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
   2258                output_offset,
   2259                value,
   2260                input_offset,
   2261                TraceValueType(is_smi, is_native));
   2262       }
   2263       if (is_smi) {
   2264         intptr_t tagged_value =
   2265             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   2266         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
   2267       } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
   2268         output_[frame_index]->SetFrameSlot(output_offset, value);
   2269       } else {
   2270         // We save the untagged value on the side and store a GC-safe
   2271         // temporary placeholder in the frame.
   2272         ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
   2273         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
   2274                        static_cast<double>(static_cast<int32_t>(value)));
   2275         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
   2276       }
   2277       return;
   2278     }
   2279 
   2280     case Translation::UINT32_STACK_SLOT: {
   2281       int input_slot_index = iterator->Next();
   2282       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   2283       uintptr_t value =
   2284           static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
   2285       bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
   2286           (value <= static_cast<uintptr_t>(Smi::kMaxValue));
   2287       if (trace_) {
   2288         PrintF("    0x%08" V8PRIxPTR ": ",
   2289                output_[frame_index]->GetTop() + output_offset);
   2290         PrintF("[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n",
   2291                output_offset,
   2292                value,
   2293                input_offset,
   2294                TraceValueType(is_smi, is_native));
   2295       }
   2296       if (is_smi) {
   2297         intptr_t tagged_value =
   2298             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
   2299         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
   2300       } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
   2301         output_[frame_index]->SetFrameSlot(output_offset, value);
   2302       } else {
   2303         // We save the untagged value on the side and store a GC-safe
   2304         // temporary placeholder in the frame.
   2305         ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
   2306         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
   2307                        static_cast<double>(static_cast<uint32_t>(value)));
   2308         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
   2309       }
   2310       return;
   2311     }
   2312 
   2313     case Translation::DOUBLE_STACK_SLOT: {
   2314       int input_slot_index = iterator->Next();
   2315       unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
   2316       double value = input_->GetDoubleFrameSlot(input_offset);
   2317       if (trace_) {
   2318         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
   2319                output_[frame_index]->GetTop() + output_offset,
   2320                output_offset,
   2321                value,
   2322                input_offset);
   2323       }
   2324       // We save the untagged value on the side and store a GC-safe
   2325       // temporary placeholder in the frame.
   2326       AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
   2327       output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
   2328       return;
   2329     }
   2330 
   2331     case Translation::LITERAL: {
   2332       Object* literal = ComputeLiteral(iterator->Next());
   2333       if (trace_) {
   2334         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
   2335                output_[frame_index]->GetTop() + output_offset,
   2336                output_offset);
   2337         literal->ShortPrint();
   2338         PrintF(" ; literal\n");
   2339       }
   2340       intptr_t value = reinterpret_cast<intptr_t>(literal);
   2341       output_[frame_index]->SetFrameSlot(output_offset, value);
   2342       return;
   2343     }
   2344 
   2345     case Translation::DUPLICATED_OBJECT: {
   2346       int object_index = iterator->Next();
   2347       if (trace_) {
   2348         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
   2349                output_[frame_index]->GetTop() + output_offset,
   2350                output_offset);
   2351         isolate_->heap()->arguments_marker()->ShortPrint();
   2352         PrintF(" ; duplicate of object #%d\n", object_index);
   2353       }
   2354       // Use the materialization marker value as a sentinel and fill in
   2355       // the object after the deoptimized frame is built.
   2356       intptr_t value = reinterpret_cast<intptr_t>(
   2357           isolate_->heap()->arguments_marker());
   2358       AddObjectDuplication(output_[frame_index]->GetTop() + output_offset,
   2359                            object_index);
   2360       output_[frame_index]->SetFrameSlot(output_offset, value);
   2361       return;
   2362     }
   2363 
   2364     case Translation::ARGUMENTS_OBJECT:
   2365     case Translation::CAPTURED_OBJECT: {
   2366       int length = iterator->Next();
   2367       bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
   2368       if (trace_) {
   2369         PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
   2370                output_[frame_index]->GetTop() + output_offset,
   2371                output_offset);
   2372         isolate_->heap()->arguments_marker()->ShortPrint();
   2373         PrintF(" ; object (length = %d, is_args = %d)\n", length, is_args);
   2374       }
   2375       // Use the materialization marker value as a sentinel and fill in
   2376       // the object after the deoptimized frame is built.
   2377       intptr_t value = reinterpret_cast<intptr_t>(
   2378           isolate_->heap()->arguments_marker());
   2379       AddObjectStart(output_[frame_index]->GetTop() + output_offset,
   2380                      length, is_args);
   2381       output_[frame_index]->SetFrameSlot(output_offset, value);
   2382       // We save the object values on the side and materialize the actual
   2383       // object after the deoptimized frame is built.
   2384       int object_index = deferred_objects_.length() - 1;
   2385       for (int i = 0; i < length; i++) {
   2386         DoTranslateObject(iterator, object_index, i);
   2387       }
   2388       return;
   2389     }
   2390   }
   2391 }
   2392 
   2393 
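        // Translates one value for on-stack replacement. The input slot always
        // holds a tagged value from the unoptimized frame; returns false, which
        // aborts OSR, when that value cannot be converted to the representation
        // the optimized code expects.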
   2394 bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
   2395                                         int* input_offset) {
   2396   disasm::NameConverter converter;
   2397   FrameDescription* output = output_[0];
   2398 
   2399   // The input values are all part of the unoptimized frame so they
   2400   // are all tagged pointers.
   2401   uintptr_t input_value = input_->GetFrameSlot(*input_offset);
   2402   Object* input_object = reinterpret_cast<Object*>(input_value);
   2403 
   2404   Translation::Opcode opcode =
   2405       static_cast<Translation::Opcode>(iterator->Next());
   2406 
   2407   switch (opcode) {
   2408     case Translation::BEGIN:
   2409     case Translation::JS_FRAME:
   2410     case Translation::ARGUMENTS_ADAPTOR_FRAME:
   2411     case Translation::CONSTRUCT_STUB_FRAME:
   2412     case Translation::GETTER_STUB_FRAME:
   2413     case Translation::SETTER_STUB_FRAME:
   2414     case Translation::COMPILED_STUB_FRAME:
   2415       UNREACHABLE();  // Malformed input.
   2416       return false;
   2417 
   2418     case Translation::REGISTER: {
   2419       int output_reg = iterator->Next();
   2420       if (FLAG_trace_osr) {
   2421         PrintF("    %s <- 0x%08" V8PRIxPTR " ; [sp + %d]\n",
   2422                converter.NameOfCPURegister(output_reg),
   2423                input_value,
   2424                *input_offset);
   2425       }
   2426       output->SetRegister(output_reg, input_value);
   2427       break;
   2428     }
   2429 
   2430     case Translation::INT32_REGISTER: {
   2431       int32_t int32_value = 0;
   2432       if (!input_object->ToInt32(&int32_value)) return false;
   2433 
   2434       int output_reg = iterator->Next();
   2435       if (FLAG_trace_osr) {
   2436         PrintF("    %s <- %d (int32) ; [sp + %d]\n",
   2437                converter.NameOfCPURegister(output_reg),
   2438                int32_value,
   2439                *input_offset);
   2440       }
   2441       output->SetRegister(output_reg, int32_value);
   2442       break;
   2443     }
   2444 
   2445     case Translation::UINT32_REGISTER: {
   2446       uint32_t uint32_value = 0;
   2447       if (!input_object->ToUint32(&uint32_value)) return false;
   2448 
   2449       int output_reg = iterator->Next();
   2450       if (FLAG_trace_osr) {
   2451         PrintF("    %s <- %u (uint32) ; [sp + %d]\n",
   2452                converter.NameOfCPURegister(output_reg),
   2453                uint32_value,
   2454                *input_offset);
   2455       }
   2456       output->SetRegister(output_reg, static_cast<int32_t>(uint32_value));
   2457       break;
   2458     }
   2459 
   2460     case Translation::DOUBLE_REGISTER: {
   2461       // Abort OSR if we don't have a number.
   2462       if (!input_object->IsNumber()) return false;
   2463 
   2464       int output_reg = iterator->Next();
   2465       double double_value = input_object->Number();
   2466       if (FLAG_trace_osr) {
   2467         PrintF("    %s <- %g (double) ; [sp + %d]\n",
   2468                DoubleRegister::AllocationIndexToString(output_reg),
   2469                double_value,
   2470                *input_offset);
   2471       }
   2472       output->SetDoubleRegister(output_reg, double_value);
   2473       break;
   2474     }
   2475 
   2476     case Translation::STACK_SLOT: {
   2477       int output_index = iterator->Next();
   2478       unsigned output_offset =
   2479           output->GetOffsetFromSlotIndex(output_index);
   2480       if (FLAG_trace_osr) {
   2481         PrintF("    [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
   2482                output_offset,
   2483                input_value,
   2484                *input_offset);
   2485         reinterpret_cast<Object*>(input_value)->ShortPrint();
   2486         PrintF("\n");
   2487       }
   2488       output->SetFrameSlot(output_offset, input_value);
   2489       break;
   2490     }
   2491 
   2492     case Translation::INT32_STACK_SLOT: {
   2493       int32_t int32_value = 0;
   2494       if (!input_object->ToInt32(&int32_value)) return false;
   2495 
   2496       int output_index = iterator->Next();
   2497       unsigned output_offset =
   2498           output->GetOffsetFromSlotIndex(output_index);
   2499       if (FLAG_trace_osr) {
   2500         PrintF("    [sp + %d] <- %d (int32) ; [sp + %d]\n",
   2501                output_offset,
   2502                int32_value,
   2503                *input_offset);
   2504       }
   2505       output->SetFrameSlot(output_offset, int32_value);
   2506       break;
   2507     }
   2508 
   2509     case Translation::UINT32_STACK_SLOT: {
   2510       uint32_t uint32_value = 0;
   2511       if (!input_object->ToUint32(&uint32_value)) return false;
   2512 
   2513       int output_index = iterator->Next();
   2514       unsigned output_offset =
   2515           output->GetOffsetFromSlotIndex(output_index);
   2516       if (FLAG_trace_osr) {
   2517         PrintF("    [sp + %d] <- %u (uint32) ; [sp + %d]\n",
   2518                output_offset,
   2519                uint32_value,
   2520                *input_offset);
   2521       }
   2522       output->SetFrameSlot(output_offset, static_cast<int32_t>(uint32_value));
   2523       break;
   2524     }
   2525 
   2526     case Translation::DOUBLE_STACK_SLOT: {
   2527       static const int kLowerOffset = 0 * kPointerSize;
   2528       static const int kUpperOffset = 1 * kPointerSize;
   2529 
   2530       // Abort OSR if we don't have a number.
   2531       if (!input_object->IsNumber()) return false;
   2532 
   2533       int output_index = iterator->Next();
   2534       unsigned output_offset =
   2535           output->GetOffsetFromSlotIndex(output_index);
   2536       double double_value = input_object->Number();
   2537       uint64_t int_value = BitCast<uint64_t, double>(double_value);
   2538       int32_t lower = static_cast<int32_t>(int_value);
   2539       int32_t upper = static_cast<int32_t>(int_value >> kBitsPerInt);
   2540       if (FLAG_trace_osr) {
   2541         PrintF("    [sp + %d] <- 0x%08x (upper bits of %g) ; [sp + %d]\n",
   2542                output_offset + kUpperOffset,
   2543                upper,
   2544                double_value,
   2545                *input_offset);
   2546         PrintF("    [sp + %d] <- 0x%08x (lower bits of %g) ; [sp + %d]\n",
   2547                output_offset + kLowerOffset,
   2548                lower,
   2549                double_value,
   2550                *input_offset);
   2551       }
   2552       output->SetFrameSlot(output_offset + kLowerOffset, lower);
   2553       output->SetFrameSlot(output_offset + kUpperOffset, upper);
   2554       break;
   2555     }
   2556 
   2557     case Translation::LITERAL: {
   2558       // Just ignore non-materialized literals.
   2559       iterator->Next();
   2560       break;
   2561     }
   2562 
   2563     case Translation::DUPLICATED_OBJECT:
   2564     case Translation::ARGUMENTS_OBJECT:
   2565     case Translation::CAPTURED_OBJECT: {
    2566       // Optimized code assumes that the arguments object has not been
    2567       // materialized and so bypasses it when accessing the arguments.
   2568       // We should have bailed out before starting the frame
   2569       // translation.
   2570       UNREACHABLE();
   2571       return false;
   2572     }
   2573   }
   2574 
   2575   *input_offset -= kPointerSize;
   2576   return true;
   2577 }
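
         // Worked example for the DOUBLE_STACK_SLOT case above (an illustrative
         // sketch, assuming a 32-bit target where kPointerSize == 4): the double
         // 1.0 has the IEEE-754 bit pattern 0x3FF0000000000000, so it is written
         // as two 32-bit words:
         //   [sp + output_offset + kLowerOffset] <- 0x00000000  (lower bits)
         //   [sp + output_offset + kUpperOffset] <- 0x3FF00000  (upper bits)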
   2578 
   2579 
   2580 void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
   2581                                      Code* interrupt_code,
   2582                                      Code* replacement_code) {
    2583   // Iterate over the back edge table and patch every interrupt call at
    2584   // the current loop nesting level with a call to the replacement code.
   2585   int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
   2586 
   2587   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
   2588        !back_edges.Done();
   2589        back_edges.Next()) {
   2590     if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) {
   2591       PatchInterruptCodeAt(unoptimized_code,
   2592                            back_edges.pc(),
   2593                            interrupt_code,
   2594                            replacement_code);
   2595     }
   2596   }
   2597 
   2598   unoptimized_code->set_back_edges_patched_for_osr(true);
   2599 #ifdef DEBUG
   2600   Deoptimizer::VerifyInterruptCode(
   2601       unoptimized_code, interrupt_code, replacement_code, loop_nesting_level);
   2602 #endif  // DEBUG
   2603 }
   2604 
   2605 
   2606 void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
   2607                                       Code* interrupt_code,
   2608                                       Code* replacement_code) {
   2609   // Iterate over the back edge table and revert the patched interrupt calls.
   2610   ASSERT(unoptimized_code->back_edges_patched_for_osr());
   2611   int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
   2612 
   2613   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
   2614        !back_edges.Done();
   2615        back_edges.Next()) {
   2616     if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) {
   2617       RevertInterruptCodeAt(unoptimized_code,
   2618                             back_edges.pc(),
   2619                             interrupt_code,
   2620                             replacement_code);
   2621     }
   2622   }
   2623 
   2624   unoptimized_code->set_back_edges_patched_for_osr(false);
   2625   unoptimized_code->set_allow_osr_at_loop_nesting_level(0);
   2626 #ifdef DEBUG
   2627   // Assert that none of the back edges are patched anymore.
   2628   Deoptimizer::VerifyInterruptCode(
   2629       unoptimized_code, interrupt_code, replacement_code, -1);
   2630 #endif  // DEBUG
   2631 }
   2632 
   2633 
   2634 #ifdef DEBUG
   2635 void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code,
   2636                                       Code* interrupt_code,
   2637                                       Code* replacement_code,
   2638                                       int loop_nesting_level) {
   2639   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
   2640        !back_edges.Done();
   2641        back_edges.Next()) {
   2642     uint32_t loop_depth = back_edges.loop_depth();
   2643     CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
   2644     // Assert that all back edges for shallower loops (and only those)
   2645     // have already been patched.
   2646     CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
   2647              InterruptCodeIsPatched(unoptimized_code,
   2648                                     back_edges.pc(),
   2649                                     interrupt_code,
   2650                                     replacement_code));
   2651   }
   2652 }
   2653 #endif  // DEBUG
   2654 
   2655 
   2656 unsigned Deoptimizer::ComputeInputFrameSize() const {
   2657   unsigned fixed_size = ComputeFixedSize(function_);
   2658   // The fp-to-sp delta already takes the context and the function
   2659   // into account so we have to avoid double counting them (-2).
   2660   unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize);
   2661 #ifdef DEBUG
   2662   if (bailout_type_ == OSR) {
   2663     // TODO(kasperl): It would be nice if we could verify that the
   2664     // size matches with the stack height we can compute based on the
    2665     // environment at the OSR entry. The code for that is built into
   2666     // the DoComputeOsrOutputFrame function for now.
   2667   } else if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
   2668     unsigned stack_slots = compiled_code_->stack_slots();
   2669     unsigned outgoing_size = ComputeOutgoingArgumentSize();
   2670     ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
   2671   }
   2672 #endif
   2673   return result;
   2674 }
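
         // Illustrative arithmetic (a sketch, writing k for kPointerSize): a
         // function with one formal parameter has fixed_size == 2k for the
         // incoming arguments (parameter plus receiver) plus 4k for the fixed
         // part (return address, frame pointer, function and context), i.e. 6k.
         // If the optimized frame has three spill slots and no outgoing
         // arguments, fp_to_sp_delta_ is 5k (the spills plus context and
         // function), so result == 6k + 5k - 2k == 9k, which matches the debug
         // check fixed_size + stack_slots * k + outgoing_size above.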
   2675 
   2676 
   2677 unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
   2678   // The fixed part of the frame consists of the return address, frame
   2679   // pointer, function, context, and all the incoming arguments.
   2680   return ComputeIncomingArgumentSize(function) +
   2681       StandardFrameConstants::kFixedFrameSize;
   2682 }
   2683 
   2684 
   2685 unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
    2686   // The incoming arguments are the values for the formal parameters and
   2687   // the receiver. Every slot contains a pointer.
   2688   if (function->IsSmi()) {
   2689     ASSERT(Smi::cast(function) == Smi::FromInt(StackFrame::STUB));
   2690     return 0;
   2691   }
   2692   unsigned arguments = function->shared()->formal_parameter_count() + 1;
   2693   return arguments * kPointerSize;
   2694 }
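
         // For example, a function with two formal parameters yields
         // 3 * kPointerSize (the two parameters plus the receiver), while the
         // Smi sentinel used for stub frames yields 0.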
   2695 
   2696 
   2697 unsigned Deoptimizer::ComputeOutgoingArgumentSize() const {
   2698   DeoptimizationInputData* data = DeoptimizationInputData::cast(
   2699       compiled_code_->deoptimization_data());
   2700   unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
   2701   return height * kPointerSize;
   2702 }
   2703 
   2704 
   2705 Object* Deoptimizer::ComputeLiteral(int index) const {
   2706   DeoptimizationInputData* data = DeoptimizationInputData::cast(
   2707       compiled_code_->deoptimization_data());
   2708   FixedArray* literals = data->LiteralArray();
   2709   return literals->get(index);
   2710 }
   2711 
   2712 
   2713 void Deoptimizer::AddObjectStart(intptr_t slot, int length, bool is_args) {
   2714   ObjectMaterializationDescriptor object_desc(
   2715       reinterpret_cast<Address>(slot), jsframe_count_, length, -1, is_args);
   2716   deferred_objects_.Add(object_desc);
   2717 }
   2718 
   2719 
   2720 void Deoptimizer::AddObjectDuplication(intptr_t slot, int object_index) {
   2721   ObjectMaterializationDescriptor object_desc(
   2722       reinterpret_cast<Address>(slot), jsframe_count_, -1, object_index, false);
   2723   deferred_objects_.Add(object_desc);
   2724 }
   2725 
   2726 
   2727 void Deoptimizer::AddObjectTaggedValue(intptr_t value) {
   2728   deferred_objects_tagged_values_.Add(reinterpret_cast<Object*>(value));
   2729   deferred_objects_double_values_.Add(isolate()->heap()->nan_value()->value());
   2730 }
   2731 
   2732 
   2733 void Deoptimizer::AddObjectDoubleValue(double value) {
   2734   deferred_objects_tagged_values_.Add(isolate()->heap()->the_hole_value());
   2735   deferred_objects_double_values_.Add(value);
   2736 }
   2737 
   2738 
   2739 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) {
   2740   HeapNumberMaterializationDescriptor value_desc(
   2741       reinterpret_cast<Address>(slot_address), value);
   2742   deferred_heap_numbers_.Add(value_desc);
   2743 }
   2744 
   2745 
   2746 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
   2747                                                    BailoutType type,
   2748                                                    int max_entry_id) {
   2749   // We cannot run this if the serializer is enabled because this will
   2750   // cause us to emit relocation information for the external
   2751   // references. This is fine because the deoptimizer's code section
   2752   // isn't meant to be serialized at all.
   2753   ASSERT(type == EAGER || type == SOFT || type == LAZY);
   2754   DeoptimizerData* data = isolate->deoptimizer_data();
   2755   int entry_count = data->deopt_entry_code_entries_[type];
   2756   if (max_entry_id < entry_count) return;
   2757   entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
   2758   while (max_entry_id >= entry_count) entry_count *= 2;
   2759   ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries);
   2760 
   2761   MacroAssembler masm(isolate, NULL, 16 * KB);
   2762   masm.set_emit_debug_code(false);
   2763   GenerateDeoptimizationEntries(&masm, entry_count, type);
   2764   CodeDesc desc;
   2765   masm.GetCode(&desc);
   2766   ASSERT(!RelocInfo::RequiresRelocation(desc));
   2767 
   2768   MemoryChunk* chunk = data->deopt_entry_code_[type];
   2769   ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
   2770          desc.instr_size);
   2771   chunk->CommitArea(desc.instr_size);
   2772   CopyBytes(chunk->area_start(), desc.buffer,
   2773       static_cast<size_t>(desc.instr_size));
   2774   CPU::FlushICache(chunk->area_start(), desc.instr_size);
   2775 
   2776   data->deopt_entry_code_entries_[type] = entry_count;
   2777 }
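
         // Illustrative sizing (sketch): if deopt_entry_code_entries_[type] is
         // currently 64 and max_entry_id is 70, the early return is skipped and
         // entry_count doubles once to 128 before the entry table is
         // regenerated; a max_entry_id of 63 or less would have hit the early
         // return and left the existing table untouched.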
   2778 
   2779 
   2780 FrameDescription::FrameDescription(uint32_t frame_size,
   2781                                    JSFunction* function)
   2782     : frame_size_(frame_size),
   2783       function_(function),
   2784       top_(kZapUint32),
   2785       pc_(kZapUint32),
   2786       fp_(kZapUint32),
   2787       context_(kZapUint32) {
   2788   // Zap all the registers.
   2789   for (int r = 0; r < Register::kNumRegisters; r++) {
   2790     SetRegister(r, kZapUint32);
   2791   }
   2792 
   2793   // Zap all the slots.
   2794   for (unsigned o = 0; o < frame_size; o += kPointerSize) {
   2795     SetFrameSlot(o, kZapUint32);
   2796   }
   2797 }
   2798 
   2799 
   2800 int FrameDescription::ComputeFixedSize() {
   2801   return StandardFrameConstants::kFixedFrameSize +
   2802       (ComputeParametersCount() + 1) * kPointerSize;
   2803 }
   2804 
   2805 
   2806 unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
   2807   if (slot_index >= 0) {
   2808     // Local or spill slots. Skip the fixed part of the frame
   2809     // including all arguments.
   2810     unsigned base = GetFrameSize() - ComputeFixedSize();
   2811     return base - ((slot_index + 1) * kPointerSize);
   2812   } else {
   2813     // Incoming parameter.
   2814     int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
   2815     unsigned base = GetFrameSize() - arg_size;
   2816     return base - ((slot_index + 1) * kPointerSize);
   2817   }
   2818 }
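
         // Illustrative offsets (a sketch assuming a JavaScript frame with two
         // formal parameters and GetFrameSize() == 10 * kPointerSize, so that
         // ComputeFixedSize() == 7 * kPointerSize when
         // StandardFrameConstants::kFixedFrameSize is 4 * kPointerSize):
         //   slot  0 (first local/spill slot)  -> offset 2 * kPointerSize
         //   slot  1                           -> offset 1 * kPointerSize
         //   slot -1 (an incoming parameter)   -> offset 7 * kPointerSize
         //   slot -2                           -> offset 8 * kPointerSize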
   2819 
   2820 
   2821 int FrameDescription::ComputeParametersCount() {
   2822   switch (type_) {
   2823     case StackFrame::JAVA_SCRIPT:
   2824       return function_->shared()->formal_parameter_count();
   2825     case StackFrame::ARGUMENTS_ADAPTOR: {
    2826       // Last slot contains the number of incoming arguments as a smi.
   2827       // Can't use GetExpression(0) because it would cause infinite recursion.
   2828       return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
   2829     }
   2830     case StackFrame::STUB:
   2831       return -1;  // Minus receiver.
   2832     default:
   2833       UNREACHABLE();
   2834       return 0;
   2835   }
   2836 }
   2837 
   2838 
   2839 Object* FrameDescription::GetParameter(int index) {
   2840   ASSERT(index >= 0);
   2841   ASSERT(index < ComputeParametersCount());
   2842   // The slot indexes for incoming arguments are negative.
   2843   unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
   2844   return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
   2845 }
   2846 
   2847 
   2848 unsigned FrameDescription::GetExpressionCount() {
   2849   ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
   2850   unsigned size = GetFrameSize() - ComputeFixedSize();
   2851   return size / kPointerSize;
   2852 }
   2853 
   2854 
   2855 Object* FrameDescription::GetExpression(int index) {
   2856   ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
   2857   unsigned offset = GetOffsetFromSlotIndex(index);
   2858   return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
   2859 }
   2860 
   2861 
   2862 void TranslationBuffer::Add(int32_t value, Zone* zone) {
   2863   // Encode the sign bit in the least significant bit.
   2864   bool is_negative = (value < 0);
   2865   uint32_t bits = ((is_negative ? -value : value) << 1) |
   2866       static_cast<int32_t>(is_negative);
   2867   // Encode the individual bytes using the least significant bit of
   2868   // each byte to indicate whether or not more bytes follow.
   2869   do {
   2870     uint32_t next = bits >> 7;
   2871     contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
   2872     bits = next;
   2873   } while (bits != 0);
   2874 }
   2875 
   2876 
   2877 int32_t TranslationIterator::Next() {
   2878   // Run through the bytes until we reach one with a least significant
   2879   // bit of zero (marks the end).
   2880   uint32_t bits = 0;
   2881   for (int i = 0; true; i += 7) {
   2882     ASSERT(HasNext());
   2883     uint8_t next = buffer_->get(index_++);
   2884     bits |= (next >> 1) << i;
   2885     if ((next & 1) == 0) break;
   2886   }
   2887   // The bits encode the sign in the least significant bit.
   2888   bool is_negative = (bits & 1) == 1;
   2889   int32_t result = bits >> 1;
   2890   return is_negative ? -result : result;
   2891 }
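
         // Illustrative encoding (sketch): Add() stores the sign of the value in
         // bit 0 of the payload and then emits seven payload bits per byte, with
         // bit 0 of each byte acting as a "more bytes follow" flag; Next()
         // reverses this.  For example:
         //   Add(-5)  : bits == (5 << 1) | 1 == 11   -> one byte, 0x16
         //   Add(300) : bits == (300 << 1)    == 600 -> two bytes, 0xB1 0x08
         //   Next() on 0xB1 0x08: (0xB1 >> 1) == 0x58 plus (0x08 >> 1) << 7 ==
         //   512 gives bits == 600; the sign bit is clear, so the result is 300.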
   2892 
   2893 
   2894 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
   2895   int length = contents_.length();
   2896   Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
   2897   OS::MemCopy(
   2898       result->GetDataStartAddress(), contents_.ToVector().start(), length);
   2899   return result;
   2900 }
   2901 
   2902 
   2903 void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
   2904   buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
   2905   buffer_->Add(literal_id, zone());
   2906   buffer_->Add(height, zone());
   2907 }
   2908 
   2909 
   2910 void Translation::BeginGetterStubFrame(int literal_id) {
   2911   buffer_->Add(GETTER_STUB_FRAME, zone());
   2912   buffer_->Add(literal_id, zone());
   2913 }
   2914 
   2915 
   2916 void Translation::BeginSetterStubFrame(int literal_id) {
   2917   buffer_->Add(SETTER_STUB_FRAME, zone());
   2918   buffer_->Add(literal_id, zone());
   2919 }
   2920 
   2921 
   2922 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
   2923   buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
   2924   buffer_->Add(literal_id, zone());
   2925   buffer_->Add(height, zone());
   2926 }
   2927 
   2928 
   2929 void Translation::BeginJSFrame(BailoutId node_id,
   2930                                int literal_id,
   2931                                unsigned height) {
   2932   buffer_->Add(JS_FRAME, zone());
   2933   buffer_->Add(node_id.ToInt(), zone());
   2934   buffer_->Add(literal_id, zone());
   2935   buffer_->Add(height, zone());
   2936 }
   2937 
   2938 
   2939 void Translation::BeginCompiledStubFrame() {
   2940   buffer_->Add(COMPILED_STUB_FRAME, zone());
   2941 }
   2942 
   2943 
   2944 void Translation::BeginArgumentsObject(int args_length) {
   2945   buffer_->Add(ARGUMENTS_OBJECT, zone());
   2946   buffer_->Add(args_length, zone());
   2947 }
   2948 
   2949 
   2950 void Translation::BeginCapturedObject(int length) {
   2951   buffer_->Add(CAPTURED_OBJECT, zone());
   2952   buffer_->Add(length, zone());
   2953 }
   2954 
   2955 
   2956 void Translation::DuplicateObject(int object_index) {
   2957   buffer_->Add(DUPLICATED_OBJECT, zone());
   2958   buffer_->Add(object_index, zone());
   2959 }
   2960 
   2961 
   2962 void Translation::StoreRegister(Register reg) {
   2963   buffer_->Add(REGISTER, zone());
   2964   buffer_->Add(reg.code(), zone());
   2965 }
   2966 
   2967 
   2968 void Translation::StoreInt32Register(Register reg) {
   2969   buffer_->Add(INT32_REGISTER, zone());
   2970   buffer_->Add(reg.code(), zone());
   2971 }
   2972 
   2973 
   2974 void Translation::StoreUint32Register(Register reg) {
   2975   buffer_->Add(UINT32_REGISTER, zone());
   2976   buffer_->Add(reg.code(), zone());
   2977 }
   2978 
   2979 
   2980 void Translation::StoreDoubleRegister(DoubleRegister reg) {
   2981   buffer_->Add(DOUBLE_REGISTER, zone());
   2982   buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
   2983 }
   2984 
   2985 
   2986 void Translation::StoreStackSlot(int index) {
   2987   buffer_->Add(STACK_SLOT, zone());
   2988   buffer_->Add(index, zone());
   2989 }
   2990 
   2991 
   2992 void Translation::StoreInt32StackSlot(int index) {
   2993   buffer_->Add(INT32_STACK_SLOT, zone());
   2994   buffer_->Add(index, zone());
   2995 }
   2996 
   2997 
   2998 void Translation::StoreUint32StackSlot(int index) {
   2999   buffer_->Add(UINT32_STACK_SLOT, zone());
   3000   buffer_->Add(index, zone());
   3001 }
   3002 
   3003 
   3004 void Translation::StoreDoubleStackSlot(int index) {
   3005   buffer_->Add(DOUBLE_STACK_SLOT, zone());
   3006   buffer_->Add(index, zone());
   3007 }
   3008 
   3009 
   3010 void Translation::StoreLiteral(int literal_id) {
   3011   buffer_->Add(LITERAL, zone());
   3012   buffer_->Add(literal_id, zone());
   3013 }
   3014 
   3015 
   3016 void Translation::StoreArgumentsObject(bool args_known,
   3017                                        int args_index,
   3018                                        int args_length) {
   3019   buffer_->Add(ARGUMENTS_OBJECT, zone());
   3020   buffer_->Add(args_known, zone());
   3021   buffer_->Add(args_index, zone());
   3022   buffer_->Add(args_length, zone());
   3023 }
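
         // Illustrative use of the builders above (a sketch only; buffer, zone,
         // ast_id, literal_id, height, reg and index stand for values the
         // caller already has):
         //
         //   Translation translation(&buffer, 1, 1, zone);  // writes BEGIN
         //   translation.BeginJSFrame(ast_id, literal_id, height);
         //   translation.StoreRegister(reg);          // tagged value in a register
         //   translation.StoreInt32StackSlot(index);  // untagged int32 spill slot
         //   translation.StoreLiteral(literal_id);    // constant from the literals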
   3024 
   3025 
   3026 int Translation::NumberOfOperandsFor(Opcode opcode) {
   3027   switch (opcode) {
   3028     case GETTER_STUB_FRAME:
   3029     case SETTER_STUB_FRAME:
   3030     case DUPLICATED_OBJECT:
   3031     case ARGUMENTS_OBJECT:
   3032     case CAPTURED_OBJECT:
   3033     case REGISTER:
   3034     case INT32_REGISTER:
   3035     case UINT32_REGISTER:
   3036     case DOUBLE_REGISTER:
   3037     case STACK_SLOT:
   3038     case INT32_STACK_SLOT:
   3039     case UINT32_STACK_SLOT:
   3040     case DOUBLE_STACK_SLOT:
   3041     case LITERAL:
   3042     case COMPILED_STUB_FRAME:
   3043       return 1;
   3044     case BEGIN:
   3045     case ARGUMENTS_ADAPTOR_FRAME:
   3046     case CONSTRUCT_STUB_FRAME:
   3047       return 2;
   3048     case JS_FRAME:
   3049       return 3;
   3050   }
   3051   UNREACHABLE();
   3052   return -1;
   3053 }
   3054 
   3055 
   3056 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
   3057 
   3058 const char* Translation::StringFor(Opcode opcode) {
   3059   switch (opcode) {
   3060     case BEGIN:
   3061       return "BEGIN";
   3062     case JS_FRAME:
   3063       return "JS_FRAME";
   3064     case ARGUMENTS_ADAPTOR_FRAME:
   3065       return "ARGUMENTS_ADAPTOR_FRAME";
   3066     case CONSTRUCT_STUB_FRAME:
   3067       return "CONSTRUCT_STUB_FRAME";
   3068     case GETTER_STUB_FRAME:
   3069       return "GETTER_STUB_FRAME";
   3070     case SETTER_STUB_FRAME:
   3071       return "SETTER_STUB_FRAME";
   3072     case COMPILED_STUB_FRAME:
   3073       return "COMPILED_STUB_FRAME";
   3074     case REGISTER:
   3075       return "REGISTER";
   3076     case INT32_REGISTER:
   3077       return "INT32_REGISTER";
   3078     case UINT32_REGISTER:
   3079       return "UINT32_REGISTER";
   3080     case DOUBLE_REGISTER:
   3081       return "DOUBLE_REGISTER";
   3082     case STACK_SLOT:
   3083       return "STACK_SLOT";
   3084     case INT32_STACK_SLOT:
   3085       return "INT32_STACK_SLOT";
   3086     case UINT32_STACK_SLOT:
   3087       return "UINT32_STACK_SLOT";
   3088     case DOUBLE_STACK_SLOT:
   3089       return "DOUBLE_STACK_SLOT";
   3090     case LITERAL:
   3091       return "LITERAL";
   3092     case DUPLICATED_OBJECT:
   3093       return "DUPLICATED_OBJECT";
   3094     case ARGUMENTS_OBJECT:
   3095       return "ARGUMENTS_OBJECT";
   3096     case CAPTURED_OBJECT:
   3097       return "CAPTURED_OBJECT";
   3098   }
   3099   UNREACHABLE();
   3100   return "";
   3101 }
   3102 
   3103 #endif
   3104 
   3105 
   3106 DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
   3107   GlobalHandles* global_handles = code->GetIsolate()->global_handles();
   3108   // Globalize the code object and make it weak.
   3109   code_ = Handle<Code>::cast(global_handles->Create(code));
   3110   global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),
   3111                            this,
   3112                            Deoptimizer::HandleWeakDeoptimizedCode);
   3113 }
   3114 
   3115 
   3116 DeoptimizingCodeListNode::~DeoptimizingCodeListNode() {
   3117   GlobalHandles* global_handles = code_->GetIsolate()->global_handles();
   3118   global_handles->Destroy(reinterpret_cast<Object**>(code_.location()));
   3119 }
   3120 
   3121 
   3122 // We can't intermix stack decoding and allocations because
    3123 // the deoptimization infrastructure is not GC safe.
   3124 // Thus we build a temporary structure in malloced space.
   3125 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
   3126                                             DeoptimizationInputData* data,
   3127                                             JavaScriptFrame* frame) {
   3128   Translation::Opcode opcode =
   3129       static_cast<Translation::Opcode>(iterator->Next());
   3130 
   3131   switch (opcode) {
   3132     case Translation::BEGIN:
   3133     case Translation::JS_FRAME:
   3134     case Translation::ARGUMENTS_ADAPTOR_FRAME:
   3135     case Translation::CONSTRUCT_STUB_FRAME:
   3136     case Translation::GETTER_STUB_FRAME:
   3137     case Translation::SETTER_STUB_FRAME:
   3138       // Peeled off before getting here.
   3139       break;
   3140 
   3141     case Translation::DUPLICATED_OBJECT:
   3142     case Translation::ARGUMENTS_OBJECT:
   3143     case Translation::CAPTURED_OBJECT:
    3144       // This can only be emitted for local slots, not for argument slots.
   3145       break;
   3146 
   3147     case Translation::REGISTER:
   3148     case Translation::INT32_REGISTER:
   3149     case Translation::UINT32_REGISTER:
   3150     case Translation::DOUBLE_REGISTER:
    3151       // We are at a safepoint which corresponds to a call.  All registers
    3152       // are saved by the caller, so there are no live registers at this
    3153       // point.  Thus these translation commands should not be used.
   3154       break;
   3155 
   3156     case Translation::STACK_SLOT: {
   3157       int slot_index = iterator->Next();
   3158       Address slot_addr = SlotAddress(frame, slot_index);
   3159       return SlotRef(slot_addr, SlotRef::TAGGED);
   3160     }
   3161 
   3162     case Translation::INT32_STACK_SLOT: {
   3163       int slot_index = iterator->Next();
   3164       Address slot_addr = SlotAddress(frame, slot_index);
   3165       return SlotRef(slot_addr, SlotRef::INT32);
   3166     }
   3167 
   3168     case Translation::UINT32_STACK_SLOT: {
   3169       int slot_index = iterator->Next();
   3170       Address slot_addr = SlotAddress(frame, slot_index);
   3171       return SlotRef(slot_addr, SlotRef::UINT32);
   3172     }
   3173 
   3174     case Translation::DOUBLE_STACK_SLOT: {
   3175       int slot_index = iterator->Next();
   3176       Address slot_addr = SlotAddress(frame, slot_index);
   3177       return SlotRef(slot_addr, SlotRef::DOUBLE);
   3178     }
   3179 
   3180     case Translation::LITERAL: {
   3181       int literal_index = iterator->Next();
   3182       return SlotRef(data->GetIsolate(),
   3183                      data->LiteralArray()->get(literal_index));
   3184     }
   3185 
   3186     case Translation::COMPILED_STUB_FRAME:
   3187       UNREACHABLE();
   3188       break;
   3189   }
   3190 
   3191   UNREACHABLE();
   3192   return SlotRef();
   3193 }
   3194 
   3195 
   3196 void SlotRef::ComputeSlotsForArguments(Vector<SlotRef>* args_slots,
   3197                                        TranslationIterator* it,
   3198                                        DeoptimizationInputData* data,
   3199                                        JavaScriptFrame* frame) {
   3200   // Process the translation commands for the arguments.
   3201 
   3202   // Skip the translation command for the receiver.
   3203   it->Skip(Translation::NumberOfOperandsFor(
   3204       static_cast<Translation::Opcode>(it->Next())));
   3205 
   3206   // Compute slots for arguments.
   3207   for (int i = 0; i < args_slots->length(); ++i) {
   3208     (*args_slots)[i] = ComputeSlotForNextArgument(it, data, frame);
   3209   }
   3210 }
   3211 
   3212 
   3213 Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
   3214     JavaScriptFrame* frame,
   3215     int inlined_jsframe_index,
   3216     int formal_parameter_count) {
   3217   DisallowHeapAllocation no_gc;
   3218   int deopt_index = Safepoint::kNoDeoptimizationIndex;
   3219   DeoptimizationInputData* data =
   3220       static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
   3221   TranslationIterator it(data->TranslationByteArray(),
   3222                          data->TranslationIndex(deopt_index)->value());
   3223   Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
   3224   ASSERT(opcode == Translation::BEGIN);
   3225   it.Next();  // Drop frame count.
   3226   int jsframe_count = it.Next();
   3227   USE(jsframe_count);
   3228   ASSERT(jsframe_count > inlined_jsframe_index);
   3229   int jsframes_to_skip = inlined_jsframe_index;
   3230   while (true) {
   3231     opcode = static_cast<Translation::Opcode>(it.Next());
   3232     if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
   3233       if (jsframes_to_skip == 0) {
   3234         ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);
   3235 
   3236         it.Skip(1);  // literal id
   3237         int height = it.Next();
   3238 
   3239         // We reached the arguments adaptor frame corresponding to the
   3240         // inlined function in question.  Number of arguments is height - 1.
   3241         Vector<SlotRef> args_slots =
   3242             Vector<SlotRef>::New(height - 1);  // Minus receiver.
   3243         ComputeSlotsForArguments(&args_slots, &it, data, frame);
   3244         return args_slots;
   3245       }
   3246     } else if (opcode == Translation::JS_FRAME) {
   3247       if (jsframes_to_skip == 0) {
   3248         // Skip over operands to advance to the next opcode.
   3249         it.Skip(Translation::NumberOfOperandsFor(opcode));
   3250 
   3251         // We reached the frame corresponding to the inlined function
   3252         // in question.  Process the translation commands for the
   3253         // arguments.  Number of arguments is equal to the number of
    3254         // arguments.  The number of arguments is equal to the formal
    3255         // parameter count.
   3256             Vector<SlotRef>::New(formal_parameter_count);
   3257         ComputeSlotsForArguments(&args_slots, &it, data, frame);
   3258         return args_slots;
   3259       }
   3260       jsframes_to_skip--;
   3261     }
   3262 
   3263     // Skip over operands to advance to the next opcode.
   3264     it.Skip(Translation::NumberOfOperandsFor(opcode));
   3265   }
   3266 
   3267   UNREACHABLE();
   3268   return Vector<SlotRef>();
   3269 }
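
         // Illustrative walk (sketch): for an optimized frame whose translation
         // records, in order, BEGIN, a JS_FRAME for the outer function, an
         // ARGUMENTS_ADAPTOR_FRAME and then a JS_FRAME for the inlined callee,
         // a call with inlined_jsframe_index == 1 skips the outer JS_FRAME
         // (jsframes_to_skip drops to 0) and returns height - 1 SlotRefs for the
         // adaptor frame's argument slots, i.e. everything but the receiver.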
   3270 
   3271 #ifdef ENABLE_DEBUGGER_SUPPORT
   3272 
   3273 DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
   3274                                            int frame_index,
   3275                                            bool has_arguments_adaptor,
   3276                                            bool has_construct_stub) {
   3277   FrameDescription* output_frame = deoptimizer->output_[frame_index];
   3278   function_ = output_frame->GetFunction();
   3279   has_construct_stub_ = has_construct_stub;
   3280   expression_count_ = output_frame->GetExpressionCount();
   3281   expression_stack_ = new Object*[expression_count_];
   3282   // Get the source position using the unoptimized code.
   3283   Address pc = reinterpret_cast<Address>(output_frame->GetPc());
   3284   Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
   3285   source_position_ = code->SourcePosition(pc);
   3286 
   3287   for (int i = 0; i < expression_count_; i++) {
   3288     SetExpression(i, output_frame->GetExpression(i));
   3289   }
   3290 
   3291   if (has_arguments_adaptor) {
   3292     output_frame = deoptimizer->output_[frame_index - 1];
   3293     ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
   3294   }
   3295 
   3296   parameters_count_ = output_frame->ComputeParametersCount();
   3297   parameters_ = new Object*[parameters_count_];
   3298   for (int i = 0; i < parameters_count_; i++) {
   3299     SetParameter(i, output_frame->GetParameter(i));
   3300   }
   3301 }
   3302 
   3303 
   3304 DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
   3305   delete[] expression_stack_;
   3306   delete[] parameters_;
   3307 }
   3308 
   3309 
   3310 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
   3311   v->VisitPointer(BitCast<Object**>(&function_));
   3312   v->VisitPointers(parameters_, parameters_ + parameters_count_);
   3313   v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
   3314 }
   3315 
   3316 #endif  // ENABLE_DEBUGGER_SUPPORT
   3317 
   3318 } }  // namespace v8::internal
   3319