// deoptimizer.h (V8, src/) — code-browser chrome removed: Home | History | Annotate | Download | only in src
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #ifndef V8_DEOPTIMIZER_H_
     29 #define V8_DEOPTIMIZER_H_
     30 
#include "v8.h"

#include <cstring>

#include "allocation.h"
#include "macro-assembler.h"
#include "zone-inl.h"
     36 
     37 
     38 namespace v8 {
     39 namespace internal {
     40 
     41 
     42 static inline double read_double_value(Address p) {
     43 #ifdef V8_HOST_CAN_READ_UNALIGNED
     44   return Memory::double_at(p);
     45 #else  // V8_HOST_CAN_READ_UNALIGNED
     46   // Prevent gcc from using load-double (mips ldc1) on (possibly)
     47   // non-64-bit aligned address.
     48   union conversion {
     49     double d;
     50     uint32_t u[2];
     51   } c;
     52   c.u[0] = *reinterpret_cast<uint32_t*>(p);
     53   c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
     54   return c.d;
     55 #endif  // V8_HOST_CAN_READ_UNALIGNED
     56 }
     57 
     58 
     59 class FrameDescription;
     60 class TranslationIterator;
     61 class DeoptimizingCodeListNode;
     62 class DeoptimizedFrameInfo;
     63 
// Records a double value that must be boxed as a HeapNumber and written
// into a specific frame slot during Deoptimizer::MaterializeHeapObjects
// (queued in deferred_heap_numbers_).
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(Address slot_address, double val)
      : slot_address_(slot_address), val_(val) { }

  // Frame slot that will receive the materialized HeapNumber.
  Address slot_address() const { return slot_address_; }
  // The numeric value to materialize.
  double value() const { return val_; }

 private:
  Address slot_address_;
  double val_;
};
     76 
     77 
// Describes an object that the optimizing compiler de-materialized (e.g. an
// escape-analyzed object or an arguments object) and that the deoptimizer
// must re-create and store into a frame slot.
class ObjectMaterializationDescriptor BASE_EMBEDDED {
 public:
  ObjectMaterializationDescriptor(
      Address slot_address, int frame, int length, int duplicate, bool is_args)
      : slot_address_(slot_address),
        jsframe_index_(frame),
        object_length_(length),
        duplicate_object_(duplicate),
        is_arguments_(is_args) { }

  // Frame slot that will receive the materialized object.
  Address slot_address() const { return slot_address_; }
  // Index of the JS frame this object belongs to.
  int jsframe_index() const { return jsframe_index_; }
  // Number of fields to materialize for this object.
  int object_length() const { return object_length_; }
  // Index of a previously recorded object that this slot duplicates.
  // NOTE(review): sentinel for "no duplicate" is defined in deoptimizer.cc.
  int duplicate_object() const { return duplicate_object_; }
  // True if this describes an (optimized-away) arguments object.
  bool is_arguments() const { return is_arguments_; }

  // Only used for allocated receivers in DoComputeConstructStubFrame.
  void patch_slot_address(intptr_t slot) {
    slot_address_ = reinterpret_cast<Address>(slot);
  }

 private:
  Address slot_address_;
  int jsframe_index_;
  int object_length_;
  int duplicate_object_;
  bool is_arguments_;
};
    106 
    107 
// Visitor interface used when iterating over all optimized JSFunctions,
// grouped by the native context that owns them (see
// Deoptimizer::VisitAllOptimizedFunctions*).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given native context.
  virtual void EnterContext(Context* context) = 0;

  // Called once for each optimized function in the context.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given native context.
  virtual void LeaveContext(Context* context) = 0;
};
    122 
    123 
// Predicate used to select which optimized functions a bulk operation
// (e.g. Deoptimizer::DeoptimizeAllFunctionsWith) should act on.
class OptimizedFunctionFilter BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionFilter() {}

  // Returns true if |function| should be included (taken) by the operation.
  virtual bool TakeFunction(JSFunction* function) = 0;
};
    130 
    131 
    132 class Deoptimizer;
    133 
    134 
// Deoptimizer: tears down an optimized frame and rebuilds the corresponding
// unoptimized frame(s). Typical flow: New() creates the deoptimizer, Grab()
// retrieves it, ComputeOutputFrames() decodes the translation into output
// FrameDescriptions, and MaterializeHeapObjects() later allocates any
// deferred heap numbers and de-materialized objects.
//
// NOTE: several member offsets below are exposed to generated code via the
// *_offset() accessors, so field order/layout must not be changed casually.
class Deoptimizer : public Malloced {
 public:
  // The different reasons/modes for entering the deoptimizer.
  enum BailoutType {
    EAGER,
    LAZY,
    SOFT,
    OSR,
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  // Number of bailout types that get their own lazily generated entry code
  // (EAGER, LAZY, SOFT); used to size the tables in DeoptimizerData.
  static const int kBailoutTypesWithCodeEntry = SOFT + 1;

  // One entry of the deoptimization jump table: the deopt entry address to
  // jump to, the bailout type, and whether a frame must be built first.
  // `label` is bound where the entry's trampoline code is emitted.
  struct JumpTableEntry {
    inline JumpTableEntry(Address entry,
                          Deoptimizer::BailoutType type,
                          bool frame)
        : label(),
          address(entry),
          bailout_type(type),
          needs_frame(frame) { }
    Label label;
    Address address;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  // Returns true if deopts of the given kind should produce trace output.
  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
  // Short human-readable name for a bailout type (for trace output).
  static const char* MessageFor(BailoutType type);

  // Number of output frames produced by ComputeOutputFrames.
  int output_count() const { return output_count_; }

  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  // Creates a deoptimizer for |function| and stores it as the isolate's
  // current deoptimizer; it is later retrieved (and ownership taken) with
  // Grab().
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);
#endif

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Iterate over all the functions which share the same code object
  // and make them use unoptimized version.
  static void ReplaceCodeForRelatedFunctions(JSFunction* function, Code* code);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll(Isolate* isolate);

  // Deoptimize all optimized functions belonging to |object|'s context.
  static void DeoptimizeGlobalObject(JSObject* object);

  // Deoptimize every function in the isolate accepted by |filter|.
  static void DeoptimizeAllFunctionsWith(Isolate* isolate,
                                         OptimizedFunctionFilter* filter);

  // Deoptimize the given list of code objects.
  static void DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes);

  // Deoptimize every function in |context| accepted by |filter|.
  static void DeoptimizeAllFunctionsForContext(
      Context* context, OptimizedFunctionFilter* filter);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(Isolate* isolate,
                                         OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all interrupts with allowed loop depth in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchInterruptCode(Code* unoptimized_code,
                                 Code* interrupt_code,
                                 Code* replacement_code);

  // Patch the interrupt at the instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchInterruptCodeAt(Code* unoptimized_code,
                                   Address pc_after,
                                   Code* interrupt_code,
                                   Code* replacement_code);

  // Change all patched interrupts patched in the unoptimized code
  // back to normal interrupts.
  static void RevertInterruptCode(Code* unoptimized_code,
                                  Code* interrupt_code,
                                  Code* replacement_code);

  // Change patched interrupt in the unoptimized code
  // back to a normal interrupt.
  static void RevertInterruptCodeAt(Code* unoptimized_code,
                                    Address pc_after,
                                    Code* interrupt_code,
                                    Code* replacement_code);

#ifdef DEBUG
  static bool InterruptCodeIsPatched(Code* unoptimized_code,
                                     Address pc_after,
                                     Code* interrupt_code,
                                     Code* replacement_code);

  // Verify that all back edges of a certain loop depth are patched.
  static void VerifyInterruptCode(Code* unoptimized_code,
                                  Code* interrupt_code,
                                  Code* replacement_code,
                                  int loop_nesting_level);
#endif  // DEBUG

  ~Deoptimizer();

  // Allocates the heap numbers / objects deferred during frame translation
  // and patches them into the output frames.
  void MaterializeHeapObjects(JavaScriptFrameIterator* it);
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);
#endif

  // Decodes the translation and fills in the output frame descriptions.
  static void ComputeOutputFrames(Deoptimizer* deoptimizer);


  // Whether GetDeoptimizationEntry may generate the entry code on demand or
  // must only compute the address.
  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };


  // Returns the address of deopt entry |id| for the given bailout type.
  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  // Inverse of GetDeoptimizationEntry; returns kNotDeoptimizationEntry if
  // |addr| is not a deopt entry of the given type.
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  // Sentinel returned by GetDeoptimizationId for non-entry addresses.
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    // Hook for subclasses to emit code before the common entry sequence.
    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  // Entry generator that emits a table of |count| deopt entries.
  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type,  int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

  // Maps a JS frame index to an index into output_ (which also contains
  // non-JS frames such as arguments adaptors).
  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

  // Upper bound on the size of the generated deopt entry table.
  static size_t GetMaxDeoptTableSize();

  // Generates (if necessary) deopt entry code covering ids up to
  // |max_entry_id| for the given bailout type.
  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  // Bounds on the number of deopt entries for which table code is generated.
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  // Frame translation: one DoCompute* method per output frame kind.
  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                  int frame_index,
                                  bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                  int frame_index);

  // Translates one field of a de-materialized object.
  void DoTranslateObject(TranslationIterator* iterator,
                         int object_index,
                         int field_index);

  // Whether a translated value is a raw (native) value or a tagged object.
  enum DeoptimizerTranslatedValueType {
    TRANSLATED_VALUE_IS_NATIVE,
    TRANSLATED_VALUE_IS_TAGGED
  };

  // Translates one command into a value at |output_offset| of the frame.
  void DoTranslateCommand(TranslationIterator* iterator,
      int frame_index,
      unsigned output_offset,
      DeoptimizerTranslatedValueType value_type = TRANSLATED_VALUE_IS_TAGGED);

  // Translate a command for OSR.  Updates the input offset to be used for
  // the next command.  Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  // Frame size computations, in bytes.
  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  // Fetches literal |index| from the compiled code's literal array.
  Object* ComputeLiteral(int index) const;

  // Recording of values whose materialization is deferred until
  // MaterializeHeapObjects.
  void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
  void AddObjectDuplication(intptr_t slot, int object_index);
  void AddObjectTaggedValue(intptr_t value);
  void AddObjectDoubleValue(double value);
  void AddDoubleValue(intptr_t slot_address, double value);

  // Whether the arguments object at |object_index| belongs to a frame that
  // had an arguments adaptor frame below it.
  bool ArgumentsObjectIsAdapted(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    // jsframe_functions_/jsframe_has_adapted_arguments_ are recorded in
    // reverse frame order, hence the index flip.
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_has_adapted_arguments_[reverse_jsframe_index];
  }

  // The function owning the frame of the arguments object at |object_index|.
  Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_functions_[reverse_jsframe_index];
  }

  // Helper function for heap object materialization.
  Handle<Object> MaterializeNextHeapObject();
  Handle<Object> MaterializeNextValue();

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(v8::Isolate* isolate,
                                        v8::Persistent<v8::Value>* obj,
                                        void* data);

  // Deoptimize the given code and add to appropriate deoptimization lists.
  static void DeoptimizeCode(Isolate* isolate, Code* code);

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);

  // Fill the input from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubInterfaceDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  // Determines whether the input frame contains alignment padding by looking
  // at the dynamic alignment state slot inside the frame.
  bool HasAlignmentPadding(JSFunction* function);

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  // Address we deoptimized from (as passed to New()).
  Address from_;
  // Delta between frame pointer and stack pointer at the deopt point.
  int fp_to_sp_delta_;
  // Nonzero if the input frame contains alignment padding; exposed to
  // generated code via has_alignment_padding_offset().
  int has_alignment_padding_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Deferred values to be materialized.
  List<Object*> deferred_objects_tagged_values_;
  List<double> deferred_objects_double_values_;
  List<ObjectMaterializationDescriptor> deferred_objects_;
  List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;

  // Output frame information. Only used during heap object materialization.
  List<Handle<JSFunction> > jsframe_functions_;
  List<bool> jsframe_has_adapted_arguments_;

  // Materialized objects. Only used during heap object materialization.
  List<Handle<Object> >* materialized_values_;
  List<Handle<Object> >* materialized_objects_;
  int materialization_value_index_;
  int materialization_object_index_;

#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG

  // Whether this deopt should emit trace output (see TraceEnabledFor).
  bool trace_;

  // Size of one deoptimization table entry; defined per architecture.
  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
  friend class DeoptimizedFrameInfo;
};
    521 
    522 
// Describes a stack frame for the deoptimizer: the input frame read from the
// optimized code, or an output frame to be written for the unoptimized code.
// The object is allocated with extra trailing storage so that frame_content_
// spans the whole frame (see operator new). Several offsets are exposed to
// generated code via the *_offset() accessors, so field order matters.
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  // Allocates the description plus |frame_size| bytes of frame contents.
  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  // Placement-delete counterpart of the operator new above.
  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    // frame_size_ is only uintptr_t for alignment; it must fit in 32 bits.
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  JSFunction* GetFunction() const { return function_; }

  // Maps a translation slot index to a byte offset within the frame.
  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  // Reads a double from the frame; safe for unaligned slot addresses
  // (see read_double_value).
  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
    return read_double_value(reinterpret_cast<Address>(ptr));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  // Stores the caller's pc slot (implemented per architecture).
  void SetCallerPc(unsigned offset, intptr_t value);

  // Stores the caller's fp slot (implemented per architecture).
  void SetCallerFp(unsigned offset, intptr_t value);

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(int index);

  // Get the expression stack height for a unoptimized frame.
  unsigned GetExpressionCount();

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(int index);

  // Byte offsets below are used by generated code to access this object.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  // Pattern used to mark uninitialized frame contents.
  // NOTE(review): usage lives in deoptimizer.cc — confirm before relying.
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value.  It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kMaxNumRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};
    681 
    682 
// Per-isolate bookkeeping for the deoptimizer: lazily generated deopt entry
// code chunks per bailout type, the Deoptimizer currently in flight
// (current_), and the list of deoptimized code objects still referenced by
// active stack frames.
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // GC support: visits the handles held by deoptimized_frame_info_.
  // NOTE(review): exact set of visited objects is defined in deoptimizer.cc.
  void Iterate(ObjectVisitor* v);
#endif

  // Looks up |addr| in the deoptimizing code list; see deoptimizer.cc for
  // the miss behavior.
  Code* FindDeoptimizingCode(Address addr);
  void RemoveDeoptimizingCode(Code* code);

 private:
  MemoryAllocator* allocator_;
  // Number of generated entries and the memory holding the entry code,
  // indexed by BailoutType (EAGER/LAZY/SOFT).
  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
  // Deoptimizer created by Deoptimizer::New and not yet Grab()bed.
  Deoptimizer* current_;

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;
#endif

  // List of deoptimized code which still have references from active stack
  // frames. These code objects are needed by the deoptimizer when deoptimizing
  // a frame for which the code object for the function has been
  // changed from the code present when deoptimizing was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};
    715 
    716 
// Growable zone-allocated byte buffer into which Translation records are
// encoded (the encoding of each int32 value is implemented in the .cc file).
class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  // Current write position; used as the start index of a new Translation.
  int CurrentIndex() const { return contents_.length(); }
  // Appends one encoded value to the buffer.
  void Add(int32_t value, Zone* zone);

  // Copies the accumulated bytes into a heap-allocated ByteArray.
  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};
    729 
    730 
    731 class TranslationIterator BASE_EMBEDDED {
    732  public:
    733   TranslationIterator(ByteArray* buffer, int index)
    734       : buffer_(buffer), index_(index) {
    735     ASSERT(index >= 0 && index < buffer->length());
    736   }
    737 
    738   int32_t Next();
    739 
    740   bool HasNext() const { return index_ < buffer_->length(); }
    741 
    742   void Skip(int n) {
    743     for (int i = 0; i < n; i++) Next();
    744   }
    745 
    746  private:
    747   ByteArray* buffer_;
    748   int index_;
    749 };
    750 
    751 
// Writer for translation data: a sequence of opcodes (plus operands)
// describing how to reconstruct the unoptimized frames from an optimized
// frame. The constructor emits the BEGIN header; the Begin*/Store* commands
// append further records to the underlying TranslationBuffer.
class Translation BASE_EMBEDDED {
 public:
  // Translation opcodes. The numeric enum values are written into the
  // translation buffer, so their order is significant.
  enum Opcode {
    BEGIN,
    JS_FRAME,
    CONSTRUCT_STUB_FRAME,
    GETTER_STUB_FRAME,
    SETTER_STUB_FRAME,
    ARGUMENTS_ADAPTOR_FRAME,
    COMPILED_STUB_FRAME,
    DUPLICATED_OBJECT,
    ARGUMENTS_OBJECT,
    CAPTURED_OBJECT,
    REGISTER,
    INT32_REGISTER,
    UINT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    UINT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL
  };

  // Emits the BEGIN record with the total frame count and JS frame count.
  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  // Start index of this translation within the buffer.
  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginCompiledStubFrame();
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void BeginArgumentsObject(int args_length);
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);

  Zone* zone() const { return zone_; }

  // Number of operand values following the given opcode in the buffer.
  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

  // A literal id which refers to the JSFunction itself.
  static const int kSelfLiteralId = -239;

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};
    825 
    826 
// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  // Ctor/dtor are out-of-line; presumably they create and destroy the
  // weak global handle stored in code_ -- confirm in deoptimizer.cc.
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  // Intrusive singly-linked-list plumbing.
  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }

  // The code object this node tracks.
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};
    845 
    846 
    847 class SlotRef BASE_EMBEDDED {
    848  public:
    849   enum SlotRepresentation {
    850     UNKNOWN,
    851     TAGGED,
    852     INT32,
    853     UINT32,
    854     DOUBLE,
    855     LITERAL
    856   };
    857 
    858   SlotRef()
    859       : addr_(NULL), representation_(UNKNOWN) { }
    860 
    861   SlotRef(Address addr, SlotRepresentation representation)
    862       : addr_(addr), representation_(representation) { }
    863 
    864   SlotRef(Isolate* isolate, Object* literal)
    865       : literal_(literal, isolate), representation_(LITERAL) { }
    866 
    867   Handle<Object> GetValue(Isolate* isolate) {
    868     switch (representation_) {
    869       case TAGGED:
    870         return Handle<Object>(Memory::Object_at(addr_), isolate);
    871 
    872       case INT32: {
    873         int value = Memory::int32_at(addr_);
    874         if (Smi::IsValid(value)) {
    875           return Handle<Object>(Smi::FromInt(value), isolate);
    876         } else {
    877           return isolate->factory()->NewNumberFromInt(value);
    878         }
    879       }
    880 
    881       case UINT32: {
    882         uint32_t value = Memory::uint32_at(addr_);
    883         if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
    884           return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
    885         } else {
    886           return isolate->factory()->NewNumber(static_cast<double>(value));
    887         }
    888       }
    889 
    890       case DOUBLE: {
    891         double value = read_double_value(addr_);
    892         return isolate->factory()->NewNumber(value);
    893       }
    894 
    895       case LITERAL:
    896         return literal_;
    897 
    898       default:
    899         UNREACHABLE();
    900         return Handle<Object>::null();
    901     }
    902   }
    903 
    904   static Vector<SlotRef> ComputeSlotMappingForArguments(
    905       JavaScriptFrame* frame,
    906       int inlined_frame_index,
    907       int formal_parameter_count);
    908 
    909  private:
    910   Address addr_;
    911   Handle<Object> literal_;
    912   SlotRepresentation representation_;
    913 
    914   static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    915     if (slot_index >= 0) {
    916       const int offset = JavaScriptFrameConstants::kLocal0Offset;
    917       return frame->fp() + offset - (slot_index * kPointerSize);
    918     } else {
    919       const int offset = JavaScriptFrameConstants::kLastParameterOffset;
    920       return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    921     }
    922   }
    923 
    924   static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
    925                                             DeoptimizationInputData* data,
    926                                             JavaScriptFrame* frame);
    927 
    928   static void ComputeSlotsForArguments(
    929       Vector<SlotRef>* args_slots,
    930       TranslationIterator* iterator,
    931       DeoptimizationInputData* data,
    932       JavaScriptFrame* frame);
    933 };
    934 
    935 
#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
// Represents parameters in unadapted form so their number might mismatch
// formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  // Copies frame data for frame |frame_index| out of |deoptimizer|
  // (see deoptimizer.cc for the exact extraction logic).
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by construct stub frame.  The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  // Source position of the deoptimization point in the function.
  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.  Private: only the Deoptimizer friend
  // populates this object.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.  Private: only the
  // Deoptimizer friend populates this object.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;           // Function of this frame.
  bool has_construct_stub_;        // See HasConstructStub().
  int parameters_count_;           // Length of parameters_.
  int expression_count_;           // Length of expression_stack_.
  Object** parameters_;            // Copied incoming arguments.
  Object** expression_stack_;      // Copied expression stack values.
  int source_position_;            // See GetSourcePosition().

  friend class Deoptimizer;
};
#endif
   1011 
   1012 } }  // namespace v8::internal
   1013 
   1014 #endif  // V8_DEOPTIMIZER_H_
   1015