1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "allocation.h"
#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {


// Reads a double from |p|, which may not be 8-byte aligned.
static inline double read_double_value(Address p) {
#ifdef V8_HOST_CAN_READ_UNALIGNED
  return Memory::double_at(p);
#else  // V8_HOST_CAN_READ_UNALIGNED
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned address.
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  c.u[0] = *reinterpret_cast<uint32_t*>(p);
  c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
  return c.d;
#endif  // V8_HOST_CAN_READ_UNALIGNED
}


class FrameDescription;
class TranslationIterator;
class DeoptimizedFrameInfo;

// Records a double value that must be boxed as a heap number during
// materialization, together with the destination (a slot address or an
// index, depending on T) that should receive the materialized object.
template<typename T>
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(T destination, double value)
      : destination_(destination), value_(value) { }

  T destination() const { return destination_; }
  double value() const { return value_; }

 private:
  T destination_;
  double value_;
};


// Describes an object whose allocation was deferred until deoptimization
// time and that must be (re-)materialized into the given frame slot.
class ObjectMaterializationDescriptor BASE_EMBEDDED {
 public:
  ObjectMaterializationDescriptor(
      Address slot_address, int frame, int length, int duplicate, bool is_args)
      : slot_address_(slot_address),
        jsframe_index_(frame),
        object_length_(length),
        duplicate_object_(duplicate),
        is_arguments_(is_args) { }

  Address slot_address() const { return slot_address_; }
  int jsframe_index() const { return jsframe_index_; }
  int object_length() const { return object_length_; }
  int duplicate_object() const { return duplicate_object_; }
  bool is_arguments() const { return is_arguments_; }

  // Only used for allocated receivers in DoComputeConstructStubFrame.
  void patch_slot_address(intptr_t slot) {
    slot_address_ = reinterpret_cast<Address>(slot);
  }

 private:
  Address slot_address_;
  int jsframe_index_;
  int object_length_;
  int duplicate_object_;
  bool is_arguments_;
};


// Visitor interface used when iterating over the optimized functions of a
// native context (see Deoptimizer::VisitAllOptimizedFunctions).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given native context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given native context.
  virtual void LeaveContext(Context* context) = 0;
};


class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
    SOFT,
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  // Number of bailout types that have their own deopt entry code
  // (EAGER, LAZY and SOFT; DEBUGGER is excluded).
  static const int kBailoutTypesWithCodeEntry = SOFT + 1;

  struct JumpTableEntry {
    inline JumpTableEntry(Address entry,
                          Deoptimizer::BailoutType type,
                          bool frame)
        : label(),
          address(entry),
          bailout_type(type),
          needs_frame(frame) { }
    Label label;
    Address address;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
  static const char* MessageFor(BailoutType type);

  int output_count() const { return output_count_; }

  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);
#endif

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all code in the given isolate.
  static void DeoptimizeAll(Isolate* isolate);

  // Deoptimize code associated with the given global object.
  static void DeoptimizeGlobalObject(JSObject* object);

  // Deoptimizes all optimized code that has been previously marked
  // (via code->set_marked_for_deoptimization) and unlinks all functions that
  // refer to that code.
  static void DeoptimizeMarkedCode(Isolate* isolate);

  // Visit all the known optimized functions in a given isolate.
  static void VisitAllOptimizedFunctions(
      Isolate* isolate, OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  ~Deoptimizer();

  void MaterializeHeapObjects(JavaScriptFrameIterator* it);
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);
#endif

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);


  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };


  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support. These offsets are read by generated code, so
  // they must stay in sync with the field layout below.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

  static size_t GetMaxDeoptTableSize();

  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  // Translate the optimized input frame into one or more unoptimized
  // output frames; the Do* helpers below each handle one frame kind.
  void DoComputeOutputFrames();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                  int frame_index,
                                  bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                  int frame_index);

  void DoTranslateObject(TranslationIterator* iterator,
                         int object_index,
                         int field_index);

  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  // Record deferred materialization work discovered while translating.
  void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
  void AddObjectDuplication(intptr_t slot, int object_index);
  void AddObjectTaggedValue(intptr_t value);
  void AddObjectDoubleValue(double value);
  void AddDoubleValue(intptr_t slot_address, double value);

  bool ArgumentsObjectIsAdapted(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    // jsframe_functions_/jsframe_has_adapted_arguments_ are indexed from the
    // innermost frame outwards, hence the reversed index.
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_has_adapted_arguments_[reverse_jsframe_index];
  }

  Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_functions_[reverse_jsframe_index];
  }

  // Helper function for heap object materialization.
  Handle<Object> MaterializeNextHeapObject();
  Handle<Object> MaterializeNextValue();

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Marks all the code in the given context for deoptimization.
  static void MarkAllCodeForContext(Context* native_context);

  // Visit all the known optimized functions in a given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Deoptimizes all code marked in the given context.
  static void DeoptimizeMarkedCodeForContext(Context* native_context);

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);

  // Searches the list of known deoptimizing code for a Code object
  // containing the given address (which is supposedly faster than
  // searching all code objects).
  Code* FindDeoptimizingCode(Address addr);

  // Fill the input frame from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubInterfaceDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  // Determines whether the input frame contains alignment padding by looking
  // at the dynamic alignment state slot inside the frame.
  bool HasAlignmentPadding(JSFunction* function);

  // Select the version of NotifyStubFailure builtin that either saves or
  // doesn't save the double registers depending on CPU features.
  Code* NotifyStubFailureBuiltin();

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;
  int has_alignment_padding_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Deferred values to be materialized.
  List<Object*> deferred_objects_tagged_values_;
  List<HeapNumberMaterializationDescriptor<int> >
      deferred_objects_double_values_;
  List<ObjectMaterializationDescriptor> deferred_objects_;
  List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;

  // Output frame information. Only used during heap object materialization.
  List<Handle<JSFunction> > jsframe_functions_;
  List<bool> jsframe_has_adapted_arguments_;

  // Materialized objects. Only used during heap object materialization.
  List<Handle<Object> >* materialized_values_;
  List<Handle<Object> >* materialized_objects_;
  int materialization_value_index_;
  int materialization_object_index_;

#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG

  CodeTracer::Scope* trace_scope_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizedFrameInfo;
};


// Raw description of a stack frame, used both for the input (optimized)
// frame and the reconstructed output frames. Instances are over-allocated
// via the placement operator new below so that frame_content_ extends to
// hold the whole frame; the layout is read directly by generated code via
// the *_offset() accessors.
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
    // Slot may not be 8-byte aligned, so go through the unaligned-safe read.
    return read_double_value(reinterpret_cast<Address>(ptr));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  void SetCallerPc(unsigned offset, intptr_t value);

  void SetCallerFp(unsigned offset, intptr_t value);

  intptr_t GetRegister(unsigned n) const {
#if DEBUG
    // This convoluted ASSERT is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain ASSERT.
    if (n >= ARRAY_SIZE(registers_)) {
      ASSERT(false);
      return 0;
    }
#endif
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(int index);

  // Get the expression stack height for an unoptimized frame.
  unsigned GetExpressionCount();

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(int index);

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kMaxNumRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};


// Per-isolate storage for the generated deoptimization entry code and the
// Deoptimizer instance currently being processed.
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

#ifdef ENABLE_DEBUGGER_SUPPORT
  void Iterate(ObjectVisitor* v);
#endif

 private:
  MemoryAllocator* allocator_;
  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;
#endif

  Deoptimizer* current_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


// Zone-allocated byte buffer that accumulates the serialized form of a
// Translation (see below); turned into a ByteArray when compilation is done.
class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value, Zone* zone);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};


// Reads back the values written by TranslationBuffer from a ByteArray,
// starting at the given index.
class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};


#define TRANSLATION_OPCODE_LIST(V) \
  V(BEGIN)                         \
  V(JS_FRAME)                      \
  V(CONSTRUCT_STUB_FRAME)          \
  V(GETTER_STUB_FRAME)             \
  V(SETTER_STUB_FRAME)             \
  V(ARGUMENTS_ADAPTOR_FRAME)       \
  V(COMPILED_STUB_FRAME)           \
  V(DUPLICATED_OBJECT)             \
  V(ARGUMENTS_OBJECT)              \
  V(CAPTURED_OBJECT)               \
  V(REGISTER)                      \
  V(INT32_REGISTER)                \
  V(UINT32_REGISTER)               \
  V(DOUBLE_REGISTER)               \
  V(STACK_SLOT)                    \
  V(INT32_STACK_SLOT)              \
  V(UINT32_STACK_SLOT)             \
  V(DOUBLE_STACK_SLOT)             \
  V(LITERAL)


// Writer for the opcode/operand stream describing how to reconstruct
// unoptimized frames from an optimized frame. Each Begin*/Store* call
// appends one command to the underlying TranslationBuffer.
class Translation BASE_EMBEDDED {
 public:
#define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
  enum Opcode {
    TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
    LAST = LITERAL
  };
#undef DECLARE_TRANSLATION_OPCODE_ENUM

  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  // Start index of this translation in the buffer.
  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginCompiledStubFrame();
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void BeginArgumentsObject(int args_length);
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);

  Zone* zone() const { return zone_; }

  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

  // A literal id which refers to the JSFunction itself.
  static const int kSelfLiteralId = -239;

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};


// A typed reference to a value in an (un)optimized frame: either a raw
// frame slot interpreted as the given representation, or a literal object.
class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
    UINT32,
    DOUBLE,
    LITERAL
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  SlotRef(Isolate* isolate, Object* literal)
      : literal_(literal, isolate), representation_(LITERAL) { }

  // Materialize the referenced value as a handle, boxing numbers that do
  // not fit in a Smi.
  Handle<Object> GetValue(Isolate* isolate) {
    switch (representation_) {
      case TAGGED:
        return Handle<Object>(Memory::Object_at(addr_), isolate);

      case INT32: {
        int value = Memory::int32_at(addr_);
        if (Smi::IsValid(value)) {
          return Handle<Object>(Smi::FromInt(value), isolate);
        } else {
          return isolate->factory()->NewNumberFromInt(value);
        }
      }

      case UINT32: {
        uint32_t value = Memory::uint32_at(addr_);
        if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
          return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
        } else {
          return isolate->factory()->NewNumber(static_cast<double>(value));
        }
      }

      case DOUBLE: {
        double value = read_double_value(addr_);
        return isolate->factory()->NewNumber(value);
      }

      case LITERAL:
        return literal_;

      default:
        // UNKNOWN (and any corrupt representation) is a programming error.
        UNREACHABLE();
        return Handle<Object>::null();
    }
  }

  static Vector<SlotRef> ComputeSlotMappingForArguments(
      JavaScriptFrame* frame,
      int inlined_frame_index,
      int formal_parameter_count);

 private:
  Address addr_;
  Handle<Object> literal_;
  SlotRepresentation representation_;

  // Non-negative slot_index addresses a local, negative ones a parameter.
  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame);

  static void ComputeSlotsForArguments(
      Vector<SlotRef>* args_slots,
      TranslationIterator* iterator,
      DeoptimizationInputData* data,
      JavaScriptFrame* frame);
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
// Represents parameters in unadapted form so their number might mismatch
// formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by construct stub frame. The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  bool has_construct_stub_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;
  int source_position_;

  friend class Deoptimizer;
};
#endif

} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_