// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "accessors.h"
#include "codegen.h"
#include "deoptimizer.h"
#include "disasm.h"
#include "full-codegen.h"
#include "global-handles.h"
#include "macro-assembler.h"
#include "prettyprinter.h"


namespace v8 {
namespace internal {

static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
                                  OS::CommitPageSize(),
#if defined(__native_client__)
  // The Native Client port of V8 uses an interpreter,
  // so code pages don't need PROT_EXEC.
                                  NOT_EXECUTABLE,
#else
                                  EXECUTABLE,
#endif
                                  NULL);
}


DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
    : allocator_(allocator),
#ifdef ENABLE_DEBUGGER_SUPPORT
      deoptimized_frame_info_(NULL),
#endif
      current_(NULL) {
  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
    deopt_entry_code_entries_[i] = -1;
    deopt_entry_code_[i] = AllocateCodeChunk(allocator);
  }
}


DeoptimizerData::~DeoptimizerData() {
  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
    allocator_->Free(deopt_entry_code_[i]);
    deopt_entry_code_[i] = NULL;
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void DeoptimizerData::Iterate(ObjectVisitor* v) {
  if (deoptimized_frame_info_ != NULL) {
    deoptimized_frame_info_->Iterate(v);
  }
}
#endif


Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_->IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
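    // The list is threaded through Code::next_code_link and terminated by
    // undefined, so the walk below simply follows the links.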
    Context* native_context = function_->context()->native_context();
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined()) {
      Code* code = Code::cast(element);
      ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (code->contains(addr)) return code;
      element = code->next_code_link();
    }
  }
  return NULL;
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
                              BailoutType type,
                              unsigned bailout_id,
                              Address from,
                              int fp_to_sp_delta,
                              Isolate* isolate) {
  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                             function,
                                             type,
                                             bailout_id,
                                             from,
                                             fp_to_sp_delta,
                                             NULL);
  ASSERT(isolate->deoptimizer_data()->current_ == NULL);
  isolate->deoptimizer_data()->current_ = deoptimizer;
  return deoptimizer;
}


// No larger than 2K on all platforms
static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;


size_t Deoptimizer::GetMaxDeoptTableSize() {
  int entries_size =
      Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
  int commit_page_size = static_cast<int>(OS::CommitPageSize());
  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
                    commit_page_size) + 1;
  return static_cast<size_t>(commit_page_size * page_count);
}


Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
  Deoptimizer* result = isolate->deoptimizer_data()->current_;
  ASSERT(result != NULL);
  result->DeleteFrameDescriptions();
  isolate->deoptimizer_data()->current_ = NULL;
  return result;
}


int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
  if (jsframe_index == 0) return 0;

  int frame_index = 0;
  while (jsframe_index >= 0) {
    FrameDescription* frame = output_[frame_index];
    if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
      jsframe_index--;
    }
    frame_index++;
  }

  return frame_index - 1;
}


#ifdef ENABLE_DEBUGGER_SUPPORT
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  ASSERT(frame->is_optimized());
  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);

  // Get the function and code from the frame.
  JSFunction* function = frame->function();
  Code* code = frame->LookupCode();

  // Locate the deoptimization point in the code. As we are at a call the
  // return address must be at a place in the code with deoptimization support.
  SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
  int deoptimization_index = safepoint_entry.deoptimization_index();
  ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex);

  // Always use the actual stack slots when calculating the fp to sp
  // delta adding two for the function and context.
  unsigned stack_slots = code->stack_slots();
  unsigned fp_to_sp_delta = (stack_slots * kPointerSize) +
      StandardFrameConstants::kFixedFrameSizeFromFp;

  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                             function,
                                             Deoptimizer::DEBUGGER,
                                             deoptimization_index,
                                             frame->pc(),
                                             fp_to_sp_delta,
                                             code);
  Address tos = frame->fp() - fp_to_sp_delta;
  deoptimizer->FillInputFrame(tos, frame);

  // Calculate the output frames.
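  // This walks the translation for the optimized frame and fills output_
  // with a GC-unsafe description of each unoptimized frame.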
  Deoptimizer::ComputeOutputFrames(deoptimizer);

  // Create the GC safe output frame information and register it for GC
  // handling.
  ASSERT_LT(jsframe_index, deoptimizer->jsframe_count());

  // Convert JS frame index into frame index.
  int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);

  bool has_arguments_adaptor =
      frame_index > 0 &&
      deoptimizer->output_[frame_index - 1]->GetFrameType() ==
      StackFrame::ARGUMENTS_ADAPTOR;

  int construct_offset = has_arguments_adaptor ? 2 : 1;
  bool has_construct_stub =
      frame_index >= construct_offset &&
      deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
      StackFrame::CONSTRUCT;

  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
                                                        frame_index,
                                                        has_arguments_adaptor,
                                                        has_construct_stub);
  isolate->deoptimizer_data()->deoptimized_frame_info_ = info;

  // Get the "simulated" top and size for the requested frame.
  FrameDescription* parameters_frame =
      deoptimizer->output_[
          has_arguments_adaptor ? (frame_index - 1) : frame_index];

  uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
  Address parameters_top = reinterpret_cast<Address>(
      parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
                                    parameters_size));

  uint32_t expressions_size = info->expression_count() * kPointerSize;
  Address expressions_top = reinterpret_cast<Address>(
      deoptimizer->output_[frame_index]->GetTop());

  // Done with the GC-unsafe frame descriptions. This re-enables allocation.
  deoptimizer->DeleteFrameDescriptions();

  // Allocate a heap number for the doubles belonging to this frame.
  deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
      parameters_top, parameters_size, expressions_top, expressions_size, info);

  // Finished using the deoptimizer instance.
  delete deoptimizer;

  return info;
}


void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                                 Isolate* isolate) {
  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
  delete info;
  isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
}
#endif

void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
                                                int count,
                                                BailoutType type) {
  TableEntryGenerator generator(masm, type, count);
  generator.Generate();
}


void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    Context* context, OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  ASSERT(context->IsNativeContext());

  visitor->EnterContext(context);

  // Visit the list of optimized functions, removing elements that
  // no longer refer to optimized code.
  JSFunction* prev = NULL;
  Object* element = context->OptimizedFunctionsListHead();
  while (!element->IsUndefined()) {
    JSFunction* function = JSFunction::cast(element);
    Object* next = function->next_function_link();
    if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
        (visitor->VisitFunction(function),
         function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
      // The function no longer refers to optimized code, or the visitor
      // changed the code to which it refers to no longer be optimized code.
      // Remove the function from this list.
      if (prev != NULL) {
        prev->set_next_function_link(next);
      } else {
        context->SetOptimizedFunctionsListHead(next);
      }
      // The visitor should not alter the link directly.
      ASSERT(function->next_function_link() == next);
      // Set the next function link to undefined to indicate it is no longer
      // in the optimized functions list.
      function->set_next_function_link(context->GetHeap()->undefined_value());
    } else {
      // The visitor should not alter the link directly.
      ASSERT(function->next_function_link() == next);
      // preserve this element.
      prev = function;
    }
    element = next;
  }

  visitor->LeaveContext(context);
}


void Deoptimizer::VisitAllOptimizedFunctions(
    Isolate* isolate,
    OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  // Run through the list of all native contexts.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
    context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
  }
}


// Unlink functions referring to code marked for deoptimization, then move
// marked code from the optimized code list to the deoptimized code list,
// and patch code for lazy deopt.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  // A "closure" that unlinks optimized code that is going to be
  // deoptimized from the functions that refer to it.
  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
   public:
    virtual void EnterContext(Context* context) { }  // Don't care.
    virtual void LeaveContext(Context* context) { }  // Don't care.
    virtual void VisitFunction(JSFunction* function) {
      Code* code = function->code();
      if (!code->marked_for_deoptimization()) return;

      // Unlink this function and evict from optimized code map.
      SharedFunctionInfo* shared = function->shared();
      function->set_code(shared->code());
      shared->EvictFromOptimizedCodeMap(code, "deoptimized function");

      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer unlinked: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
    }
  };

  // Unlink all functions that refer to marked code.
  SelectedCodeUnlinker unlinker;
  VisitAllOptimizedFunctionsForContext(context, &unlinker);

  // Move marked code from the optimized code list to the deoptimized
  // code list, collecting them into a ZoneList.
  Isolate* isolate = context->GetHeap()->isolate();
  Zone zone(isolate);
  ZoneList<Code*> codes(10, &zone);

  // Walk over all optimized code objects in this native context.
  Code* prev = NULL;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined()) {
    Code* code = Code::cast(element);
    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();
    if (code->marked_for_deoptimization()) {
      // Put the code into the list for later patching.
      codes.Add(code, &zone);

      if (prev != NULL) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  // TODO(titzer): we need a handle scope only because of the macro assembler,
  // which is only used in EnsureCodeForDeoptimizationEntry.
  HandleScope scope(isolate);
  // Now patch all the codes for deoptimization.
  for (int i = 0; i < codes.length(); i++) {
    // It is finally time to die, code object.
    // Do platform-specific patching to force any activations to lazy deopt.
    PatchCodeForDeoptimization(isolate, codes[i]);

    // We might be in the middle of incremental marking with compaction.
    // Tell collector to treat this code object in a special way and
    // ignore all slots that might have been recorded on it.
    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
  }
}


void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, mark all code, then deoptimize.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    Context* native_context = Context::cast(context);
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->get(Context::NEXT_CONTEXT_LINK);
  }
}


void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, deoptimize code already marked.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    Context* native_context = Context::cast(context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->get(Context::NEXT_CONTEXT_LINK);
  }
}


void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(object->GetHeap()->isolate()->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize global object @ 0x%08" V8PRIxPTR "]\n",
           reinterpret_cast<intptr_t>(object));
  }
  if (object->IsJSGlobalProxy()) {
    Object* proto = object->GetPrototype();
    ASSERT(proto->IsJSGlobalObject());
    Context* native_context = GlobalObject::cast(proto)->native_context();
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
  } else if (object->IsGlobalObject()) {
    Context* native_context = GlobalObject::cast(object)->native_context();
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
  }
}


void Deoptimizer::MarkAllCodeForContext(Context* context) {
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined()) {
    Code* code = Code::cast(element);
    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
    code->set_marked_for_deoptimization(true);
    element = code->next_code_link();
  }
}


void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  Code* code = function->code();
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code->set_marked_for_deoptimization(true);
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
  }
}


void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}


bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
                                  StackFrame::Type frame_type) {
  switch (deopt_type) {
    case EAGER:
    case SOFT:
    case LAZY:
    case DEBUGGER:
      return (frame_type == StackFrame::STUB)
          ? FLAG_trace_stub_failures
          : FLAG_trace_deopt;
  }
  UNREACHABLE();
  return false;
}


const char* Deoptimizer::MessageFor(BailoutType type) {
  switch (type) {
    case EAGER: return "eager";
    case SOFT: return "soft";
    case LAZY: return "lazy";
    case DEBUGGER: return "debugger";
  }
  UNREACHABLE();
  return NULL;
}


Deoptimizer::Deoptimizer(Isolate* isolate,
                         JSFunction* function,
                         BailoutType type,
                         unsigned bailout_id,
                         Address from,
                         int fp_to_sp_delta,
                         Code* optimized_code)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      bailout_type_(type),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      has_alignment_padding_(0),
      input_(NULL),
      output_count_(0),
      jsframe_count_(0),
      output_(NULL),
      deferred_objects_tagged_values_(0),
      deferred_objects_double_values_(0),
      deferred_objects_(0),
      deferred_heap_numbers_(0),
      jsframe_functions_(0),
      jsframe_has_adapted_arguments_(0),
      materialized_values_(NULL),
      materialized_objects_(NULL),
      materialization_value_index_(0),
      materialization_object_index_(0),
      trace_scope_(NULL) {
  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
  // indicating an internal frame.
  if (function->IsSmi()) {
    function = NULL;
  }
  ASSERT(from != NULL);
  if (function != NULL && function->IsOptimized()) {
    function->shared()->increment_deopt_count();
    if (bailout_type_ == Deoptimizer::SOFT) {
      isolate->counters()->soft_deopts_executed()->Increment();
      // Soft deopts shouldn't count against the overall re-optimization count
      // that can eventually lead to disabling optimization for a function.
      int opt_count = function->shared()->opt_count();
      if (opt_count > 0) opt_count--;
      function->shared()->set_opt_count(opt_count);
    }
  }
  compiled_code_ = FindOptimizedCode(function, optimized_code);

#if DEBUG
  ASSERT(compiled_code_ != NULL);
  if (type == EAGER || type == SOFT || type == LAZY) {
    ASSERT(compiled_code_->kind() != Code::FUNCTION);
  }
#endif

  StackFrame::Type frame_type = function == NULL
      ? StackFrame::STUB
      : StackFrame::JAVA_SCRIPT;
  trace_scope_ = TraceEnabledFor(type, frame_type) ?
      new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
#ifdef DEBUG
  CHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  unsigned size = ComputeInputFrameSize();
  input_ = new(size) FrameDescription(size, function);
  input_->SetFrameType(frame_type);
}


Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
                                     Code* optimized_code) {
  switch (bailout_type_) {
    case Deoptimizer::SOFT:
    case Deoptimizer::EAGER:
    case Deoptimizer::LAZY: {
      Code* compiled_code = FindDeoptimizingCode(from_);
      return (compiled_code == NULL)
          ? static_cast<Code*>(isolate_->FindCodeObject(from_))
          : compiled_code;
    }
    case Deoptimizer::DEBUGGER:
      ASSERT(optimized_code->contains(from_));
      return optimized_code;
  }
  UNREACHABLE();
  return NULL;
}


void Deoptimizer::PrintFunctionName() {
  if (function_->IsJSFunction()) {
    function_->PrintName(trace_scope_->file());
  } else {
    PrintF(trace_scope_->file(),
           "%s", Code::Kind2String(compiled_code_->kind()));
  }
}


Deoptimizer::~Deoptimizer() {
  ASSERT(input_ == NULL && output_ == NULL);
  ASSERT(disallow_heap_allocation_ == NULL);
  delete trace_scope_;
}


void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = NULL;
  output_ = NULL;
#ifdef DEBUG
  CHECK(!AllowHeapAllocation::IsAllowed());
  CHECK(disallow_heap_allocation_ != NULL);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = NULL;
#endif  // DEBUG
}


Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
                                            int id,
                                            BailoutType type,
                                            GetEntryMode mode) {
  ASSERT(id >= 0);
  if (id >= kMaxNumberOfEntries) return NULL;
  if (mode == ENSURE_ENTRY_CODE) {
    EnsureCodeForDeoptimizationEntry(isolate, type, id);
  } else {
    ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
  }
  DeoptimizerData* data = isolate->deoptimizer_data();
  ASSERT(type < kBailoutTypesWithCodeEntry);
  MemoryChunk* base = data->deopt_entry_code_[type];
  return base->area_start() + (id * table_entry_size_);
}


int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
                                     Address addr,
                                     BailoutType type) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  MemoryChunk* base = data->deopt_entry_code_[type];
  // Check for a missing entry table before touching the chunk.
  if (base == NULL) return kNotDeoptimizationEntry;
  Address start = base->area_start();
  if (addr < start ||
      addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    return kNotDeoptimizationEntry;
  }
  ASSERT_EQ(0,
            static_cast<int>(addr - start) % table_entry_size_);
  return static_cast<int>(addr - start) / table_entry_size_;
}


int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
                               BailoutId id,
                               SharedFunctionInfo* shared) {
  // TODO(kasperl): For now, we do a simple linear search for the PC
  // offset associated with the given node id. This should probably be
  // changed to a binary search.
  int length = data->DeoptPoints();
  for (int i = 0; i < length; i++) {
    if (data->AstId(i) == id) {
      return data->PcAndState(i)->value();
    }
  }
  PrintF(stderr, "[couldn't find pc offset for node=%d]\n", id.ToInt());
  PrintF(stderr, "[method: %s]\n", *shared->DebugName()->ToCString());
  // Print the source code if available.
  HeapStringAllocator string_allocator;
  StringStream stream(&string_allocator);
  shared->SourceCodePrint(&stream, -1);
  PrintF(stderr, "[source:\n%s\n]", *stream.ToCString());

  FATAL("unable to find pc offset during deoptimization");
  return -1;
}


int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
  int length = 0;
  // Count all entries in the deoptimizing code list of every context.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    Context* native_context = Context::cast(context);
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined()) {
      Code* code = Code::cast(element);
      ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
      length++;
      element = code->next_code_link();
    }
    context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
  }
  return length;
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
  // Print some helpful diagnostic information.
  if (FLAG_log_timer_events &&
      compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    LOG(isolate(), CodeDeoptEvent(compiled_code_));
  }
  ElapsedTimer timer;
  if (trace_scope_ != NULL) {
    timer.Start();
    PrintF(trace_scope_->file(),
           "[deoptimizing (DEOPT %s): begin 0x%08" V8PRIxPTR " ",
           MessageFor(bailout_type_),
           reinterpret_cast<intptr_t>(function_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d, FP to SP delta: %d]\n",
           bailout_id_,
           fp_to_sp_delta_);
    if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
      compiled_code_->PrintDeoptLocation(trace_scope_->file(), bailout_id_);
    }
  }

  // Determine basic deoptimization information. The optimized frame is
  // described by the input data.
  DeoptimizationInputData* input_data =
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
  BailoutId node_id = input_data->AstId(bailout_id_);
  ByteArray* translations = input_data->TranslationByteArray();
  unsigned translation_index =
      input_data->TranslationIndex(bailout_id_)->value();

  // Do the input frame to output frame(s) translation.
  TranslationIterator iterator(translations, translation_index);
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator.Next());
  ASSERT(Translation::BEGIN == opcode);
  USE(opcode);
  // Read the number of output frames and allocate an array for their
  // descriptions.
  int count = iterator.Next();
  iterator.Next();  // Drop JS frames count.
  ASSERT(output_ == NULL);
  output_ = new FrameDescription*[count];
  for (int i = 0; i < count; ++i) {
    output_[i] = NULL;
  }
  output_count_ = count;

  // Translate each output frame.
  for (int i = 0; i < count; ++i) {
    // Read the ast node id, function, and frame height for this output frame.
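    // The frame-type opcode read below determines which DoCompute*Frame
    // handler consumes those operands.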
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator.Next());
    switch (opcode) {
      case Translation::JS_FRAME:
        DoComputeJSFrame(&iterator, i);
        jsframe_count_++;
        break;
      case Translation::ARGUMENTS_ADAPTOR_FRAME:
        DoComputeArgumentsAdaptorFrame(&iterator, i);
        break;
      case Translation::CONSTRUCT_STUB_FRAME:
        DoComputeConstructStubFrame(&iterator, i);
        break;
      case Translation::GETTER_STUB_FRAME:
        DoComputeAccessorStubFrame(&iterator, i, false);
        break;
      case Translation::SETTER_STUB_FRAME:
        DoComputeAccessorStubFrame(&iterator, i, true);
        break;
      case Translation::COMPILED_STUB_FRAME:
        DoComputeCompiledStubFrame(&iterator, i);
        break;
      case Translation::BEGIN:
      case Translation::REGISTER:
      case Translation::INT32_REGISTER:
      case Translation::UINT32_REGISTER:
      case Translation::DOUBLE_REGISTER:
      case Translation::STACK_SLOT:
      case Translation::INT32_STACK_SLOT:
      case Translation::UINT32_STACK_SLOT:
      case Translation::DOUBLE_STACK_SLOT:
      case Translation::LITERAL:
      case Translation::ARGUMENTS_OBJECT:
      default:
        UNREACHABLE();
        break;
    }
  }

  // Print some helpful diagnostic information.
  if (trace_scope_ != NULL) {
    double ms = timer.Elapsed().InMillisecondsF();
    int index = output_count_ - 1;  // Index of the topmost frame.
    JSFunction* function = output_[index]->GetFunction();
    PrintF(trace_scope_->file(),
           "[deoptimizing (%s): end 0x%08" V8PRIxPTR " ",
           MessageFor(bailout_type_),
           reinterpret_cast<intptr_t>(function));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
           " took %0.3f ms]\n",
           bailout_id_,
           node_id.ToInt(),
           output_[index]->GetPc(),
           FullCodeGenerator::State2String(
               static_cast<FullCodeGenerator::State>(
                   output_[index]->GetState()->value())),
           has_alignment_padding_ ? "with padding" : "no padding",
           ms);
  }
}


void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
                                   int frame_index) {
  BailoutId node_id = BailoutId(iterator->Next());
  JSFunction* function;
  if (frame_index != 0) {
    function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  } else {
    int closure_id = iterator->Next();
    USE(closure_id);
    ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
    function = function_;
  }
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), "  translating ");
    function->PrintName(trace_scope_->file());
    PrintF(trace_scope_->file(),
           " => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by JavaScriptFrameConstants.
  unsigned fixed_frame_size = ComputeFixedSize(function);
  unsigned input_frame_size = input_->GetFrameSize();
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  ASSERT(frame_index >= 0 && frame_index < output_count_);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address for the bottommost output frame can be computed from
  // the input frame pointer and the output frame's height. For all
  // subsequent output frames, it can be computed from the previous one's
  // top address and the current frame's size.
  Register fp_reg = JavaScriptFrame::fp_register();
  intptr_t top_address;
  if (is_bottommost) {
    // Determine whether the input frame contains alignment padding.
    has_alignment_padding_ = HasAlignmentPadding(function) ? 1 : 0;
    // 2 = context and function in the frame.
    // If the optimized frame had alignment padding, adjust the frame pointer
    // to point to the new position of the old frame pointer after padding
    // is removed. Subtract 2 * kPointerSize for the context and function slots.
    top_address = input_->GetRegister(fp_reg.code()) -
        StandardFrameConstants::kFixedFrameSizeFromFp -
        height_in_bytes + has_alignment_padding_ * kPointerSize;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = function->shared()->formal_parameter_count() + 1;
  unsigned output_offset = output_frame_size;
  unsigned input_offset = input_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  input_offset -= (parameter_count * kPointerSize);

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function. Synthesize their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame. For all subsequent output frames, it can be read from the
  // previous one. This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  input_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, value);
  }

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame. For all subsequent output frames, it can be
  // read from the previous one. Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  input_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  ASSERT(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
      has_alignment_padding_ * kPointerSize) == fp_value);
  output_frame->SetFp(fp_value);
  if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }
  ASSERT(!is_bottommost || !has_alignment_padding_ ||
         (fp_value & kPointerSize) != 0);

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  Register context_reg = JavaScriptFrame::context_register();
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = reinterpret_cast<intptr_t>(function->context());
  }
  output_frame->SetFrameSlot(output_offset, value);
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; context\n",
           top_address + output_offset, output_offset, value);
  }

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  // The function for the bottommost output frame should also agree with the
  // input frame.
  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Translate the rest of the frame.
  for (unsigned i = 0; i < height; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  ASSERT(0 == output_offset);

  // Compute this frame's PC, state, and continuation.
  Code* non_optimized_code = function->shared()->code();
  FixedArray* raw_data = non_optimized_code->deoptimization_data();
  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
  Address start = non_optimized_code->instruction_start();
  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
  output_frame->SetPc(pc_value);

  FullCodeGenerator::State state =
      FullCodeGenerator::StateField::decode(pc_and_state);
  output_frame->SetState(Smi::FromInt(state));

  // Set the continuation for the topmost frame.
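  // The continuation is the builtin the deoptimized code returns to; it
  // notifies the runtime that the deopt happened so execution can resume
  // in the unoptimized code.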
  if (is_topmost && bailout_type_ != DEBUGGER) {
    Builtins* builtins = isolate_->builtins();
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    if (bailout_type_ == LAZY) {
      continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
    } else if (bailout_type_ == SOFT) {
      continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
    } else {
      ASSERT(bailout_type_ == EAGER);
    }
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}


void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                                 int frame_index) {
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);

  // Arguments adaptor can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // A marker value is used in place of the context.
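  // Arguments adaptor frames have no context; the frame-type Smi below acts
  // as the sentinel.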
  output_offset -= kPointerSize;
  intptr_t context = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  output_frame->SetFrameSlot(output_offset, context);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context (adaptor sentinel)\n",
           top_address + output_offset, output_offset, context);
  }

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  ASSERT(0 == output_offset);

  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->instruction_start() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
}


void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
                                              int frame_index) {
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating construct stub => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    int deferred_object_index = deferred_objects_.length();
    DoTranslateCommand(iterator, frame_index, output_offset);
    // The allocated receiver of a construct stub frame is passed as the
    // receiver parameter through the translation. It might be encoding
    // a captured object, patch the slot address for a captured object.
    if (i == 0 && deferred_objects_.length() > deferred_object_index) {
      ASSERT(!deferred_objects_[deferred_object_index].is_arguments());
      deferred_objects_[deferred_object_index].patch_slot_address(top_address);
    }
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  }

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
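  // The translated height includes the receiver, so argc is height - 1.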
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  // Constructor function being invoked by the stub (only present on some
  // architectures, indicated by kConstructorOffset).
  if (ConstructFrameConstants::kConstructorOffset != kMinInt) {
    output_offset -= kPointerSize;
    value = reinterpret_cast<intptr_t>(function);
    output_frame->SetFrameSlot(output_offset, value);
    if (trace_scope_ != NULL) {
      PrintF(trace_scope_->file(),
             "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR " ; constructor function\n",
             top_address + output_offset, output_offset, value);
    }
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  }

  ASSERT(0 == output_offset);

  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
}


void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                             int frame_index,
                                             bool is_setter_stub_frame) {
  JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
  // The receiver (and the implicit return value, if any) are expected in
  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
  // frame. This means that we have to use a height of 0.
  unsigned height = 0;
  unsigned height_in_bytes = height * kPointerSize;
  const char* kind = is_setter_stub_frame ? "setter" : "getter";
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating %s stub => height=%u\n", kind, height_in_bytes);
  }

  // We need 1 stack entry for the return address and enough entries for the
  // StackFrame::INTERNAL (FP, context, frame type and code object - see
  // MacroAssembler::EnterFrame). For a setter stub frame we need one additional
  // entry for the implicit return value, see
  // StoreStubCompiler::CompileStoreViaSetter.
  unsigned fixed_frame_entries =
      (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
      (is_setter_stub_frame ? 1 : 0);
  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, accessor);
  output_frame->SetFrameType(StackFrame::INTERNAL);

  // A frame for an accessor stub can not be the topmost or bottommost one.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  unsigned output_offset = output_frame_size;

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; function (%s sentinel)\n",
           top_address + output_offset, output_offset, value, kind);
  }

  // Get Code object from accessor stub.
  output_offset -= kPointerSize;
  Builtins::Name name = is_setter_stub_frame ?
      Builtins::kStoreIC_Setter_ForDeopt :
      Builtins::kLoadIC_Getter_ForDeopt;
  Code* accessor_stub = isolate_->builtins()->builtin(name);
  value = reinterpret_cast<intptr_t>(accessor_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Skip receiver.
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  iterator->Skip(Translation::NumberOfOperandsFor(opcode));

  if (is_setter_stub_frame) {
    // The implicit return value was part of the artificial setter stub
    // environment.
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  ASSERT(0 == output_offset);

  Smi* offset = is_setter_stub_frame ?
      isolate_->heap()->setter_stub_deopt_pc_offset() :
      isolate_->heap()->getter_stub_deopt_pc_offset();
  intptr_t pc = reinterpret_cast<intptr_t>(
      accessor_stub->instruction_start() + offset->value());
  output_frame->SetPc(pc);
}


void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                             int frame_index) {
  //
  //               FROM                                  TO
  //    |          ....           |          |          ....           |
  //    +-------------------------+          +-------------------------+
  //    | JSFunction continuation |          | JSFunction continuation |
  //    +-------------------------+          +-------------------------+
  // |  |    saved frame (FP)     |          |    saved frame (FP)     |
  // |  +=========================+<-fpreg   +=========================+<-fpreg
  // |  |   JSFunction context    |          |   JSFunction context    |
  // v  +-------------------------+          +-------------------------|
  //    |   COMPILED_STUB marker  |          |   STUB_FAILURE marker   |
  //    +-------------------------+          +-------------------------+
  //    |                         |          | caller args.arguments_  |
  //    | ...                     |          +-------------------------+
  //    |                         |          | caller args.length_     |
  //    |-------------------------|<-spreg   +-------------------------+
  //                                         | caller args pointer     |
  //                                         +-------------------------+
  //                                         | caller stack param 1    |
  //      parameters in registers            +-------------------------+
  //       and spilled to stack              |           ....          |
  //                                         +-------------------------+
  //                                         | caller stack param n    |
  //                                         +-------------------------+<-spreg
  //                                         reg = number of parameters
  //                                         reg = failure handler address
  //                                         reg = saved frame
  //                                         reg = JSFunction context
  //

  ASSERT(compiled_code_->is_crankshafted() &&
         compiled_code_->kind() != Code::OPTIMIZED_FUNCTION);
  int major_key = compiled_code_->major_key();
  CodeStubInterfaceDescriptor* descriptor =
      isolate_->code_stub_interface_descriptor(major_key);

  // The output frame must have room for all pushed register parameters
  // and the standard stack frame slots. Include space for an argument
  // object to the callee and optionally the space to pass the argument
  // object to the stub failure handler.
  ASSERT(descriptor->register_param_count_ >= 0);
  int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
      sizeof(Arguments) + kPointerSize;
  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
  int input_frame_size = input_->GetFrameSize();
  int output_frame_size = height_in_bytes + fixed_frame_size;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating %s => StubFailure%sTrampolineStub, height=%d\n",
           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
           descriptor->HasTailCallContinuation() ? "TailCall" : "",
           height_in_bytes);
  }

  // The stub failure trampoline is a single frame.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, NULL);
  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ASSERT(frame_index == 0);
  output_[frame_index] = output_frame;

  // The top address for the output frame can be computed from the input
  // frame pointer and the output frame's height. Subtract space for the
  // context and function slots.
  Register fp_reg = StubFailureTrampolineFrame::fp_register();
  intptr_t top_address = input_->GetRegister(fp_reg.code()) -
      StandardFrameConstants::kFixedFrameSizeFromFp - height_in_bytes;
  output_frame->SetTop(top_address);

  // Read caller's PC (JSFunction continuation) from the input frame.
  unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
  unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
  intptr_t value = input_->GetFrameSlot(input_frame_offset);
  output_frame->SetCallerPc(output_frame_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_frame_offset, output_frame_offset, value);
  }

  // Read caller's FP from the input frame, and set this frame's FP.
  input_frame_offset -= kFPOnStackSize;
  value = input_->GetFrameSlot(input_frame_offset);
  output_frame_offset -= kFPOnStackSize;
  output_frame->SetCallerFp(output_frame_offset, value);
  intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
  output_frame->SetRegister(fp_reg.code(), frame_ptr);
  output_frame->SetFp(frame_ptr);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           top_address + output_frame_offset, output_frame_offset, value);
  }

  // The context can be gotten from the input frame.
  Register context_reg = StubFailureTrampolineFrame::context_register();
  input_frame_offset -= kPointerSize;
  value = input_->GetFrameSlot(input_frame_offset);
  output_frame->SetRegister(context_reg.code(), value);
  output_frame_offset -= kPointerSize;
  output_frame->SetFrameSlot(output_frame_offset, value);
  ASSERT(reinterpret_cast<Object*>(value)->IsContext());
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_frame_offset, output_frame_offset, value);
  }

  // A marker value is used in place of the function.
  output_frame_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
  output_frame->SetFrameSlot(output_frame_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (stub failure sentinel)\n",
           top_address + output_frame_offset, output_frame_offset, value);
  }

  intptr_t caller_arg_count = descriptor->HasTailCallContinuation()
      ? compiled_code_->arguments_count() + 1 : 0;
  bool arg_count_known = !descriptor->stack_parameter_count_.is_valid();

  // Build the Arguments object for the caller's parameters and a pointer to it.
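  // As in the diagram above, the slots are filled top-down: args.arguments_,
  // then args.length_, then the pointer to the Arguments object itself.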
1550 output_frame_offset -= kPointerSize; 1551 int args_arguments_offset = output_frame_offset; 1552 intptr_t the_hole = reinterpret_cast<intptr_t>( 1553 isolate_->heap()->the_hole_value()); 1554 if (arg_count_known) { 1555 value = frame_ptr + StandardFrameConstants::kCallerSPOffset + 1556 (caller_arg_count - 1) * kPointerSize; 1557 } else { 1558 value = the_hole; 1559 } 1560 1561 output_frame->SetFrameSlot(args_arguments_offset, value); 1562 if (trace_scope_ != NULL) { 1563 PrintF(trace_scope_->file(), 1564 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" 1565 V8PRIxPTR " ; args.arguments %s\n", 1566 top_address + args_arguments_offset, args_arguments_offset, value, 1567 arg_count_known ? "" : "(the hole)"); 1568 } 1569 1570 output_frame_offset -= kPointerSize; 1571 int length_frame_offset = output_frame_offset; 1572 value = arg_count_known ? caller_arg_count : the_hole; 1573 output_frame->SetFrameSlot(length_frame_offset, value); 1574 if (trace_scope_ != NULL) { 1575 PrintF(trace_scope_->file(), 1576 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" 1577 V8PRIxPTR " ; args.length %s\n", 1578 top_address + length_frame_offset, length_frame_offset, value, 1579 arg_count_known ? "" : "(the hole)"); 1580 } 1581 1582 output_frame_offset -= kPointerSize; 1583 value = frame_ptr + StandardFrameConstants::kCallerSPOffset - 1584 (output_frame_size - output_frame_offset) + kPointerSize; 1585 output_frame->SetFrameSlot(output_frame_offset, value); 1586 if (trace_scope_ != NULL) { 1587 PrintF(trace_scope_->file(), 1588 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" 1589 V8PRIxPTR " ; args*\n", 1590 top_address + output_frame_offset, output_frame_offset, value); 1591 } 1592 1593 // Copy the register parameters to the failure frame. 1594 int arguments_length_offset = -1; 1595 for (int i = 0; i < descriptor->register_param_count_; ++i) { 1596 output_frame_offset -= kPointerSize; 1597 DoTranslateCommand(iterator, 0, output_frame_offset); 1598 1599 if (!arg_count_known && descriptor->IsParameterCountRegister(i)) { 1600 arguments_length_offset = output_frame_offset; 1601 } 1602 } 1603 1604 ASSERT(0 == output_frame_offset); 1605 1606 if (!arg_count_known) { 1607 ASSERT(arguments_length_offset >= 0); 1608 // We know it's a smi because 1) the code stub guarantees the stack 1609 // parameter count is in smi range, and 2) the DoTranslateCommand in the 1610 // parameter loop above translated that to a tagged value. 1611 Smi* smi_caller_arg_count = reinterpret_cast<Smi*>( 1612 output_frame->GetFrameSlot(arguments_length_offset)); 1613 caller_arg_count = smi_caller_arg_count->value(); 1614 output_frame->SetFrameSlot(length_frame_offset, caller_arg_count); 1615 if (trace_scope_ != NULL) { 1616 PrintF(trace_scope_->file(), 1617 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" 1618 V8PRIxPTR " ; args.length\n", 1619 top_address + length_frame_offset, length_frame_offset, 1620 caller_arg_count); 1621 } 1622 value = frame_ptr + StandardFrameConstants::kCallerSPOffset + 1623 (caller_arg_count - 1) * kPointerSize; 1624 output_frame->SetFrameSlot(args_arguments_offset, value); 1625 if (trace_scope_ != NULL) { 1626 PrintF(trace_scope_->file(), 1627 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" 1628 V8PRIxPTR " ; args.arguments\n", 1629 top_address + args_arguments_offset, args_arguments_offset, 1630 value); 1631 } 1632 } 1633 1634 // Copy the double registers from the input into the output frame. 1635 CopyDoubleRegisters(output_frame); 1636 1637 // Fill registers containing handler and number of parameters. 
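  // SetPlatformCompiledStubRegisters is the per-architecture counterpart of
  // the "reg = number of parameters" / "reg = failure handler address" rows
  // in the diagram above; which concrete registers are written is left to the
  // platform-specific implementation driven by the descriptor.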
1638 SetPlatformCompiledStubRegisters(output_frame, descriptor); 1639 1640 // Compute this frame's PC, state, and continuation. 1641 Code* trampoline = NULL; 1642 if (descriptor->HasTailCallContinuation()) { 1643 StubFailureTailCallTrampolineStub().FindCodeInCache(&trampoline, isolate_); 1644 } else { 1645 StubFunctionMode function_mode = descriptor->function_mode_; 1646 StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline, 1647 isolate_); 1648 } 1649 ASSERT(trampoline != NULL); 1650 output_frame->SetPc(reinterpret_cast<intptr_t>( 1651 trampoline->instruction_start())); 1652 output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS)); 1653 Code* notify_failure = NotifyStubFailureBuiltin(); 1654 output_frame->SetContinuation( 1655 reinterpret_cast<intptr_t>(notify_failure->entry())); 1656 } 1657 1658 1659 Handle<Object> Deoptimizer::MaterializeNextHeapObject() { 1660 int object_index = materialization_object_index_++; 1661 ObjectMaterializationDescriptor desc = deferred_objects_[object_index]; 1662 const int length = desc.object_length(); 1663 1664 if (desc.duplicate_object() >= 0) { 1665 // Found a previously materialized object by de-duplication. 1666 object_index = desc.duplicate_object(); 1667 materialized_objects_->Add(Handle<Object>()); 1668 } else if (desc.is_arguments() && ArgumentsObjectIsAdapted(object_index)) { 1669 // Use the arguments adapter frame we just built to materialize the 1670 // arguments object. FunctionGetArguments can't throw an exception. 1671 Handle<JSFunction> function = ArgumentsObjectFunction(object_index); 1672 Handle<JSObject> arguments = Handle<JSObject>::cast( 1673 Accessors::FunctionGetArguments(function)); 1674 materialized_objects_->Add(arguments); 1675 materialization_value_index_ += length; 1676 } else if (desc.is_arguments()) { 1677 // Construct an arguments object and copy the parameters to a newly 1678 // allocated arguments object backing store. 1679 Handle<JSFunction> function = ArgumentsObjectFunction(object_index); 1680 Handle<JSObject> arguments = 1681 isolate_->factory()->NewArgumentsObject(function, length); 1682 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length); 1683 ASSERT(array->length() == length); 1684 arguments->set_elements(*array); 1685 materialized_objects_->Add(arguments); 1686 for (int i = 0; i < length; ++i) { 1687 Handle<Object> value = MaterializeNextValue(); 1688 array->set(i, *value); 1689 } 1690 } else { 1691 // Dispatch on the instance type of the object to be materialized. 1692 // We also need to make sure that the representation of all fields 1693 // in the given object are general enough to hold a tagged value. 1694 Handle<Map> map = Map::GeneralizeAllFieldRepresentations( 1695 Handle<Map>::cast(MaterializeNextValue()), Representation::Tagged()); 1696 switch (map->instance_type()) { 1697 case HEAP_NUMBER_TYPE: { 1698 // Reuse the HeapNumber value directly as it is already properly 1699 // tagged and skip materializing the HeapNumber explicitly. 
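        // Note: a boxed double occupies kDoubleSize / kPointerSize value
        // slots (two on 32-bit targets, one on 64-bit), so after consuming
        // one slot via MaterializeNextValue() the index is advanced by the
        // remaining kDoubleSize / kPointerSize - 1 slots below.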
1700 Handle<Object> object = MaterializeNextValue(); 1701 materialized_objects_->Add(object); 1702 materialization_value_index_ += kDoubleSize / kPointerSize - 1; 1703 break; 1704 } 1705 case JS_OBJECT_TYPE: { 1706 Handle<JSObject> object = 1707 isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED, false); 1708 materialized_objects_->Add(object); 1709 Handle<Object> properties = MaterializeNextValue(); 1710 Handle<Object> elements = MaterializeNextValue(); 1711 object->set_properties(FixedArray::cast(*properties)); 1712 object->set_elements(FixedArrayBase::cast(*elements)); 1713 for (int i = 0; i < length - 3; ++i) { 1714 Handle<Object> value = MaterializeNextValue(); 1715 object->FastPropertyAtPut(i, *value); 1716 } 1717 break; 1718 } 1719 case JS_ARRAY_TYPE: { 1720 Handle<JSArray> object = 1721 isolate_->factory()->NewJSArray(0, map->elements_kind()); 1722 materialized_objects_->Add(object); 1723 Handle<Object> properties = MaterializeNextValue(); 1724 Handle<Object> elements = MaterializeNextValue(); 1725 Handle<Object> length = MaterializeNextValue(); 1726 object->set_properties(FixedArray::cast(*properties)); 1727 object->set_elements(FixedArrayBase::cast(*elements)); 1728 object->set_length(*length); 1729 break; 1730 } 1731 default: 1732 PrintF(stderr, 1733 "[couldn't handle instance type %d]\n", map->instance_type()); 1734 UNREACHABLE(); 1735 } 1736 } 1737 1738 return materialized_objects_->at(object_index); 1739 } 1740 1741 1742 Handle<Object> Deoptimizer::MaterializeNextValue() { 1743 int value_index = materialization_value_index_++; 1744 Handle<Object> value = materialized_values_->at(value_index); 1745 if (*value == isolate_->heap()->arguments_marker()) { 1746 value = MaterializeNextHeapObject(); 1747 } 1748 return value; 1749 } 1750 1751 1752 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) { 1753 ASSERT_NE(DEBUGGER, bailout_type_); 1754 1755 // Walk all JavaScript output frames with the given frame iterator. 1756 for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) { 1757 if (frame_index != 0) it->Advance(); 1758 JavaScriptFrame* frame = it->frame(); 1759 jsframe_functions_.Add(handle(frame->function(), isolate_)); 1760 jsframe_has_adapted_arguments_.Add(frame->has_adapted_arguments()); 1761 } 1762 1763 // Handlify all tagged object values before triggering any allocation. 1764 List<Handle<Object> > values(deferred_objects_tagged_values_.length()); 1765 for (int i = 0; i < deferred_objects_tagged_values_.length(); ++i) { 1766 values.Add(Handle<Object>(deferred_objects_tagged_values_[i], isolate_)); 1767 } 1768 1769 // Play it safe and clear all unhandlified values before we continue. 1770 deferred_objects_tagged_values_.Clear(); 1771 1772 // Materialize all heap numbers before looking at arguments because when the 1773 // output frames are used to materialize arguments objects later on they need 1774 // to already contain valid heap numbers. 1775 for (int i = 0; i < deferred_heap_numbers_.length(); i++) { 1776 HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i]; 1777 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); 1778 if (trace_scope_ != NULL) { 1779 PrintF(trace_scope_->file(), 1780 "Materialized a new heap number %p [%e] in slot %p\n", 1781 reinterpret_cast<void*>(*num), 1782 d.value(), 1783 d.destination()); 1784 } 1785 Memory::Object_at(d.destination()) = *num; 1786 } 1787 1788 // Materialize all heap numbers required for arguments/captured objects. 
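  // Unlike the Address-based descriptors above, these
  // HeapNumberMaterializationDescriptor<int> entries carry an index into the
  // handlified `values` list: each freshly allocated number replaces the hole
  // that AddObjectDoubleValue left at that index.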
1789   for (int i = 0; i < deferred_objects_double_values_.length(); i++) {
1790     HeapNumberMaterializationDescriptor<int> d =
1791         deferred_objects_double_values_[i];
1792     Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1793     if (trace_scope_ != NULL) {
1794       PrintF(trace_scope_->file(),
1795              "Materialized a new heap number %p [%e] for object at %d\n",
1796              reinterpret_cast<void*>(*num),
1797              d.value(),
1798              d.destination());
1799     }
1800     ASSERT(values.at(d.destination())->IsTheHole());
1801     values.Set(d.destination(), num);
1802   }
1803
1804   // Play it safe and clear all object double values before we continue.
1805   deferred_objects_double_values_.Clear();
1806
1807   // Materialize arguments/captured objects.
1808   if (!deferred_objects_.is_empty()) {
1809     List<Handle<Object> > materialized_objects(deferred_objects_.length());
1810     materialized_objects_ = &materialized_objects;
1811     materialized_values_ = &values;
1812
1813     while (materialization_object_index_ < deferred_objects_.length()) {
1814       int object_index = materialization_object_index_;
1815       ObjectMaterializationDescriptor descriptor =
1816           deferred_objects_.at(object_index);
1817
1818       // Find a previously materialized object by de-duplication or
1819       // materialize a new instance of the object if necessary. Store
1820       // the materialized object into the frame slot.
1821       Handle<Object> object = MaterializeNextHeapObject();
1822       Memory::Object_at(descriptor.slot_address()) = *object;
1823       if (trace_scope_ != NULL) {
1824         if (descriptor.is_arguments()) {
1825           PrintF(trace_scope_->file(),
1826                  "Materialized %sarguments object of length %d for %p: ",
1827                  ArgumentsObjectIsAdapted(object_index) ? "(adapted) " : "",
1828                  Handle<JSObject>::cast(object)->elements()->length(),
1829                  reinterpret_cast<void*>(descriptor.slot_address()));
1830         } else {
1831           PrintF(trace_scope_->file(),
1832                  "Materialized captured object of size %d for %p: ",
1833                  Handle<HeapObject>::cast(object)->Size(),
1834                  reinterpret_cast<void*>(descriptor.slot_address()));
1835         }
1836         object->ShortPrint(trace_scope_->file());
1837         PrintF(trace_scope_->file(), "\n");
1838       }
1839     }
1840
1841     ASSERT(materialization_object_index_ == materialized_objects_->length());
1842     ASSERT(materialization_value_index_ == materialized_values_->length());
1843   }
1844 }
1845
1846
1847 #ifdef ENABLE_DEBUGGER_SUPPORT
1848 void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
1849     Address parameters_top,
1850     uint32_t parameters_size,
1851     Address expressions_top,
1852     uint32_t expressions_size,
1853     DeoptimizedFrameInfo* info) {
1854   ASSERT_EQ(DEBUGGER, bailout_type_);
1855   Address parameters_bottom = parameters_top + parameters_size;
1856   Address expressions_bottom = expressions_top + expressions_size;
1857   for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
1858     HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
1859
1860     // Check if the heap number to materialize actually belongs to the frame
1861     // being extracted.
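    // Illustrative index arithmetic (made-up numbers): with
    // parameters_count() == 3, kPointerSize == 4 and a slot at
    // parameters_top + 4, the computation below yields
    // index = (3 - 1) - 4 / 4 = 1, i.e. slots are laid out top-down while
    // parameter indices count up.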
1862 Address slot = d.destination(); 1863 if (parameters_top <= slot && slot < parameters_bottom) { 1864 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); 1865 1866 int index = (info->parameters_count() - 1) - 1867 static_cast<int>(slot - parameters_top) / kPointerSize; 1868 1869 if (trace_scope_ != NULL) { 1870 PrintF(trace_scope_->file(), 1871 "Materializing a new heap number %p [%e] in slot %p" 1872 "for parameter slot #%d\n", 1873 reinterpret_cast<void*>(*num), 1874 d.value(), 1875 d.destination(), 1876 index); 1877 } 1878 1879 info->SetParameter(index, *num); 1880 } else if (expressions_top <= slot && slot < expressions_bottom) { 1881 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); 1882 1883 int index = info->expression_count() - 1 - 1884 static_cast<int>(slot - expressions_top) / kPointerSize; 1885 1886 if (trace_scope_ != NULL) { 1887 PrintF(trace_scope_->file(), 1888 "Materializing a new heap number %p [%e] in slot %p" 1889 "for expression slot #%d\n", 1890 reinterpret_cast<void*>(*num), 1891 d.value(), 1892 d.destination(), 1893 index); 1894 } 1895 1896 info->SetExpression(index, *num); 1897 } 1898 } 1899 } 1900 #endif 1901 1902 1903 static const char* TraceValueType(bool is_smi) { 1904 if (is_smi) { 1905 return "smi"; 1906 } 1907 1908 return "heap number"; 1909 } 1910 1911 1912 void Deoptimizer::DoTranslateObject(TranslationIterator* iterator, 1913 int object_index, 1914 int field_index) { 1915 disasm::NameConverter converter; 1916 Address object_slot = deferred_objects_[object_index].slot_address(); 1917 1918 Translation::Opcode opcode = 1919 static_cast<Translation::Opcode>(iterator->Next()); 1920 1921 switch (opcode) { 1922 case Translation::BEGIN: 1923 case Translation::JS_FRAME: 1924 case Translation::ARGUMENTS_ADAPTOR_FRAME: 1925 case Translation::CONSTRUCT_STUB_FRAME: 1926 case Translation::GETTER_STUB_FRAME: 1927 case Translation::SETTER_STUB_FRAME: 1928 case Translation::COMPILED_STUB_FRAME: 1929 UNREACHABLE(); 1930 return; 1931 1932 case Translation::REGISTER: { 1933 int input_reg = iterator->Next(); 1934 intptr_t input_value = input_->GetRegister(input_reg); 1935 if (trace_scope_ != NULL) { 1936 PrintF(trace_scope_->file(), 1937 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 1938 reinterpret_cast<intptr_t>(object_slot), 1939 field_index); 1940 PrintF(trace_scope_->file(), 1941 "0x%08" V8PRIxPTR " ; %s ", input_value, 1942 converter.NameOfCPURegister(input_reg)); 1943 reinterpret_cast<Object*>(input_value)->ShortPrint( 1944 trace_scope_->file()); 1945 PrintF(trace_scope_->file(), 1946 "\n"); 1947 } 1948 AddObjectTaggedValue(input_value); 1949 return; 1950 } 1951 1952 case Translation::INT32_REGISTER: { 1953 int input_reg = iterator->Next(); 1954 intptr_t value = input_->GetRegister(input_reg); 1955 bool is_smi = Smi::IsValid(value); 1956 if (trace_scope_ != NULL) { 1957 PrintF(trace_scope_->file(), 1958 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 1959 reinterpret_cast<intptr_t>(object_slot), 1960 field_index); 1961 PrintF(trace_scope_->file(), 1962 "%" V8PRIdPTR " ; %s (%s)\n", value, 1963 converter.NameOfCPURegister(input_reg), 1964 TraceValueType(is_smi)); 1965 } 1966 if (is_smi) { 1967 intptr_t tagged_value = 1968 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 1969 AddObjectTaggedValue(tagged_value); 1970 } else { 1971 double double_value = static_cast<double>(static_cast<int32_t>(value)); 1972 AddObjectDoubleValue(double_value); 1973 } 1974 return; 1975 } 1976 1977 case Translation::UINT32_REGISTER: { 1978 int 
input_reg = iterator->Next(); 1979 uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg)); 1980 bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue)); 1981 if (trace_scope_ != NULL) { 1982 PrintF(trace_scope_->file(), 1983 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 1984 reinterpret_cast<intptr_t>(object_slot), 1985 field_index); 1986 PrintF(trace_scope_->file(), 1987 "%" V8PRIdPTR " ; uint %s (%s)\n", value, 1988 converter.NameOfCPURegister(input_reg), 1989 TraceValueType(is_smi)); 1990 } 1991 if (is_smi) { 1992 intptr_t tagged_value = 1993 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 1994 AddObjectTaggedValue(tagged_value); 1995 } else { 1996 double double_value = static_cast<double>(static_cast<uint32_t>(value)); 1997 AddObjectDoubleValue(double_value); 1998 } 1999 return; 2000 } 2001 2002 case Translation::DOUBLE_REGISTER: { 2003 int input_reg = iterator->Next(); 2004 double value = input_->GetDoubleRegister(input_reg); 2005 if (trace_scope_ != NULL) { 2006 PrintF(trace_scope_->file(), 2007 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 2008 reinterpret_cast<intptr_t>(object_slot), 2009 field_index); 2010 PrintF(trace_scope_->file(), 2011 "%e ; %s\n", value, 2012 DoubleRegister::AllocationIndexToString(input_reg)); 2013 } 2014 AddObjectDoubleValue(value); 2015 return; 2016 } 2017 2018 case Translation::STACK_SLOT: { 2019 int input_slot_index = iterator->Next(); 2020 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2021 intptr_t input_value = input_->GetFrameSlot(input_offset); 2022 if (trace_scope_ != NULL) { 2023 PrintF(trace_scope_->file(), 2024 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 2025 reinterpret_cast<intptr_t>(object_slot), 2026 field_index); 2027 PrintF(trace_scope_->file(), 2028 "0x%08" V8PRIxPTR " ; [sp + %d] ", input_value, input_offset); 2029 reinterpret_cast<Object*>(input_value)->ShortPrint( 2030 trace_scope_->file()); 2031 PrintF(trace_scope_->file(), 2032 "\n"); 2033 } 2034 AddObjectTaggedValue(input_value); 2035 return; 2036 } 2037 2038 case Translation::INT32_STACK_SLOT: { 2039 int input_slot_index = iterator->Next(); 2040 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2041 intptr_t value = input_->GetFrameSlot(input_offset); 2042 bool is_smi = Smi::IsValid(value); 2043 if (trace_scope_ != NULL) { 2044 PrintF(trace_scope_->file(), 2045 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 2046 reinterpret_cast<intptr_t>(object_slot), 2047 field_index); 2048 PrintF(trace_scope_->file(), 2049 "%" V8PRIdPTR " ; [sp + %d] (%s)\n", 2050 value, input_offset, TraceValueType(is_smi)); 2051 } 2052 if (is_smi) { 2053 intptr_t tagged_value = 2054 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 2055 AddObjectTaggedValue(tagged_value); 2056 } else { 2057 double double_value = static_cast<double>(static_cast<int32_t>(value)); 2058 AddObjectDoubleValue(double_value); 2059 } 2060 return; 2061 } 2062 2063 case Translation::UINT32_STACK_SLOT: { 2064 int input_slot_index = iterator->Next(); 2065 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2066 uintptr_t value = 2067 static_cast<uintptr_t>(input_->GetFrameSlot(input_offset)); 2068 bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue)); 2069 if (trace_scope_ != NULL) { 2070 PrintF(trace_scope_->file(), 2071 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 2072 reinterpret_cast<intptr_t>(object_slot), 2073 field_index); 2074 PrintF(trace_scope_->file(), 2075 "%" V8PRIdPTR " ; 
[sp + %d] (uint %s)\n", 2076 value, input_offset, TraceValueType(is_smi)); 2077 } 2078 if (is_smi) { 2079 intptr_t tagged_value = 2080 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 2081 AddObjectTaggedValue(tagged_value); 2082 } else { 2083 double double_value = static_cast<double>(static_cast<uint32_t>(value)); 2084 AddObjectDoubleValue(double_value); 2085 } 2086 return; 2087 } 2088 2089 case Translation::DOUBLE_STACK_SLOT: { 2090 int input_slot_index = iterator->Next(); 2091 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2092 double value = input_->GetDoubleFrameSlot(input_offset); 2093 if (trace_scope_ != NULL) { 2094 PrintF(trace_scope_->file(), 2095 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 2096 reinterpret_cast<intptr_t>(object_slot), 2097 field_index); 2098 PrintF(trace_scope_->file(), 2099 "%e ; [sp + %d]\n", value, input_offset); 2100 } 2101 AddObjectDoubleValue(value); 2102 return; 2103 } 2104 2105 case Translation::LITERAL: { 2106 Object* literal = ComputeLiteral(iterator->Next()); 2107 if (trace_scope_ != NULL) { 2108 PrintF(trace_scope_->file(), 2109 " object @0x%08" V8PRIxPTR ": [field #%d] <- ", 2110 reinterpret_cast<intptr_t>(object_slot), 2111 field_index); 2112 literal->ShortPrint(trace_scope_->file()); 2113 PrintF(trace_scope_->file(), 2114 " ; literal\n"); 2115 } 2116 intptr_t value = reinterpret_cast<intptr_t>(literal); 2117 AddObjectTaggedValue(value); 2118 return; 2119 } 2120 2121 case Translation::DUPLICATED_OBJECT: { 2122 int object_index = iterator->Next(); 2123 if (trace_scope_ != NULL) { 2124 PrintF(trace_scope_->file(), 2125 " nested @0x%08" V8PRIxPTR ": [field #%d] <- ", 2126 reinterpret_cast<intptr_t>(object_slot), 2127 field_index); 2128 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file()); 2129 PrintF(trace_scope_->file(), 2130 " ; duplicate of object #%d\n", object_index); 2131 } 2132 // Use the materialization marker value as a sentinel and fill in 2133 // the object after the deoptimized frame is built. 2134 intptr_t value = reinterpret_cast<intptr_t>( 2135 isolate_->heap()->arguments_marker()); 2136 AddObjectDuplication(0, object_index); 2137 AddObjectTaggedValue(value); 2138 return; 2139 } 2140 2141 case Translation::ARGUMENTS_OBJECT: 2142 case Translation::CAPTURED_OBJECT: { 2143 int length = iterator->Next(); 2144 bool is_args = opcode == Translation::ARGUMENTS_OBJECT; 2145 if (trace_scope_ != NULL) { 2146 PrintF(trace_scope_->file(), 2147 " nested @0x%08" V8PRIxPTR ": [field #%d] <- ", 2148 reinterpret_cast<intptr_t>(object_slot), 2149 field_index); 2150 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file()); 2151 PrintF(trace_scope_->file(), 2152 " ; object (length = %d, is_args = %d)\n", length, is_args); 2153 } 2154 // Use the materialization marker value as a sentinel and fill in 2155 // the object after the deoptimized frame is built. 2156 intptr_t value = reinterpret_cast<intptr_t>( 2157 isolate_->heap()->arguments_marker()); 2158 AddObjectStart(0, length, is_args); 2159 AddObjectTaggedValue(value); 2160 // We save the object values on the side and materialize the actual 2161 // object after the deoptimized frame is built. 
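      // The loop below translates each field of the object just registered;
      // a field can itself be an ARGUMENTS_OBJECT or CAPTURED_OBJECT, in
      // which case DoTranslateObject recurses and registers a further
      // deferred object before returning here.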
2162 int object_index = deferred_objects_.length() - 1; 2163 for (int i = 0; i < length; i++) { 2164 DoTranslateObject(iterator, object_index, i); 2165 } 2166 return; 2167 } 2168 } 2169 } 2170 2171 2172 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator, 2173 int frame_index, 2174 unsigned output_offset) { 2175 disasm::NameConverter converter; 2176 // A GC-safe temporary placeholder that we can put in the output frame. 2177 const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0)); 2178 2179 Translation::Opcode opcode = 2180 static_cast<Translation::Opcode>(iterator->Next()); 2181 2182 switch (opcode) { 2183 case Translation::BEGIN: 2184 case Translation::JS_FRAME: 2185 case Translation::ARGUMENTS_ADAPTOR_FRAME: 2186 case Translation::CONSTRUCT_STUB_FRAME: 2187 case Translation::GETTER_STUB_FRAME: 2188 case Translation::SETTER_STUB_FRAME: 2189 case Translation::COMPILED_STUB_FRAME: 2190 UNREACHABLE(); 2191 return; 2192 2193 case Translation::REGISTER: { 2194 int input_reg = iterator->Next(); 2195 intptr_t input_value = input_->GetRegister(input_reg); 2196 if (trace_scope_ != NULL) { 2197 PrintF( 2198 trace_scope_->file(), 2199 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ", 2200 output_[frame_index]->GetTop() + output_offset, 2201 output_offset, 2202 input_value, 2203 converter.NameOfCPURegister(input_reg)); 2204 reinterpret_cast<Object*>(input_value)->ShortPrint( 2205 trace_scope_->file()); 2206 PrintF(trace_scope_->file(), "\n"); 2207 } 2208 output_[frame_index]->SetFrameSlot(output_offset, input_value); 2209 return; 2210 } 2211 2212 case Translation::INT32_REGISTER: { 2213 int input_reg = iterator->Next(); 2214 intptr_t value = input_->GetRegister(input_reg); 2215 bool is_smi = Smi::IsValid(value); 2216 if (trace_scope_ != NULL) { 2217 PrintF( 2218 trace_scope_->file(), 2219 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n", 2220 output_[frame_index]->GetTop() + output_offset, 2221 output_offset, 2222 value, 2223 converter.NameOfCPURegister(input_reg), 2224 TraceValueType(is_smi)); 2225 } 2226 if (is_smi) { 2227 intptr_t tagged_value = 2228 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 2229 output_[frame_index]->SetFrameSlot(output_offset, tagged_value); 2230 } else { 2231 // We save the untagged value on the side and store a GC-safe 2232 // temporary placeholder in the frame. 2233 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, 2234 static_cast<double>(static_cast<int32_t>(value))); 2235 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 2236 } 2237 return; 2238 } 2239 2240 case Translation::UINT32_REGISTER: { 2241 int input_reg = iterator->Next(); 2242 uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg)); 2243 bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue); 2244 if (trace_scope_ != NULL) { 2245 PrintF( 2246 trace_scope_->file(), 2247 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR 2248 " ; uint %s (%s)\n", 2249 output_[frame_index]->GetTop() + output_offset, 2250 output_offset, 2251 value, 2252 converter.NameOfCPURegister(input_reg), 2253 TraceValueType(is_smi)); 2254 } 2255 if (is_smi) { 2256 intptr_t tagged_value = 2257 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 2258 output_[frame_index]->SetFrameSlot(output_offset, tagged_value); 2259 } else { 2260 // We save the untagged value on the side and store a GC-safe 2261 // temporary placeholder in the frame. 
2262 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, 2263 static_cast<double>(static_cast<uint32_t>(value))); 2264 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 2265 } 2266 return; 2267 } 2268 2269 case Translation::DOUBLE_REGISTER: { 2270 int input_reg = iterator->Next(); 2271 double value = input_->GetDoubleRegister(input_reg); 2272 if (trace_scope_ != NULL) { 2273 PrintF(trace_scope_->file(), 2274 " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n", 2275 output_[frame_index]->GetTop() + output_offset, 2276 output_offset, 2277 value, 2278 DoubleRegister::AllocationIndexToString(input_reg)); 2279 } 2280 // We save the untagged value on the side and store a GC-safe 2281 // temporary placeholder in the frame. 2282 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value); 2283 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 2284 return; 2285 } 2286 2287 case Translation::STACK_SLOT: { 2288 int input_slot_index = iterator->Next(); 2289 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2290 intptr_t input_value = input_->GetFrameSlot(input_offset); 2291 if (trace_scope_ != NULL) { 2292 PrintF(trace_scope_->file(), 2293 " 0x%08" V8PRIxPTR ": ", 2294 output_[frame_index]->GetTop() + output_offset); 2295 PrintF(trace_scope_->file(), 2296 "[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ", 2297 output_offset, 2298 input_value, 2299 input_offset); 2300 reinterpret_cast<Object*>(input_value)->ShortPrint( 2301 trace_scope_->file()); 2302 PrintF(trace_scope_->file(), "\n"); 2303 } 2304 output_[frame_index]->SetFrameSlot(output_offset, input_value); 2305 return; 2306 } 2307 2308 case Translation::INT32_STACK_SLOT: { 2309 int input_slot_index = iterator->Next(); 2310 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2311 intptr_t value = input_->GetFrameSlot(input_offset); 2312 bool is_smi = Smi::IsValid(value); 2313 if (trace_scope_ != NULL) { 2314 PrintF(trace_scope_->file(), 2315 " 0x%08" V8PRIxPTR ": ", 2316 output_[frame_index]->GetTop() + output_offset); 2317 PrintF(trace_scope_->file(), 2318 "[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n", 2319 output_offset, 2320 value, 2321 input_offset, 2322 TraceValueType(is_smi)); 2323 } 2324 if (is_smi) { 2325 intptr_t tagged_value = 2326 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 2327 output_[frame_index]->SetFrameSlot(output_offset, tagged_value); 2328 } else { 2329 // We save the untagged value on the side and store a GC-safe 2330 // temporary placeholder in the frame. 
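        // The placeholder used below is Smi::FromInt(0), a tagged immediate,
        // so a GC scanning the half-built frame never dereferences it;
        // AddDoubleValue records (slot address, double value) so the real
        // HeapNumber can be allocated and stored once it is safe to allocate.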
2331 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, 2332 static_cast<double>(static_cast<int32_t>(value))); 2333 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 2334 } 2335 return; 2336 } 2337 2338 case Translation::UINT32_STACK_SLOT: { 2339 int input_slot_index = iterator->Next(); 2340 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2341 uintptr_t value = 2342 static_cast<uintptr_t>(input_->GetFrameSlot(input_offset)); 2343 bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue); 2344 if (trace_scope_ != NULL) { 2345 PrintF(trace_scope_->file(), 2346 " 0x%08" V8PRIxPTR ": ", 2347 output_[frame_index]->GetTop() + output_offset); 2348 PrintF(trace_scope_->file(), 2349 "[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n", 2350 output_offset, 2351 value, 2352 input_offset, 2353 TraceValueType(is_smi)); 2354 } 2355 if (is_smi) { 2356 intptr_t tagged_value = 2357 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 2358 output_[frame_index]->SetFrameSlot(output_offset, tagged_value); 2359 } else { 2360 // We save the untagged value on the side and store a GC-safe 2361 // temporary placeholder in the frame. 2362 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, 2363 static_cast<double>(static_cast<uint32_t>(value))); 2364 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 2365 } 2366 return; 2367 } 2368 2369 case Translation::DOUBLE_STACK_SLOT: { 2370 int input_slot_index = iterator->Next(); 2371 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index); 2372 double value = input_->GetDoubleFrameSlot(input_offset); 2373 if (trace_scope_ != NULL) { 2374 PrintF(trace_scope_->file(), 2375 " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n", 2376 output_[frame_index]->GetTop() + output_offset, 2377 output_offset, 2378 value, 2379 input_offset); 2380 } 2381 // We save the untagged value on the side and store a GC-safe 2382 // temporary placeholder in the frame. 2383 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value); 2384 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 2385 return; 2386 } 2387 2388 case Translation::LITERAL: { 2389 Object* literal = ComputeLiteral(iterator->Next()); 2390 if (trace_scope_ != NULL) { 2391 PrintF(trace_scope_->file(), 2392 " 0x%08" V8PRIxPTR ": [top + %d] <- ", 2393 output_[frame_index]->GetTop() + output_offset, 2394 output_offset); 2395 literal->ShortPrint(trace_scope_->file()); 2396 PrintF(trace_scope_->file(), " ; literal\n"); 2397 } 2398 intptr_t value = reinterpret_cast<intptr_t>(literal); 2399 output_[frame_index]->SetFrameSlot(output_offset, value); 2400 return; 2401 } 2402 2403 case Translation::DUPLICATED_OBJECT: { 2404 int object_index = iterator->Next(); 2405 if (trace_scope_ != NULL) { 2406 PrintF(trace_scope_->file(), 2407 " 0x%08" V8PRIxPTR ": [top + %d] <- ", 2408 output_[frame_index]->GetTop() + output_offset, 2409 output_offset); 2410 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file()); 2411 PrintF(trace_scope_->file(), 2412 " ; duplicate of object #%d\n", object_index); 2413 } 2414 // Use the materialization marker value as a sentinel and fill in 2415 // the object after the deoptimized frame is built. 
2416       intptr_t value = reinterpret_cast<intptr_t>(
2417           isolate_->heap()->arguments_marker());
2418       AddObjectDuplication(output_[frame_index]->GetTop() + output_offset,
2419                            object_index);
2420       output_[frame_index]->SetFrameSlot(output_offset, value);
2421       return;
2422     }
2423
2424     case Translation::ARGUMENTS_OBJECT:
2425     case Translation::CAPTURED_OBJECT: {
2426       int length = iterator->Next();
2427       bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2428       if (trace_scope_ != NULL) {
2429         PrintF(trace_scope_->file(),
2430                " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2431                output_[frame_index]->GetTop() + output_offset,
2432                output_offset);
2433         isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2434         PrintF(trace_scope_->file(),
2435                " ; object (length = %d, is_args = %d)\n", length, is_args);
2436       }
2437       // Use the materialization marker value as a sentinel and fill in
2438       // the object after the deoptimized frame is built.
2439       intptr_t value = reinterpret_cast<intptr_t>(
2440           isolate_->heap()->arguments_marker());
2441       AddObjectStart(output_[frame_index]->GetTop() + output_offset,
2442                      length, is_args);
2443       output_[frame_index]->SetFrameSlot(output_offset, value);
2444       // We save the object values on the side and materialize the actual
2445       // object after the deoptimized frame is built.
2446       int object_index = deferred_objects_.length() - 1;
2447       for (int i = 0; i < length; i++) {
2448         DoTranslateObject(iterator, object_index, i);
2449       }
2450       return;
2451     }
2452   }
2453 }
2454
2455
2456 unsigned Deoptimizer::ComputeInputFrameSize() const {
2457   unsigned fixed_size = ComputeFixedSize(function_);
2458   // The fp-to-sp delta already takes the context and the function
2459   // into account, so we have to avoid double-counting them.
2460   unsigned result = fixed_size + fp_to_sp_delta_ -
2461       StandardFrameConstants::kFixedFrameSizeFromFp;
2462 #ifdef DEBUG
2463   if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
2464     unsigned stack_slots = compiled_code_->stack_slots();
2465     unsigned outgoing_size = ComputeOutgoingArgumentSize();
2466     ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
2467   }
2468 #endif
2469   return result;
2470 }
2471
2472
2473 unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
2474   // The fixed part of the frame consists of the return address, frame
2475   // pointer, function, context, and all the incoming arguments.
2476   return ComputeIncomingArgumentSize(function) +
2477       StandardFrameConstants::kFixedFrameSize;
2478 }
2479
2480
2481 unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
2482   // The incoming arguments are the values for the formal parameters and
2483   // the receiver. Every slot contains a pointer.
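  // Worked example (made-up numbers): a JSFunction with two formal parameters
  // yields (2 + 1) * kPointerSize below (the extra slot is the receiver),
  // while the Smi sentinel used for stub frames yields 0.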
2484 if (function->IsSmi()) { 2485 ASSERT(Smi::cast(function) == Smi::FromInt(StackFrame::STUB)); 2486 return 0; 2487 } 2488 unsigned arguments = function->shared()->formal_parameter_count() + 1; 2489 return arguments * kPointerSize; 2490 } 2491 2492 2493 unsigned Deoptimizer::ComputeOutgoingArgumentSize() const { 2494 DeoptimizationInputData* data = DeoptimizationInputData::cast( 2495 compiled_code_->deoptimization_data()); 2496 unsigned height = data->ArgumentsStackHeight(bailout_id_)->value(); 2497 return height * kPointerSize; 2498 } 2499 2500 2501 Object* Deoptimizer::ComputeLiteral(int index) const { 2502 DeoptimizationInputData* data = DeoptimizationInputData::cast( 2503 compiled_code_->deoptimization_data()); 2504 FixedArray* literals = data->LiteralArray(); 2505 return literals->get(index); 2506 } 2507 2508 2509 void Deoptimizer::AddObjectStart(intptr_t slot, int length, bool is_args) { 2510 ObjectMaterializationDescriptor object_desc( 2511 reinterpret_cast<Address>(slot), jsframe_count_, length, -1, is_args); 2512 deferred_objects_.Add(object_desc); 2513 } 2514 2515 2516 void Deoptimizer::AddObjectDuplication(intptr_t slot, int object_index) { 2517 ObjectMaterializationDescriptor object_desc( 2518 reinterpret_cast<Address>(slot), jsframe_count_, -1, object_index, false); 2519 deferred_objects_.Add(object_desc); 2520 } 2521 2522 2523 void Deoptimizer::AddObjectTaggedValue(intptr_t value) { 2524 deferred_objects_tagged_values_.Add(reinterpret_cast<Object*>(value)); 2525 } 2526 2527 2528 void Deoptimizer::AddObjectDoubleValue(double value) { 2529 deferred_objects_tagged_values_.Add(isolate()->heap()->the_hole_value()); 2530 HeapNumberMaterializationDescriptor<int> value_desc( 2531 deferred_objects_tagged_values_.length() - 1, value); 2532 deferred_objects_double_values_.Add(value_desc); 2533 } 2534 2535 2536 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) { 2537 HeapNumberMaterializationDescriptor<Address> value_desc( 2538 reinterpret_cast<Address>(slot_address), value); 2539 deferred_heap_numbers_.Add(value_desc); 2540 } 2541 2542 2543 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, 2544 BailoutType type, 2545 int max_entry_id) { 2546 // We cannot run this if the serializer is enabled because this will 2547 // cause us to emit relocation information for the external 2548 // references. This is fine because the deoptimizer's code section 2549 // isn't meant to be serialized at all. 
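  // Illustrative growth policy (the constant values are assumptions): the
  // entry count starts at kMinNumberOfEntries and doubles until it covers
  // max_entry_id, so with kMinNumberOfEntries == 64 a request for entry 70
  // would regenerate a 128-entry table, subject to the kMaxNumberOfEntries
  // cap asserted below.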
2550   ASSERT(type == EAGER || type == SOFT || type == LAZY);
2551   DeoptimizerData* data = isolate->deoptimizer_data();
2552   int entry_count = data->deopt_entry_code_entries_[type];
2553   if (max_entry_id < entry_count) return;
2554   entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
2555   while (max_entry_id >= entry_count) entry_count *= 2;
2556   ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries);
2557
2558   MacroAssembler masm(isolate, NULL, 16 * KB);
2559   masm.set_emit_debug_code(false);
2560   GenerateDeoptimizationEntries(&masm, entry_count, type);
2561   CodeDesc desc;
2562   masm.GetCode(&desc);
2563   ASSERT(!RelocInfo::RequiresRelocation(desc));
2564
2565   MemoryChunk* chunk = data->deopt_entry_code_[type];
2566   ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
2567          desc.instr_size);
2568   chunk->CommitArea(desc.instr_size);
2569   CopyBytes(chunk->area_start(), desc.buffer,
2570             static_cast<size_t>(desc.instr_size));
2571   CPU::FlushICache(chunk->area_start(), desc.instr_size);
2572
2573   data->deopt_entry_code_entries_[type] = entry_count;
2574 }
2575
2576
2577 FrameDescription::FrameDescription(uint32_t frame_size,
2578                                    JSFunction* function)
2579     : frame_size_(frame_size),
2580       function_(function),
2581       top_(kZapUint32),
2582       pc_(kZapUint32),
2583       fp_(kZapUint32),
2584       context_(kZapUint32) {
2585   // Zap all the registers.
2586   for (int r = 0; r < Register::kNumRegisters; r++) {
2587     SetRegister(r, kZapUint32);
2588   }
2589
2590   // Zap all the slots.
2591   for (unsigned o = 0; o < frame_size; o += kPointerSize) {
2592     SetFrameSlot(o, kZapUint32);
2593   }
2594 }
2595
2596
2597 int FrameDescription::ComputeFixedSize() {
2598   return StandardFrameConstants::kFixedFrameSize +
2599       (ComputeParametersCount() + 1) * kPointerSize;
2600 }
2601
2602
2603 unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
2604   if (slot_index >= 0) {
2605     // Local or spill slots. Skip the fixed part of the frame
2606     // including all arguments.
2607     unsigned base = GetFrameSize() - ComputeFixedSize();
2608     return base - ((slot_index + 1) * kPointerSize);
2609   } else {
2610     // Incoming parameter.
2611     int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
2612     unsigned base = GetFrameSize() - arg_size;
2613     return base - ((slot_index + 1) * kPointerSize);
2614   }
2615 }
2616
2617
2618 int FrameDescription::ComputeParametersCount() {
2619   switch (type_) {
2620     case StackFrame::JAVA_SCRIPT:
2621       return function_->shared()->formal_parameter_count();
2622     case StackFrame::ARGUMENTS_ADAPTOR: {
2623       // The last slot contains the number of incoming arguments as a smi.
2624       // Can't use GetExpression(0) because it would cause infinite recursion.
2625       return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
2626     }
2627     case StackFrame::STUB:
2628       return -1;  // Minus receiver.
2629     default:
2630       UNREACHABLE();
2631       return 0;
2632   }
2633 }
2634
2635
2636 Object* FrameDescription::GetParameter(int index) {
2637   ASSERT(index >= 0);
2638   ASSERT(index < ComputeParametersCount());
2639   // The slot indexes for incoming arguments are negative.
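  // Illustrative mapping (made-up numbers): with two parameters,
  // GetParameter(0) passes slot index 0 - 2 = -2 to GetOffsetFromSlotIndex,
  // which resolves it within the incoming-argument area of the frame
  // description.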
2640 unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount()); 2641 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset)); 2642 } 2643 2644 2645 unsigned FrameDescription::GetExpressionCount() { 2646 ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_); 2647 unsigned size = GetFrameSize() - ComputeFixedSize(); 2648 return size / kPointerSize; 2649 } 2650 2651 2652 Object* FrameDescription::GetExpression(int index) { 2653 ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_); 2654 unsigned offset = GetOffsetFromSlotIndex(index); 2655 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset)); 2656 } 2657 2658 2659 void TranslationBuffer::Add(int32_t value, Zone* zone) { 2660 // Encode the sign bit in the least significant bit. 2661 bool is_negative = (value < 0); 2662 uint32_t bits = ((is_negative ? -value : value) << 1) | 2663 static_cast<int32_t>(is_negative); 2664 // Encode the individual bytes using the least significant bit of 2665 // each byte to indicate whether or not more bytes follow. 2666 do { 2667 uint32_t next = bits >> 7; 2668 contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone); 2669 bits = next; 2670 } while (bits != 0); 2671 } 2672 2673 2674 int32_t TranslationIterator::Next() { 2675 // Run through the bytes until we reach one with a least significant 2676 // bit of zero (marks the end). 2677 uint32_t bits = 0; 2678 for (int i = 0; true; i += 7) { 2679 ASSERT(HasNext()); 2680 uint8_t next = buffer_->get(index_++); 2681 bits |= (next >> 1) << i; 2682 if ((next & 1) == 0) break; 2683 } 2684 // The bits encode the sign in the least significant bit. 2685 bool is_negative = (bits & 1) == 1; 2686 int32_t result = bits >> 1; 2687 return is_negative ? -result : result; 2688 } 2689 2690 2691 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) { 2692 int length = contents_.length(); 2693 Handle<ByteArray> result = factory->NewByteArray(length, TENURED); 2694 OS::MemCopy( 2695 result->GetDataStartAddress(), contents_.ToVector().start(), length); 2696 return result; 2697 } 2698 2699 2700 void Translation::BeginConstructStubFrame(int literal_id, unsigned height) { 2701 buffer_->Add(CONSTRUCT_STUB_FRAME, zone()); 2702 buffer_->Add(literal_id, zone()); 2703 buffer_->Add(height, zone()); 2704 } 2705 2706 2707 void Translation::BeginGetterStubFrame(int literal_id) { 2708 buffer_->Add(GETTER_STUB_FRAME, zone()); 2709 buffer_->Add(literal_id, zone()); 2710 } 2711 2712 2713 void Translation::BeginSetterStubFrame(int literal_id) { 2714 buffer_->Add(SETTER_STUB_FRAME, zone()); 2715 buffer_->Add(literal_id, zone()); 2716 } 2717 2718 2719 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) { 2720 buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone()); 2721 buffer_->Add(literal_id, zone()); 2722 buffer_->Add(height, zone()); 2723 } 2724 2725 2726 void Translation::BeginJSFrame(BailoutId node_id, 2727 int literal_id, 2728 unsigned height) { 2729 buffer_->Add(JS_FRAME, zone()); 2730 buffer_->Add(node_id.ToInt(), zone()); 2731 buffer_->Add(literal_id, zone()); 2732 buffer_->Add(height, zone()); 2733 } 2734 2735 2736 void Translation::BeginCompiledStubFrame() { 2737 buffer_->Add(COMPILED_STUB_FRAME, zone()); 2738 } 2739 2740 2741 void Translation::BeginArgumentsObject(int args_length) { 2742 buffer_->Add(ARGUMENTS_OBJECT, zone()); 2743 buffer_->Add(args_length, zone()); 2744 } 2745 2746 2747 void Translation::BeginCapturedObject(int length) { 2748 buffer_->Add(CAPTURED_OBJECT, zone()); 2749 buffer_->Add(length, zone()); 2750 } 2751 2752 2753 void 
Translation::DuplicateObject(int object_index) {
2754   buffer_->Add(DUPLICATED_OBJECT, zone());
2755   buffer_->Add(object_index, zone());
2756 }
2757
2758
2759 void Translation::StoreRegister(Register reg) {
2760   buffer_->Add(REGISTER, zone());
2761   buffer_->Add(reg.code(), zone());
2762 }
2763
2764
2765 void Translation::StoreInt32Register(Register reg) {
2766   buffer_->Add(INT32_REGISTER, zone());
2767   buffer_->Add(reg.code(), zone());
2768 }
2769
2770
2771 void Translation::StoreUint32Register(Register reg) {
2772   buffer_->Add(UINT32_REGISTER, zone());
2773   buffer_->Add(reg.code(), zone());
2774 }
2775
2776
2777 void Translation::StoreDoubleRegister(DoubleRegister reg) {
2778   buffer_->Add(DOUBLE_REGISTER, zone());
2779   buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
2780 }
2781
2782
2783 void Translation::StoreStackSlot(int index) {
2784   buffer_->Add(STACK_SLOT, zone());
2785   buffer_->Add(index, zone());
2786 }
2787
2788
2789 void Translation::StoreInt32StackSlot(int index) {
2790   buffer_->Add(INT32_STACK_SLOT, zone());
2791   buffer_->Add(index, zone());
2792 }
2793
2794
2795 void Translation::StoreUint32StackSlot(int index) {
2796   buffer_->Add(UINT32_STACK_SLOT, zone());
2797   buffer_->Add(index, zone());
2798 }
2799
2800
2801 void Translation::StoreDoubleStackSlot(int index) {
2802   buffer_->Add(DOUBLE_STACK_SLOT, zone());
2803   buffer_->Add(index, zone());
2804 }
2805
2806
2807 void Translation::StoreLiteral(int literal_id) {
2808   buffer_->Add(LITERAL, zone());
2809   buffer_->Add(literal_id, zone());
2810 }
2811
2812
2813 void Translation::StoreArgumentsObject(bool args_known,
2814                                        int args_index,
2815                                        int args_length) {
2816   buffer_->Add(ARGUMENTS_OBJECT, zone());
2817   buffer_->Add(args_known, zone());
2818   buffer_->Add(args_index, zone());
2819   buffer_->Add(args_length, zone());
2820 }
2821
2822
2823 int Translation::NumberOfOperandsFor(Opcode opcode) {
2824   switch (opcode) {
2825     case GETTER_STUB_FRAME:
2826     case SETTER_STUB_FRAME:
2827     case DUPLICATED_OBJECT:
2828     case ARGUMENTS_OBJECT:
2829     case CAPTURED_OBJECT:
2830     case REGISTER:
2831     case INT32_REGISTER:
2832     case UINT32_REGISTER:
2833     case DOUBLE_REGISTER:
2834     case STACK_SLOT:
2835     case INT32_STACK_SLOT:
2836     case UINT32_STACK_SLOT:
2837     case DOUBLE_STACK_SLOT:
2838     case LITERAL:
2839     case COMPILED_STUB_FRAME:
2840       return 1;
2841     case BEGIN:
2842     case ARGUMENTS_ADAPTOR_FRAME:
2843     case CONSTRUCT_STUB_FRAME:
2844       return 2;
2845     case JS_FRAME:
2846       return 3;
2847   }
2848   UNREACHABLE();
2849   return -1;
2850 }
2851
2852
2853 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
2854
2855 const char* Translation::StringFor(Opcode opcode) {
2856 #define TRANSLATION_OPCODE_CASE(item) case item: return #item;
2857   switch (opcode) {
2858     TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
2859   }
2860 #undef TRANSLATION_OPCODE_CASE
2861   UNREACHABLE();
2862   return "";
2863 }
2864
2865 #endif
2866
2867
2868 // We can't intermix stack decoding and allocations because the
2869 // deoptimization infrastructure is not GC safe.
2870 // Thus we build a temporary structure in malloced space.
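// Summary of the mapping implemented below: STACK_SLOT, INT32_STACK_SLOT,
// UINT32_STACK_SLOT and DOUBLE_STACK_SLOT become SlotRefs of type TAGGED,
// INT32, UINT32 and DOUBLE respectively, LITERAL becomes a handle to the
// literal, and every other opcode is either handled before we get here or
// cannot legitimately describe an argument slot, so it falls through to the
// default-constructed SlotRef at the end.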
2871 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator, 2872 DeoptimizationInputData* data, 2873 JavaScriptFrame* frame) { 2874 Translation::Opcode opcode = 2875 static_cast<Translation::Opcode>(iterator->Next()); 2876 2877 switch (opcode) { 2878 case Translation::BEGIN: 2879 case Translation::JS_FRAME: 2880 case Translation::ARGUMENTS_ADAPTOR_FRAME: 2881 case Translation::CONSTRUCT_STUB_FRAME: 2882 case Translation::GETTER_STUB_FRAME: 2883 case Translation::SETTER_STUB_FRAME: 2884 // Peeled off before getting here. 2885 break; 2886 2887 case Translation::DUPLICATED_OBJECT: 2888 case Translation::ARGUMENTS_OBJECT: 2889 case Translation::CAPTURED_OBJECT: 2890 // This can be only emitted for local slots not for argument slots. 2891 break; 2892 2893 case Translation::REGISTER: 2894 case Translation::INT32_REGISTER: 2895 case Translation::UINT32_REGISTER: 2896 case Translation::DOUBLE_REGISTER: 2897 // We are at safepoint which corresponds to call. All registers are 2898 // saved by caller so there would be no live registers at this 2899 // point. Thus these translation commands should not be used. 2900 break; 2901 2902 case Translation::STACK_SLOT: { 2903 int slot_index = iterator->Next(); 2904 Address slot_addr = SlotAddress(frame, slot_index); 2905 return SlotRef(slot_addr, SlotRef::TAGGED); 2906 } 2907 2908 case Translation::INT32_STACK_SLOT: { 2909 int slot_index = iterator->Next(); 2910 Address slot_addr = SlotAddress(frame, slot_index); 2911 return SlotRef(slot_addr, SlotRef::INT32); 2912 } 2913 2914 case Translation::UINT32_STACK_SLOT: { 2915 int slot_index = iterator->Next(); 2916 Address slot_addr = SlotAddress(frame, slot_index); 2917 return SlotRef(slot_addr, SlotRef::UINT32); 2918 } 2919 2920 case Translation::DOUBLE_STACK_SLOT: { 2921 int slot_index = iterator->Next(); 2922 Address slot_addr = SlotAddress(frame, slot_index); 2923 return SlotRef(slot_addr, SlotRef::DOUBLE); 2924 } 2925 2926 case Translation::LITERAL: { 2927 int literal_index = iterator->Next(); 2928 return SlotRef(data->GetIsolate(), 2929 data->LiteralArray()->get(literal_index)); 2930 } 2931 2932 case Translation::COMPILED_STUB_FRAME: 2933 UNREACHABLE(); 2934 break; 2935 } 2936 2937 UNREACHABLE(); 2938 return SlotRef(); 2939 } 2940 2941 2942 void SlotRef::ComputeSlotsForArguments(Vector<SlotRef>* args_slots, 2943 TranslationIterator* it, 2944 DeoptimizationInputData* data, 2945 JavaScriptFrame* frame) { 2946 // Process the translation commands for the arguments. 2947 2948 // Skip the translation command for the receiver. 2949 it->Skip(Translation::NumberOfOperandsFor( 2950 static_cast<Translation::Opcode>(it->Next()))); 2951 2952 // Compute slots for arguments. 2953 for (int i = 0; i < args_slots->length(); ++i) { 2954 (*args_slots)[i] = ComputeSlotForNextArgument(it, data, frame); 2955 } 2956 } 2957 2958 2959 Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments( 2960 JavaScriptFrame* frame, 2961 int inlined_jsframe_index, 2962 int formal_parameter_count) { 2963 DisallowHeapAllocation no_gc; 2964 int deopt_index = Safepoint::kNoDeoptimizationIndex; 2965 DeoptimizationInputData* data = 2966 static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index); 2967 TranslationIterator it(data->TranslationByteArray(), 2968 data->TranslationIndex(deopt_index)->value()); 2969 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next()); 2970 ASSERT(opcode == Translation::BEGIN); 2971 it.Next(); // Drop frame count. 
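  // Illustrative translation prefix being decoded here (the concrete stream
  // is an assumption): BEGIN is followed by a frame count and a JS frame
  // count, e.g. "BEGIN 3 2" for one inlined call plus an arguments adaptor;
  // the loop below then walks the per-frame records until it reaches the
  // requested inlined frame.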
2972   int jsframe_count = it.Next();
2973   USE(jsframe_count);
2974   ASSERT(jsframe_count > inlined_jsframe_index);
2975   int jsframes_to_skip = inlined_jsframe_index;
2976   while (true) {
2977     opcode = static_cast<Translation::Opcode>(it.Next());
2978     if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
2979       if (jsframes_to_skip == 0) {
2980         ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);
2981
2982         it.Skip(1);  // literal id
2983         int height = it.Next();
2984
2985         // We reached the arguments adaptor frame corresponding to the
2986         // inlined function in question. Number of arguments is height - 1.
2987         Vector<SlotRef> args_slots =
2988             Vector<SlotRef>::New(height - 1);  // Minus receiver.
2989         ComputeSlotsForArguments(&args_slots, &it, data, frame);
2990         return args_slots;
2991       }
2992     } else if (opcode == Translation::JS_FRAME) {
2993       if (jsframes_to_skip == 0) {
2994         // Skip over operands to advance to the next opcode.
2995         it.Skip(Translation::NumberOfOperandsFor(opcode));
2996
2997         // We reached the frame corresponding to the inlined function
2998         // in question. Process the translation commands for the
2999         // arguments. The number of arguments is equal to the formal
3000         // parameter count.
3001         Vector<SlotRef> args_slots =
3002             Vector<SlotRef>::New(formal_parameter_count);
3003         ComputeSlotsForArguments(&args_slots, &it, data, frame);
3004         return args_slots;
3005       }
3006       jsframes_to_skip--;
3007     }
3008
3009     // Skip over operands to advance to the next opcode.
3010     it.Skip(Translation::NumberOfOperandsFor(opcode));
3011   }
3012
3013   UNREACHABLE();
3014   return Vector<SlotRef>();
3015 }
3016
3017 #ifdef ENABLE_DEBUGGER_SUPPORT
3018
3019 DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
3020                                            int frame_index,
3021                                            bool has_arguments_adaptor,
3022                                            bool has_construct_stub) {
3023   FrameDescription* output_frame = deoptimizer->output_[frame_index];
3024   function_ = output_frame->GetFunction();
3025   has_construct_stub_ = has_construct_stub;
3026   expression_count_ = output_frame->GetExpressionCount();
3027   expression_stack_ = new Object*[expression_count_];
3028   // Get the source position using the unoptimized code.
3029   Address pc = reinterpret_cast<Address>(output_frame->GetPc());
3030   Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
3031   source_position_ = code->SourcePosition(pc);
3032
3033   for (int i = 0; i < expression_count_; i++) {
3034     SetExpression(i, output_frame->GetExpression(i));
3035   }
3036
3037   if (has_arguments_adaptor) {
3038     output_frame = deoptimizer->output_[frame_index - 1];
3039     ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
3040   }
3041
3042   parameters_count_ = output_frame->ComputeParametersCount();
3043   parameters_ = new Object*[parameters_count_];
3044   for (int i = 0; i < parameters_count_; i++) {
3045     SetParameter(i, output_frame->GetParameter(i));
3046   }
3047 }
3048
3049
3050 DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
3051   delete[] expression_stack_;
3052   delete[] parameters_;
3053 }
3054
3055
3056 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
3057   v->VisitPointer(BitCast<Object**>(&function_));
3058   v->VisitPointers(parameters_, parameters_ + parameters_count_);
3059   v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
3060 }
3061
3062 #endif  // ENABLE_DEBUGGER_SUPPORT
3063
3064 } }  // namespace v8::internal
3065