// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/deoptimizer.h"

#include "src/accessors.h"
#include "src/ast/prettyprinter.h"
#include "src/codegen.h"
#include "src/disasm.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"
#include "src/interpreter/interpreter.h"
#include "src/macro-assembler.h"
#include "src/tracing/trace-event.h"
#include "src/v8.h"


namespace v8 {
namespace internal {

// Allocates one memory chunk sized to hold the largest possible
// deoptimization entry table (see Deoptimizer::GetMaxDeoptTableSize()).
static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
                                  base::OS::CommitPageSize(),
#if defined(__native_client__)
                                  // The Native Client port of V8 uses an
                                  // interpreter, so code pages don't need
                                  // PROT_EXEC.
                                  NOT_EXECUTABLE,
#else
                                  EXECUTABLE,
#endif
                                  NULL);
}


// Pre-allocates one deopt-entry code chunk per bailout type; the entry
// stubs themselves are generated lazily into these chunks later.
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
    : allocator_(allocator),
      current_(NULL) {
  for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
    deopt_entry_code_entries_[i] = -1;  // -1: no entries generated yet.
    deopt_entry_code_[i] = AllocateCodeChunk(allocator);
  }
}


DeoptimizerData::~DeoptimizerData() {
  for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
    allocator_->Free<MemoryAllocator::kFull>(deopt_entry_code_[i]);
    deopt_entry_code_[i] = NULL;
  }
}


// Returns the optimized Code object on the native context's deoptimized-code
// list that contains |addr|, or NULL if there is none.
Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_->IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
    Isolate* isolate = function_->GetIsolate();
    Context* native_context = function_->context()->native_context();
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (code->contains(addr)) return code;
      element = code->next_code_link();
    }
  }
  return NULL;
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
                              BailoutType type,
                              unsigned bailout_id,
                              Address from,
                              int fp_to_sp_delta,
                              Isolate* isolate) {
  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                             function,
                                             type,
                                             bailout_id,
                                             from,
                                             fp_to_sp_delta,
                                             NULL);
  // Only one deoptimizer may be live per isolate at any time; it is
  // retrieved again (and ownership transferred) via Grab().
  CHECK(isolate->deoptimizer_data()->current_ == NULL);
  isolate->deoptimizer_data()->current_ = deoptimizer;
  return deoptimizer;
}


// No larger than 2K on all platforms
static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;


// Upper bound (in bytes, rounded up to whole commit pages) for one deopt
// entry table including its epilogue code.
size_t Deoptimizer::GetMaxDeoptTableSize() {
  int entries_size =
      Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
  int commit_page_size = static_cast<int>(base::OS::CommitPageSize());
  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
                    commit_page_size) + 1;
  return static_cast<size_t>(commit_page_size * page_count);
}


// Retrieves (and clears) the deoptimizer stashed on the isolate by New().
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
  Deoptimizer* result = isolate->deoptimizer_data()->current_;
  CHECK_NOT_NULL(result);
  result->DeleteFrameDescriptions();
  isolate->deoptimizer_data()->current_ = NULL;
  return result;
}


// Maps an index that counts only JavaScript frames to an index into the full
// output_ array, which also holds non-JS frames (e.g. arguments adaptors).
int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
  if (jsframe_index == 0) return 0;

  int frame_index = 0;
  while (jsframe_index >= 0) {
    FrameDescription* frame = output_[frame_index];
    // Only JavaScript frames consume the jsframe counter; other frame kinds
    // are stepped over but still occupy a slot in output_.
    if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
      jsframe_index--;
    }
    frame_index++;
  }

  return frame_index - 1;
}


// Builds a debugger-inspectable view of the |jsframe_index|-th (possibly
// inlined) JavaScript frame within the given optimized frame.  The caller
// owns the returned object (see DeleteDebuggerInspectableFrame).
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  CHECK(frame->is_optimized());

  TranslatedState translated_values(frame);
  translated_values.Prepare(false, frame->fp());

  // Locate the jsframe_index-th JS frame (full-codegen or interpreted) in
  // the translated state.
  TranslatedState::iterator frame_it = translated_values.end();
  int counter = jsframe_index;
  for (auto it = translated_values.begin(); it != translated_values.end();
       it++) {
    if (it->kind() == TranslatedFrame::kFunction ||
        it->kind() == TranslatedFrame::kInterpretedFunction) {
      if (counter == 0) {
        frame_it = it;
        break;
      }
      counter--;
    }
  }
  CHECK(frame_it != translated_values.end());

  DeoptimizedFrameInfo* info =
      new DeoptimizedFrameInfo(&translated_values, frame_it, isolate);

  return info;
}


void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                                 Isolate* isolate) {
  delete info;
}


// Emits the table of deoptimization entry stubs for the given bailout type.
void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
                                                int count,
                                                BailoutType type) {
  TableEntryGenerator generator(masm, type, count);
  generator.Generate();
}


// Walks the native context's optimized-functions list, invoking |visitor| on
// each function and unlinking list entries whose function no longer refers
// to optimized code.
void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    Context* context, OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  CHECK(context->IsNativeContext());

  visitor->EnterContext(context);

  // Visit the list of optimized functions, removing elements that
  // no longer refer to optimized code.
  JSFunction* prev = NULL;
  Object* element = context->OptimizedFunctionsListHead();
  Isolate* isolate = context->GetIsolate();
  while (!element->IsUndefined(isolate)) {
    JSFunction* function = JSFunction::cast(element);
    Object* next = function->next_function_link();
    // Note the comma operator: the visitor runs as a side effect of the
    // second operand, and the kind is re-checked afterwards because the
    // visitor may have swapped the function's code.
    if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
        (visitor->VisitFunction(function),
         function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
      // The function no longer refers to optimized code, or the visitor
      // changed the code to which it refers to no longer be optimized code.
      // Remove the function from this list.
      if (prev != NULL) {
        prev->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
      } else {
        context->SetOptimizedFunctionsListHead(next);
      }
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // Set the next function link to undefined to indicate it is no longer
      // in the optimized functions list.
      function->set_next_function_link(context->GetHeap()->undefined_value(),
                                       SKIP_WRITE_BARRIER);
    } else {
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // preserve this element.
      prev = function;
    }
    element = next;
  }

  visitor->LeaveContext(context);
}


// Applies |visitor| to the optimized functions of every native context.
void Deoptimizer::VisitAllOptimizedFunctions(
    Isolate* isolate,
    OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  // Run through the list of all native contexts.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
    context = Context::cast(context)->next_context_link();
  }
}


// Unlink functions referring to code marked for deoptimization, then move
// marked code from the optimized code list to the deoptimized code list,
// and patch code for lazy deopt.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  // A "closure" that unlinks optimized code that is going to be
  // deoptimized from the functions that refer to it.
  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
   public:
    virtual void EnterContext(Context* context) { }  // Don't care.
    virtual void LeaveContext(Context* context) { }  // Don't care.
    virtual void VisitFunction(JSFunction* function) {
      Code* code = function->code();
      if (!code->marked_for_deoptimization()) return;

      // Unlink this function and evict from optimized code map.
      // Reverting to the SharedFunctionInfo's (unoptimized) code makes the
      // list walk in VisitAllOptimizedFunctionsForContext drop the function.
      SharedFunctionInfo* shared = function->shared();
      function->set_code(shared->code());

      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer unlinked: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
    }
  };

  // Unlink all functions that refer to marked code.
  SelectedCodeUnlinker unlinker;
  VisitAllOptimizedFunctionsForContext(context, &unlinker);

  Isolate* isolate = context->GetHeap()->isolate();
#ifdef DEBUG
  Code* topmost_optimized_code = NULL;
  bool safe_to_deopt_topmost_optimized_code = false;
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top());
       !it.done(); it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code* code = it.frame()->LookupCode();
      JSFunction* function =
          static_cast<OptimizedFrame*>(it.frame())->function();
      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer found activation of function: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
      SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
      int deopt_index = safepoint.deoptimization_index();
      // Turbofan deopt is checked when we are patching addresses on stack.
      bool turbofanned = code->is_turbofanned() &&
                         function->shared()->asm_function() &&
                         !FLAG_turbo_asm_deoptimization;
      bool safe_to_deopt =
          deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
      bool builtin = code->kind() == Code::BUILTIN;
      CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned ||
            builtin);
      if (topmost_optimized_code == NULL) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_to_deopt;
      }
    }
  }
#endif

  // Move marked code from the optimized code list to the deoptimized
  // code list, collecting them into a ZoneList.
  Zone zone(isolate->allocator());
  ZoneList<Code*> codes(10, &zone);

  // Walk over all optimized code objects in this native context.
  Code* prev = NULL;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();

    if (code->marked_for_deoptimization()) {
      // Put the code into the list for later patching.
      codes.Add(code, &zone);

      if (prev != NULL) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  // We need a handle scope only because of the macro assembler,
  // which is used in code patching in EnsureCodeForDeoptimizationEntry.
  HandleScope scope(isolate);

  // Now patch all the codes for deoptimization.
  for (int i = 0; i < codes.length(); i++) {
#ifdef DEBUG
    if (codes[i] == topmost_optimized_code) {
      DCHECK(safe_to_deopt_topmost_optimized_code);
    }
#endif
    // It is finally time to die, code object.

    // Remove the code from optimized code map.
    DeoptimizationInputData* deopt_data =
        DeoptimizationInputData::cast(codes[i]->deoptimization_data());
    SharedFunctionInfo* shared =
        SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo());
    shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");

    // Do platform-specific patching to force any activations to lazy deopt.
    PatchCodeForDeoptimization(isolate, codes[i]);

    // We might be in the middle of incremental marking with compaction.
    // Tell collector to treat this code object in a special way and
    // ignore all slots that might have been recorded on it.
    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
  }
}


// Marks and then deoptimizes all optimized code in every native context.
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::DeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, mark all code, then deoptimize.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->next_context_link();
  }
}


// Deoptimizes code that is already marked, in every native context.
void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::DeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, deoptimize code already marked.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->next_context_link();
  }
}


// Flags every optimized Code object in the context for deoptimization;
// the actual deopt happens later in DeoptimizeMarkedCodeForContext.
void Deoptimizer::MarkAllCodeForContext(Context* context) {
  Object* element = context->OptimizedCodeListHead();
  Isolate* isolate = context->GetIsolate();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    code->set_marked_for_deoptimization(true);
    element = code->next_code_link();
  }
}


// Deoptimizes a single function's optimized code (and unlinks any other
// functions in the same native context that share that code).
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  Isolate* isolate = function->GetIsolate();
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::DeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  Code* code = function->code();
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code->set_marked_for_deoptimization(true);
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
  }
}


void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}


// Returns whether deopt tracing is enabled for the given bailout/frame kind.
bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
                                  StackFrame::Type frame_type) {
  switch (deopt_type) {
    case EAGER:
    case SOFT:
    case LAZY:
      return (frame_type == StackFrame::STUB)
                 ? FLAG_trace_stub_failures
                 : FLAG_trace_deopt;
  }
  FATAL("Unsupported deopt type");
  return false;
}


const char* Deoptimizer::MessageFor(BailoutType type) {
  switch (type) {
    case EAGER: return "eager";
    case SOFT: return "soft";
    case LAZY: return "lazy";
  }
  FATAL("Unsupported deopt type");
  return NULL;
}

Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
                         BailoutType type, unsigned bailout_id, Address from,
                         int fp_to_sp_delta, Code* optimized_code)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      bailout_type_(type),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      deoptimizing_throw_(false),
      catch_handler_data_(-1),
      catch_handler_pc_offset_(-1),
      input_(nullptr),
      output_count_(0),
      jsframe_count_(0),
      output_(nullptr),
      caller_frame_top_(0),
      caller_fp_(0),
      caller_pc_(0),
      caller_constant_pool_(0),
      input_frame_context_(0),
      stack_fp_(0),
      trace_scope_(nullptr) {
  // A pending lazy-throw flag on the isolate means this deopt should land
  // in a catch handler rather than re-execute the deopt point.
  if (isolate->deoptimizer_lazy_throw()) {
    isolate->set_deoptimizer_lazy_throw(false);
    deoptimizing_throw_ = true;
  }

  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
  // indicating an internal frame.
  if (function->IsSmi()) {
    function = nullptr;
  }
  DCHECK(from != nullptr);
  if (function != nullptr && function->IsOptimized()) {
    function->shared()->increment_deopt_count();
    if (bailout_type_ == Deoptimizer::SOFT) {
      isolate->counters()->soft_deopts_executed()->Increment();
      // Soft deopts shouldn't count against the overall re-optimization count
      // that can eventually lead to disabling optimization for a function.
      int opt_count = function->shared()->opt_count();
      if (opt_count > 0) opt_count--;
      function->shared()->set_opt_count(opt_count);
    }
  }
  compiled_code_ = FindOptimizedCode(function, optimized_code);
#if DEBUG
  DCHECK(compiled_code_ != NULL);
  if (type == EAGER || type == SOFT || type == LAZY) {
    DCHECK(compiled_code_->kind() != Code::FUNCTION);
  }
#endif

  StackFrame::Type frame_type = function == NULL
      ? StackFrame::STUB
      : StackFrame::JAVA_SCRIPT;
  trace_scope_ = TraceEnabledFor(type, frame_type) ?
      new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
#ifdef DEBUG
  // From here on no heap allocation is allowed until
  // DeleteFrameDescriptions() runs (see matching teardown there).
  CHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    PROFILE(isolate_, CodeDeoptEvent(compiled_code_, from_, fp_to_sp_delta_));
  }
  unsigned size = ComputeInputFrameSize();
  // +1 accounts for the receiver.
  int parameter_count =
      function == nullptr
          ? 0
          : (function->shared()->internal_formal_parameter_count() + 1);
  input_ = new (size) FrameDescription(size, parameter_count);
  input_->SetFrameType(frame_type);
}


// Locates the Code object being deoptimized: first via the deoptimized-code
// list, falling back to a code lookup by the return address |from_|.
Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
                                     Code* optimized_code) {
  switch (bailout_type_) {
    case Deoptimizer::SOFT:
    case Deoptimizer::EAGER:
    case Deoptimizer::LAZY: {
      Code* compiled_code = FindDeoptimizingCode(from_);
      return (compiled_code == NULL)
                 ? static_cast<Code*>(isolate_->FindCodeObject(from_))
                 : compiled_code;
    }
  }
  FATAL("Could not find code for optimized function");
  return NULL;
}


// Prints the deoptimizing function's name (or the code kind, for stubs) to
// the trace file.  Requires trace_scope_ to be non-NULL.
void Deoptimizer::PrintFunctionName() {
  if (function_ != nullptr && function_->IsJSFunction()) {
    function_->ShortPrint(trace_scope_->file());
  } else {
    PrintF(trace_scope_->file(),
           "%s", Code::Kind2String(compiled_code_->kind()));
  }
}


Deoptimizer::~Deoptimizer() {
  // DeleteFrameDescriptions() must have run before destruction.
  DCHECK(input_ == NULL && output_ == NULL);
  DCHECK(disallow_heap_allocation_ == NULL);
  delete trace_scope_;
}


void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    // The input frame may have been reused as an output frame; avoid a
    // double delete in that case.
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = NULL;
  output_ = NULL;
#ifdef DEBUG
  CHECK(!AllowHeapAllocation::IsAllowed());
  CHECK(disallow_heap_allocation_ != NULL);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = NULL;
#endif  // DEBUG
}


// Returns the address of deopt table entry |id| for the given bailout type,
// optionally generating the entry code first (ENSURE_ENTRY_CODE).
Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
                                            int id,
                                            BailoutType type,
                                            GetEntryMode mode) {
  CHECK_GE(id, 0);
  if (id >= kMaxNumberOfEntries) return NULL;
  if (mode == ENSURE_ENTRY_CODE) {
    EnsureCodeForDeoptimizationEntry(isolate, type, id);
  } else {
    CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
  }
  DeoptimizerData* data = isolate->deoptimizer_data();
  CHECK_LE(type, kLastBailoutType);
  MemoryChunk* base = data->deopt_entry_code_[type];
  return base->area_start() + (id * table_entry_size_);
}


// Inverse of GetDeoptimizationEntry: maps an entry address back to its id,
// or kNotDeoptimizationEntry if |addr| lies outside the table.
int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
                                     Address addr,
                                     BailoutType type) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  MemoryChunk* base = data->deopt_entry_code_[type];
  Address start = base->area_start();
  if (addr < start ||
      addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    return kNotDeoptimizationEntry;
  }
  DCHECK_EQ(0,
            static_cast<int>(addr - start) % table_entry_size_);
  return static_cast<int>(addr - start) / table_entry_size_;
}


// Looks up the pc-and-state word recorded for AST node |id| in the deopt
// output data; crashes with diagnostics if the deopt point cannot be found.
int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
                               BailoutId id,
                               SharedFunctionInfo* shared) {
  // TODO(kasperl): For now, we do a simple linear search for the PC
  // offset associated with the given node id. This should probably be
  // changed to a binary search.
  int length = data->DeoptPoints();
  for (int i = 0; i < length; i++) {
    if (data->AstId(i) == id) {
      return data->PcAndState(i)->value();
    }
  }
  OFStream os(stderr);
  os << "[couldn't find pc offset for node=" << id.ToInt() << "]\n"
     << "[method: " << shared->DebugName()->ToCString().get() << "]\n"
     << "[source:\n" << SourceCodeOf(shared) << "\n]" << std::endl;

  shared->GetHeap()->isolate()->PushStackTraceAndDie(0xfefefefe, data, shared,
                                                     0xfefefeff);
  FATAL("unable to find pc offset during deoptimization");
  return -1;
}


int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
  int length = 0;
  // Count all entries in the deoptimizing code list of every context.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      length++;
      element = code->next_code_link();
    }
    context = Context::cast(context)->next_context_link();
  }
  return length;
}

namespace {

// Finds the catch handler (if any) covering the current code position of
// |translated_frame|.  Returns the handler offset, or -1 if there is none;
// the handler's data (stack depth) is written to *data_out.
int LookupCatchHandler(TranslatedFrame* translated_frame, int* data_out) {
  switch (translated_frame->kind()) {
    case TranslatedFrame::kFunction: {
      // Full-codegen frame: map the AST id to a pc offset, then consult the
      // code's handler table.
      BailoutId node_id = translated_frame->node_id();
      JSFunction* function =
          JSFunction::cast(translated_frame->begin()->GetRawValue());
      Code* non_optimized_code = function->shared()->code();
      FixedArray* raw_data = non_optimized_code->deoptimization_data();
      DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
      unsigned pc_and_state =
          Deoptimizer::GetOutputInfo(data, node_id, function->shared());
      unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
      HandlerTable* table =
          HandlerTable::cast(non_optimized_code->handler_table());
      HandlerTable::CatchPrediction prediction;
      return table->LookupRange(pc_offset, data_out, &prediction);
    }
    case TranslatedFrame::kInterpretedFunction: {
      // Interpreted frame: the node id is directly the bytecode offset.
      int bytecode_offset = translated_frame->node_id().ToInt();
      JSFunction* function =
          JSFunction::cast(translated_frame->begin()->GetRawValue());
      BytecodeArray* bytecode = function->shared()->bytecode_array();
      HandlerTable* table = HandlerTable::cast(bytecode->handler_table());
      HandlerTable::CatchPrediction prediction;
      return table->LookupRange(bytecode_offset, data_out, &prediction);
    }
    default:
      break;
  }
  return -1;
}

}  // namespace

// We rely on this function
// not causing a GC. It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
  base::ElapsedTimer timer;

  // Determine basic deoptimization information. The optimized frame is
  // described by the input data.
  DeoptimizationInputData* input_data =
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());

  {
    // Read caller's PC, caller's FP and caller's constant pool values
    // from input frame. Compute caller's frame top address.

    Register fp_reg = JavaScriptFrame::fp_register();
    stack_fp_ = input_->GetRegister(fp_reg.code());

    caller_frame_top_ = stack_fp_ + ComputeInputFrameAboveFpFixedSize();

    Address fp_address = input_->GetFramePointerAddress();
    caller_fp_ = Memory::intptr_at(fp_address);
    caller_pc_ =
        Memory::intptr_at(fp_address + CommonFrameConstants::kCallerPCOffset);
    input_frame_context_ = Memory::intptr_at(
        fp_address + CommonFrameConstants::kContextOrFrameTypeOffset);

    if (FLAG_enable_embedded_constant_pool) {
      caller_constant_pool_ = Memory::intptr_at(
          fp_address + CommonFrameConstants::kConstantPoolOffset);
    }
  }

  if (trace_scope_ != NULL) {
    timer.Start();
    PrintF(trace_scope_->file(), "[deoptimizing (DEOPT %s): begin ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " (opt #%d) @%d, FP to SP delta: %d, caller sp: 0x%08" V8PRIxPTR
           "]\n",
           input_data->OptimizationId()->value(), bailout_id_, fp_to_sp_delta_,
           caller_frame_top_);
    if (bailout_type_ == EAGER || bailout_type_ == SOFT ||
        (compiled_code_->is_hydrogen_stub())) {
      compiled_code_->PrintDeoptLocation(trace_scope_->file(), from_);
    }
  }

  BailoutId node_id = input_data->AstId(bailout_id_);
  ByteArray* translations = input_data->TranslationByteArray();
  unsigned translation_index =
      input_data->TranslationIndex(bailout_id_)->value();

  // Materialize the translated state from the serialized translation and
  // the input frame's register values.
  TranslationIterator state_iterator(translations, translation_index);
  translated_state_.Init(
      input_->GetFramePointerAddress(), &state_iterator,
      input_data->LiteralArray(), input_->GetRegisterValues(),
      trace_scope_ == nullptr ? nullptr : trace_scope_->file());

  // Do the input frame to output frame(s) translation.
  size_t count = translated_state_.frames().size();
  // If we are supposed to go to the catch handler, find the catching frame
  // for the catch and make sure we only deoptimize upto that frame.
  if (deoptimizing_throw_) {
    size_t catch_handler_frame_index = count;
    // Search innermost-to-outermost for a frame with a catch handler.
    for (size_t i = count; i-- > 0;) {
      catch_handler_pc_offset_ = LookupCatchHandler(
          &(translated_state_.frames()[i]), &catch_handler_data_);
      if (catch_handler_pc_offset_ >= 0) {
        catch_handler_frame_index = i;
        break;
      }
    }
    CHECK_LT(catch_handler_frame_index, count);
    count = catch_handler_frame_index + 1;
  }

  DCHECK(output_ == NULL);
  output_ = new FrameDescription*[count];
  for (size_t i = 0; i < count; ++i) {
    output_[i] = NULL;
  }
  output_count_ = static_cast<int>(count);

  // Translate each output frame.
  int frame_index = 0;  // output_frame_index
  for (size_t i = 0; i < count; ++i, ++frame_index) {
    // Read the ast node id, function, and frame height for this output frame.
    TranslatedFrame* translated_frame = &(translated_state_.frames()[i]);
    switch (translated_frame->kind()) {
      case TranslatedFrame::kFunction:
        DoComputeJSFrame(translated_frame, frame_index,
                         deoptimizing_throw_ && i == count - 1);
        jsframe_count_++;
        break;
      case TranslatedFrame::kInterpretedFunction:
        DoComputeInterpretedFrame(translated_frame, frame_index,
                                  deoptimizing_throw_ && i == count - 1);
        jsframe_count_++;
        break;
      case TranslatedFrame::kArgumentsAdaptor:
        DoComputeArgumentsAdaptorFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kTailCallerFunction:
        DoComputeTailCallerFrame(translated_frame, frame_index);
        // Tail caller frame translations do not produce output frames.
        frame_index--;
        output_count_--;
        break;
      case TranslatedFrame::kConstructStub:
        DoComputeConstructStubFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kGetter:
        DoComputeAccessorStubFrame(translated_frame, frame_index, false);
        break;
      case TranslatedFrame::kSetter:
        DoComputeAccessorStubFrame(translated_frame, frame_index, true);
        break;
      case TranslatedFrame::kCompiledStub:
        DoComputeCompiledStubFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kInvalid:
        FATAL("invalid frame");
        break;
    }
  }

  // Print some helpful diagnostic information.
  if (trace_scope_ != NULL) {
    double ms = timer.Elapsed().InMillisecondsF();
    int index = output_count_ - 1;  // Index of the topmost frame.
    PrintF(trace_scope_->file(), "[deoptimizing (%s): end ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d => node=%d, pc=0x%08" V8PRIxPTR ", caller sp=0x%08" V8PRIxPTR
           ", state=%s, took %0.3f ms]\n",
           bailout_id_, node_id.ToInt(), output_[index]->GetPc(),
           caller_frame_top_, BailoutStateToString(static_cast<BailoutState>(
                                  output_[index]->GetState()->value())),
           ms);
  }
}

// Builds the output FrameDescription for one (possibly inlined) full-codegen
// JavaScript frame.
// NOTE(review): this definition continues beyond the end of this chunk.
void Deoptimizer::DoComputeJSFrame(TranslatedFrame* translated_frame,
                                   int frame_index, bool goto_catch_handler) {
  SharedFunctionInfo* shared = translated_frame->raw_shared_info();

  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  int input_index = 0;

  BailoutId node_id = translated_frame->node_id();
  unsigned height =
      translated_frame->height() - 1;  // Do not count the context.
  unsigned height_in_bytes = height * kPointerSize;
  if (goto_catch_handler) {
    // Take the stack height from the handler table.
    height = catch_handler_data_;
    // We also make space for the exception itself.
    height_in_bytes = (height + 1) * kPointerSize;
    CHECK(is_topmost);
  }

  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), "  translating frame ");
    base::SmartArrayPointer<char> name = shared->DebugName()->ToCString();
    PrintF(trace_scope_->file(), "%s", name.get());
    PrintF(trace_scope_->file(), " => node=%d, height=%d%s\n", node_id.ToInt(),
           height_in_bytes, goto_catch_handler ? " (throw)" : "");
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by JavaScriptFrameConstants.
  unsigned fixed_frame_size = ComputeJavascriptFixedSize(shared);
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  // +1 accounts for the receiver.
  int parameter_count = shared->internal_formal_parameter_count() + 1;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);

  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function. Synthesize their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame. For all subsequent output frames, it can be read from the
  // previous one. This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame. For all subsequent output frames, it can be
  // read from the previous one. Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = JavaScriptFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  output_offset -= kPointerSize;

  TranslatedFrame::iterator context_pos = value_iterator;
  int context_input_index = input_index;
  // When deoptimizing into a catch block, we need to take the context
  // from just above the top of the operand stack (we push the context
  // at the entry of the try block).
  if (goto_catch_handler) {
    for (unsigned i = 0; i < height + 1; ++i) {
      context_pos++;
      context_input_index++;
    }
  }
  // Read the context from the translations.
  Object* context = context_pos->GetRawValue();
  if (context->IsUndefined(isolate_)) {
    // If the context was optimized away, just use the context from
    // the activation. This should only apply to Crankshaft code.
    CHECK(!compiled_code_->is_turbofanned());
    context = is_bottommost ? reinterpret_cast<Object*>(input_frame_context_)
                            : function->context();
  }
  value = reinterpret_cast<intptr_t>(context);
  output_frame->SetContext(value);
  if (is_topmost) {
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), value);
  }
  WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                     "context    ");
  if (context == isolate_->heap()->arguments_marker()) {
    // The context is not yet materialized; record the slot so it gets a
    // real value later.
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    values_to_materialize_.push_back({output_address, context_pos});
  }
  value_iterator++;
  input_index++;

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");

  // Translate the rest of the frame.
1024 for (unsigned i = 0; i < height; ++i) { 1025 output_offset -= kPointerSize; 1026 WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index, 1027 output_offset); 1028 } 1029 if (goto_catch_handler) { 1030 // Write out the exception for the catch handler. 1031 output_offset -= kPointerSize; 1032 Object* exception_obj = reinterpret_cast<Object*>( 1033 input_->GetRegister(FullCodeGenerator::result_register().code())); 1034 WriteValueToOutput(exception_obj, input_index, frame_index, output_offset, 1035 "exception "); 1036 input_index++; 1037 } 1038 CHECK_EQ(0u, output_offset); 1039 1040 // Update constant pool. 1041 Code* non_optimized_code = shared->code(); 1042 if (FLAG_enable_embedded_constant_pool) { 1043 intptr_t constant_pool_value = 1044 reinterpret_cast<intptr_t>(non_optimized_code->constant_pool()); 1045 output_frame->SetConstantPool(constant_pool_value); 1046 if (is_topmost) { 1047 Register constant_pool_reg = 1048 JavaScriptFrame::constant_pool_pointer_register(); 1049 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value); 1050 } 1051 } 1052 1053 // Compute this frame's PC, state, and continuation. 1054 FixedArray* raw_data = non_optimized_code->deoptimization_data(); 1055 DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data); 1056 Address start = non_optimized_code->instruction_start(); 1057 unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared()); 1058 unsigned pc_offset = goto_catch_handler 1059 ? catch_handler_pc_offset_ 1060 : FullCodeGenerator::PcField::decode(pc_and_state); 1061 intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset); 1062 output_frame->SetPc(pc_value); 1063 1064 // If we are going to the catch handler, then the exception lives in 1065 // the accumulator. 1066 BailoutState state = 1067 goto_catch_handler 1068 ? 
BailoutState::TOS_REGISTER 1069 : FullCodeGenerator::BailoutStateField::decode(pc_and_state); 1070 output_frame->SetState(Smi::FromInt(static_cast<int>(state))); 1071 1072 // Set the continuation for the topmost frame. 1073 if (is_topmost) { 1074 Builtins* builtins = isolate_->builtins(); 1075 Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized); 1076 if (bailout_type_ == LAZY) { 1077 continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized); 1078 } else if (bailout_type_ == SOFT) { 1079 continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized); 1080 } else { 1081 CHECK_EQ(bailout_type_, EAGER); 1082 } 1083 output_frame->SetContinuation( 1084 reinterpret_cast<intptr_t>(continuation->entry())); 1085 } 1086 } 1087 1088 void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame, 1089 int frame_index, 1090 bool goto_catch_handler) { 1091 SharedFunctionInfo* shared = translated_frame->raw_shared_info(); 1092 1093 TranslatedFrame::iterator value_iterator = translated_frame->begin(); 1094 int input_index = 0; 1095 1096 int bytecode_offset = translated_frame->node_id().ToInt(); 1097 unsigned height = translated_frame->height(); 1098 unsigned height_in_bytes = height * kPointerSize; 1099 JSFunction* function = JSFunction::cast(value_iterator->GetRawValue()); 1100 value_iterator++; 1101 input_index++; 1102 if (trace_scope_ != NULL) { 1103 PrintF(trace_scope_->file(), " translating interpreted frame "); 1104 base::SmartArrayPointer<char> name = shared->DebugName()->ToCString(); 1105 PrintF(trace_scope_->file(), "%s", name.get()); 1106 PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d%s\n", 1107 bytecode_offset, height_in_bytes, 1108 goto_catch_handler ? " (throw)" : ""); 1109 } 1110 if (goto_catch_handler) { 1111 bytecode_offset = catch_handler_pc_offset_; 1112 } 1113 1114 // The 'fixed' part of the frame consists of the incoming parameters and 1115 // the part described by InterpreterFrameConstants. 
  unsigned fixed_frame_size = ComputeInterpretedFixedSize(shared);
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  // +1 accounts for the receiver.
  int parameter_count = shared->internal_formal_parameter_count() + 1;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  output_frame->SetFrameType(StackFrame::INTERPRETED);

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.  Slots are filled from the
  // high end of the frame downwards; output_offset must end at exactly 0.
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, the function, new.target and the bytecode offset.  Synthesize
  // their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame.  For all subsequent output frames, it can be read from the
  // previous one.  This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.  Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = InterpretedFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  Register context_reg = InterpretedFrame::context_register();
  output_offset -= kPointerSize;

  // When deoptimizing into a catch block, we need to take the context
  // from a register that was specified in the handler table.
  TranslatedFrame::iterator context_pos = value_iterator;
  int context_input_index = input_index;
  if (goto_catch_handler) {
    // Skip to the translated value of the register specified
    // in the handler table.
    for (int i = 0; i < catch_handler_data_ + 1; ++i) {
      context_pos++;
      context_input_index++;
    }
  }
  // Read the context from the translations.
  Object* context = context_pos->GetRawValue();
  // The context should not be a placeholder for a materialized object.
  CHECK(context != isolate_->heap()->arguments_marker());
  value = reinterpret_cast<intptr_t>(context);
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
  WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                     "context ");
  value_iterator++;
  input_index++;

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function ");

  // The new.target slot is only used during function activation which is
  // before the first deopt point, so should never be needed. Just set it to
  // undefined.
  output_offset -= kPointerSize;
  Object* new_target = isolate_->heap()->undefined_value();
  WriteValueToOutput(new_target, 0, frame_index, output_offset, "new_target ");

  // Set the bytecode array pointer.
  output_offset -= kPointerSize;
  Object* bytecode_array = shared->bytecode_array();
  WriteValueToOutput(bytecode_array, 0, frame_index, output_offset,
                     "bytecode array ");

  // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
  // Stored as a raw offset from the start of the BytecodeArray object
  // (header included), encoded as a Smi.
  output_offset -= kPointerSize;
  int raw_bytecode_offset =
      BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset;
  Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
  WriteValueToOutput(smi_bytecode_offset, 0, frame_index, output_offset,
                     "bytecode offset ");

  // Translate the rest of the interpreter registers in the frame.
  // height - 1 because the last translated value is the accumulator,
  // handled separately below.
  for (unsigned i = 0; i < height - 1; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // Put the accumulator on the stack. It will be popped by the
  // InterpreterNotifyDeopt builtin (possibly after materialization).
  output_offset -= kPointerSize;
  if (goto_catch_handler) {
    // If we are lazy deopting to a catch handler, we set the accumulator to
    // the exception (which lives in the result register).
    intptr_t accumulator_value =
        input_->GetRegister(FullCodeGenerator::result_register().code());
    WriteValueToOutput(reinterpret_cast<Object*>(accumulator_value), 0,
                       frame_index, output_offset, "accumulator ");
    value_iterator++;
  } else {
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }
  // Every byte of the frame must have been written exactly once.
  CHECK_EQ(0u, output_offset);

  Builtins* builtins = isolate_->builtins();
  Code* dispatch_builtin =
      builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
  output_frame->SetPc(reinterpret_cast<intptr_t>(dispatch_builtin->entry()));
  // Restore accumulator (TOS) register.
  output_frame->SetState(
      Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(dispatch_builtin->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          InterpretedFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Set the continuation for the topmost frame.
  if (is_topmost) {
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    if (bailout_type_ == LAZY) {
      continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
    } else if (bailout_type_ == SOFT) {
      continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
    } else {
      CHECK_EQ(bailout_type_, EAGER);
    }
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}

// Materializes the output FrameDescription for an arguments adaptor frame
// (StackFrame::ARGUMENTS_ADAPTOR) — the synthetic frame inserted when a
// function is called with a mismatched argument count.  Such a frame is
// never the topmost one, so no registers are seeded.
void Deoptimizer::DoComputeArgumentsAdaptorFrame(
    TranslatedFrame* translated_frame, int frame_index) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  int input_index = 0;

  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;
  // The first translated value of the frame is the function itself.
  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFixedFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  int parameter_count = height;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);

  // Arguments adaptor can not be topmost.
  CHECK(frame_index < output_count_ - 1);
  CHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.  Slots are filled from the
  // high end of the frame downwards; output_offset must end at exactly 0.
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // A marker value is used in place of the context.
  output_offset -= kPointerSize;
  intptr_t context = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  output_frame->SetFrameSlot(output_offset, context);
  DebugPrintOutputSlot(context, frame_index, output_offset,
                       "context (adaptor sentinel)\n");

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function ");

  // Number of incoming arguments.
  // height - 1 excludes the receiver.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
  }

  DCHECK(0 == output_offset);

  // Resume inside the adaptor trampoline at its recorded deopt pc.
  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->instruction_start() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}

// Handles a tail-caller frame in the translation.  Produces no output
// frame of its own: its only effect is, for the bottommost frame, to drop
// an arguments adaptor frame sitting below the deoptimizing frame by
// rewriting caller_frame_top_/caller_fp_/caller_pc_ to the adaptor's caller.
void Deoptimizer::DoComputeTailCallerFrame(TranslatedFrame* translated_frame,
                                           int frame_index) {
  SharedFunctionInfo* shared = translated_frame->raw_shared_info();

  bool is_bottommost = (0 == frame_index);
  // Tail caller frame can't be topmost.
  CHECK_NE(output_count_ - 1, frame_index);

  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), "  translating tail caller frame ");
    base::SmartArrayPointer<char> name = shared->DebugName()->ToCString();
    PrintF(trace_scope_->file(), "%s\n", name.get());
  }

  if (!is_bottommost) return;

  // Drop arguments adaptor frame below current frame if it exists.
  Address fp_address = input_->GetFramePointerAddress();
  Address adaptor_fp_address =
      Memory::Address_at(fp_address + CommonFrameConstants::kCallerFPOffset);

  // Nothing to do unless the caller frame is an arguments adaptor frame
  // (identified by its frame-type sentinel in the context slot).
  if (Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR) !=
      Memory::Object_at(adaptor_fp_address +
                        CommonFrameConstants::kContextOrFrameTypeOffset)) {
    return;
  }

  int caller_params_count =
      Smi::cast(
          Memory::Object_at(adaptor_fp_address +
                            ArgumentsAdaptorFrameConstants::kLengthOffset))
          ->value();

  int callee_params_count =
      function_->shared()->internal_formal_parameter_count();

  // Both caller and callee parameters count do not include receiver.
  int offset = (caller_params_count - callee_params_count) * kPointerSize;
  intptr_t new_stack_fp =
      reinterpret_cast<intptr_t>(adaptor_fp_address) + offset;

  intptr_t new_caller_frame_top = new_stack_fp +
                                  (callee_params_count + 1) * kPointerSize +
                                  CommonFrameConstants::kFixedFrameSizeAboveFp;

  intptr_t adaptor_caller_pc = Memory::intptr_at(
      adaptor_fp_address + CommonFrameConstants::kCallerPCOffset);
  intptr_t adaptor_caller_fp = Memory::intptr_at(
      adaptor_fp_address + CommonFrameConstants::kCallerFPOffset);

  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  dropping caller arguments adaptor frame: offset=%d, "
           "fp: 0x%08" V8PRIxPTR " -> 0x%08" V8PRIxPTR
           ", "
           "caller sp: 0x%08" V8PRIxPTR " -> 0x%08" V8PRIxPTR "\n",
           offset, stack_fp_, new_stack_fp, caller_frame_top_,
           new_caller_frame_top);
  }
  // Splice out the adaptor frame: subsequent frame building uses the
  // adaptor's caller as this frame's caller.
  caller_frame_top_ = new_caller_frame_top;
  caller_fp_ = adaptor_caller_fp;
  caller_pc_ = adaptor_caller_pc;
}

// Materializes the output FrameDescription for an inlined construct stub
// frame (StackFrame::CONSTRUCT).  The frame resumes inside
// JSConstructStubGeneric at its recorded deopt pc.
void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
                                              int frame_index) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_topmost = (output_count_ - 1 == frame_index);
  // The construct frame could become topmost only if we inlined a constructor
  // call which does a tail call (otherwise the tail callee's frame would be
  // the topmost one). So it could only be the LAZY case.
  CHECK(!is_topmost || bailout_type_ == LAZY);
  int input_index = 0;

  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;

  // If the construct frame appears to be topmost we should ensure that the
  // value of result register is preserved during continuation execution.
  // We do this here by "pushing" the result of the constructor function to the
  // top of the reconstructed stack and then using the
  // BailoutState::TOS_REGISTER machinery.
  if (is_topmost) {
    height_in_bytes += kPointerSize;
  }

  // Skip function.
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating construct stub => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ConstructFrameConstants::kFixedFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new (output_frame_size) FrameDescription(output_frame_size);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost.
  // NOTE(review): this comment looks stale — the is_topmost handling above
  // explicitly supports a topmost construct frame, and the DCHECK below only
  // requires the frame not to be bottommost.  Confirm against upstream.
  DCHECK(frame_index > 0 && frame_index < output_count_);
  DCHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    // The allocated receiver of a construct stub frame is passed as the
    // receiver parameter through the translation. It might be encoding
    // a captured object, override the slot address for a captured object.
    WriteTranslatedValueToOutput(
        &value_iterator, &input_index, frame_index, output_offset, nullptr,
        (i == 0) ? reinterpret_cast<Address>(top_address) : nullptr);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = JavaScriptFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // A marker value is used to mark the frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset,
                       "typed frame marker\n");

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (is_topmost) {
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");

  // The allocation site.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(isolate_->heap()->undefined_value());
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "allocation site\n");

  // Number of incoming arguments.
  // height - 1 excludes the receiver.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
1628 output_offset -= kPointerSize; 1629 value = output_frame->GetFrameSlot(output_frame_size - kPointerSize); 1630 output_frame->SetFrameSlot(output_offset, value); 1631 DebugPrintOutputSlot(value, frame_index, output_offset, 1632 "allocated receiver\n"); 1633 1634 if (is_topmost) { 1635 // Ensure the result is restored back when we return to the stub. 1636 output_offset -= kPointerSize; 1637 Register result_reg = FullCodeGenerator::result_register(); 1638 value = input_->GetRegister(result_reg.code()); 1639 output_frame->SetFrameSlot(output_offset, value); 1640 DebugPrintOutputSlot(value, frame_index, output_offset, 1641 "constructor result\n"); 1642 1643 output_frame->SetState( 1644 Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER))); 1645 } 1646 1647 CHECK_EQ(0u, output_offset); 1648 1649 intptr_t pc = reinterpret_cast<intptr_t>( 1650 construct_stub->instruction_start() + 1651 isolate_->heap()->construct_stub_deopt_pc_offset()->value()); 1652 output_frame->SetPc(pc); 1653 if (FLAG_enable_embedded_constant_pool) { 1654 intptr_t constant_pool_value = 1655 reinterpret_cast<intptr_t>(construct_stub->constant_pool()); 1656 output_frame->SetConstantPool(constant_pool_value); 1657 if (is_topmost) { 1658 Register constant_pool_reg = 1659 JavaScriptFrame::constant_pool_pointer_register(); 1660 output_frame->SetRegister(constant_pool_reg.code(), fp_value); 1661 } 1662 } 1663 1664 // Set the continuation for the topmost frame. 
  if (is_topmost) {
    // A topmost frame needs a continuation builtin to run when the deopted
    // code returns; only lazy deopts can produce a topmost construct frame.
    Builtins* builtins = isolate_->builtins();
    DCHECK_EQ(LAZY, bailout_type_);
    Code* continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}

// Builds the output FrameDescription for an inlined accessor (getter/setter)
// call, reconstructing the StackFrame::INTERNAL frame the accessor deopt
// stubs expect. The frame is filled top-down, decrementing output_offset.
void Deoptimizer::DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
                                             int frame_index,
                                             bool is_setter_stub_frame) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_topmost = (output_count_ - 1 == frame_index);
  // The accessor frame could become topmost only if we inlined an accessor
  // call which does a tail call (otherwise the tail callee's frame would be
  // the topmost one). So it could only be the LAZY case.
  CHECK(!is_topmost || bailout_type_ == LAZY);
  int input_index = 0;

  // Skip accessor.
  value_iterator++;
  input_index++;
  // The receiver (and the implicit return value, if any) are expected in
  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
  // frame. This means that we have to use a height of 0.
  unsigned height = 0;
  unsigned height_in_bytes = height * kPointerSize;

  // If the accessor frame appears to be topmost we should ensure that the
  // value of result register is preserved during continuation execution.
  // We do this here by "pushing" the result of the accessor function to the
  // top of the reconstructed stack and then using the
  // BailoutState::TOS_REGISTER machinery.
  // We don't need to restore the result in case of a setter call because we
  // have to return the stored value but not the result of the setter function.
  bool should_preserve_result = is_topmost && !is_setter_stub_frame;
  if (should_preserve_result) {
    height_in_bytes += kPointerSize;
  }

  const char* kind = is_setter_stub_frame ? "setter" : "getter";
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating %s stub => height=%u\n", kind, height_in_bytes);
  }

  // We need 1 stack entry for the return address and enough entries for the
  // StackFrame::INTERNAL (FP, frame type, context, code object and constant
  // pool (if enabled)- see MacroAssembler::EnterFrame).
  // For a setter stub frame we need one additional entry for the implicit
  // return value, see StoreStubCompiler::CompileStoreViaSetter.
  unsigned fixed_frame_entries =
      (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
      (is_setter_stub_frame ? 1 : 0);
  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  // (FrameDescription uses placement-style operator new taking the frame
  // size, so the slot storage is allocated together with the object.)
  FrameDescription* output_frame =
      new (output_frame_size) FrameDescription(output_frame_size);
  output_frame->SetFrameType(StackFrame::INTERNAL);

  // A frame for an accessor stub can not be bottommost.
  CHECK(frame_index > 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Fill the frame from the highest offset downwards.
  unsigned output_offset = output_frame_size;

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  // This frame's FP points just below the caller-fp slot we wrote.
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    // The topmost frame's FP register is materialized on deopt exit.
    Register fp_reg = JavaScriptFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // Set the frame type (the INTERNAL marker slot).
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "frame type ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%s sentinel)\n", kind);
  }

  // Get Code object from accessor stub.
  output_offset -= kPointerSize;
  Builtins::Name name = is_setter_stub_frame ?
      Builtins::kStoreIC_Setter_ForDeopt :
      Builtins::kLoadIC_Getter_ForDeopt;
  Code* accessor_stub = isolate_->builtins()->builtin(name);
  value = reinterpret_cast<intptr_t>(accessor_stub);
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "code object\n");

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (is_topmost) {
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");

  // Skip receiver (it is passed to the IC in a register, not on the stack).
  value_iterator++;
  input_index++;

  if (is_setter_stub_frame) {
    // The implicit return value was part of the artificial setter stub
    // environment.
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  if (should_preserve_result) {
    // Ensure the result is restored back when we return to the stub.
    output_offset -= kPointerSize;
    Register result_reg = FullCodeGenerator::result_register();
    value = input_->GetRegister(result_reg.code());
    output_frame->SetFrameSlot(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "accessor result\n");

    output_frame->SetState(
        Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));
  } else {
    output_frame->SetState(
        Smi::FromInt(static_cast<int>(BailoutState::NO_REGISTERS)));
  }

  // Every slot of the output frame must have been filled exactly once.
  CHECK_EQ(0u, output_offset);

  // Resume pc inside the accessor stub, recorded by the heap at stub
  // compilation time.
  Smi* offset = is_setter_stub_frame ?
1827 isolate_->heap()->setter_stub_deopt_pc_offset() : 1828 isolate_->heap()->getter_stub_deopt_pc_offset(); 1829 intptr_t pc = reinterpret_cast<intptr_t>( 1830 accessor_stub->instruction_start() + offset->value()); 1831 output_frame->SetPc(pc); 1832 if (FLAG_enable_embedded_constant_pool) { 1833 intptr_t constant_pool_value = 1834 reinterpret_cast<intptr_t>(accessor_stub->constant_pool()); 1835 output_frame->SetConstantPool(constant_pool_value); 1836 if (is_topmost) { 1837 Register constant_pool_reg = 1838 JavaScriptFrame::constant_pool_pointer_register(); 1839 output_frame->SetRegister(constant_pool_reg.code(), fp_value); 1840 } 1841 } 1842 1843 // Set the continuation for the topmost frame. 1844 if (is_topmost) { 1845 Builtins* builtins = isolate_->builtins(); 1846 DCHECK_EQ(LAZY, bailout_type_); 1847 Code* continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized); 1848 output_frame->SetContinuation( 1849 reinterpret_cast<intptr_t>(continuation->entry())); 1850 } 1851 } 1852 1853 void Deoptimizer::DoComputeCompiledStubFrame(TranslatedFrame* translated_frame, 1854 int frame_index) { 1855 // 1856 // FROM TO 1857 // | .... | | .... | 1858 // +-------------------------+ +-------------------------+ 1859 // | JSFunction continuation | | JSFunction continuation | 1860 // +-------------------------+ +-------------------------+ 1861 // | | saved frame (FP) | | saved frame (FP) | 1862 // | +=========================+<-fpreg +=========================+<-fpreg 1863 // | |constant pool (if ool_cp)| |constant pool (if ool_cp)| 1864 // | +-------------------------+ +-------------------------| 1865 // | | JSFunction context | | JSFunction context | 1866 // v +-------------------------+ +-------------------------| 1867 // | COMPILED_STUB marker | | STUB_FAILURE marker | 1868 // +-------------------------+ +-------------------------+ 1869 // | | | caller args.arguments_ | 1870 // | ... 
| +-------------------------+ 1871 // | | | caller args.length_ | 1872 // |-------------------------|<-spreg +-------------------------+ 1873 // | caller args pointer | 1874 // +-------------------------+ 1875 // | caller stack param 1 | 1876 // parameters in registers +-------------------------+ 1877 // and spilled to stack | .... | 1878 // +-------------------------+ 1879 // | caller stack param n | 1880 // +-------------------------+<-spreg 1881 // reg = number of parameters 1882 // reg = failure handler address 1883 // reg = saved frame 1884 // reg = JSFunction context 1885 // 1886 // Caller stack params contain the register parameters to the stub first, 1887 // and then, if the descriptor specifies a constant number of stack 1888 // parameters, the stack parameters as well. 1889 1890 TranslatedFrame::iterator value_iterator = translated_frame->begin(); 1891 int input_index = 0; 1892 1893 CHECK(compiled_code_->is_hydrogen_stub()); 1894 int major_key = CodeStub::GetMajorKey(compiled_code_); 1895 CodeStubDescriptor descriptor(isolate_, compiled_code_->stub_key()); 1896 1897 // The output frame must have room for all pushed register parameters 1898 // and the standard stack frame slots. Include space for an argument 1899 // object to the callee and optionally the space to pass the argument 1900 // object to the stub failure handler. 1901 int param_count = descriptor.GetRegisterParameterCount(); 1902 int stack_param_count = descriptor.GetStackParameterCount(); 1903 // The translated frame contains all of the register parameters 1904 // plus the context. 
  CHECK_EQ(translated_frame->height(), param_count + 1);
  CHECK_GE(param_count, 0);

  int height_in_bytes = kPointerSize * (param_count + stack_param_count);
  int fixed_frame_size = StubFailureTrampolineFrameConstants::kFixedFrameSize;
  int output_frame_size = height_in_bytes + fixed_frame_size;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating %s => StubFailureTrampolineStub, height=%d\n",
           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key)),
           height_in_bytes);
  }

  // The stub failure trampoline is a single frame.
  FrameDescription* output_frame =
      new (output_frame_size) FrameDescription(output_frame_size);
  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
  CHECK_EQ(frame_index, 0);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address = caller_frame_top_ - output_frame_size;
  output_frame->SetTop(top_address);

  // Set caller's PC (JSFunction continuation).
  unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
  intptr_t value = caller_pc_;
  output_frame->SetCallerPc(output_frame_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       "caller's pc\n");

  // Read caller's FP from the input frame, and set this frame's FP.
  value = caller_fp_;
  output_frame_offset -= kFPOnStackSize;
  output_frame->SetCallerFp(output_frame_offset, value);
  intptr_t frame_ptr = top_address + output_frame_offset;
  Register fp_reg = StubFailureTrampolineFrame::fp_register();
  output_frame->SetRegister(fp_reg.code(), frame_ptr);
  output_frame->SetFp(frame_ptr);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the input frame.
    value = caller_constant_pool_;
    output_frame_offset -= kPointerSize;
    output_frame->SetCallerConstantPool(output_frame_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                         "caller's constant_pool\n");
  }

  // The marker for the typed stack frame
  output_frame_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
  output_frame->SetFrameSlot(output_frame_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                       "function (stub failure sentinel)\n");

  intptr_t caller_arg_count = stack_param_count;
  // If the descriptor has no valid stack_parameter_count register, the count
  // is a compile-time constant; otherwise it is only known at runtime and is
  // patched in after the register-parameter loop below.
  bool arg_count_known = !descriptor.stack_parameter_count().is_valid();

  // Build the Arguments object for the caller's parameters and a pointer to it.
  output_frame_offset -= kPointerSize;
  int args_arguments_offset = output_frame_offset;
  intptr_t the_hole = reinterpret_cast<intptr_t>(
      isolate_->heap()->the_hole_value());
  if (arg_count_known) {
    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
        (caller_arg_count - 1) * kPointerSize;
  } else {
    value = the_hole;
  }

  output_frame->SetFrameSlot(args_arguments_offset, value);
  DebugPrintOutputSlot(
      value, frame_index, args_arguments_offset,
      arg_count_known ? "args.arguments\n" : "args.arguments (the hole)\n");

  output_frame_offset -= kPointerSize;
  int length_frame_offset = output_frame_offset;
  value = arg_count_known ? caller_arg_count : the_hole;
  output_frame->SetFrameSlot(length_frame_offset, value);
  DebugPrintOutputSlot(
      value, frame_index, length_frame_offset,
      arg_count_known ? "args.length\n" : "args.length (the hole)\n");

  // Pointer to the Arguments triple just written above.
  output_frame_offset -= kPointerSize;
  value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
      (output_frame_size - output_frame_offset) + kPointerSize;
  output_frame->SetFrameSlot(output_frame_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_frame_offset, "args*\n");

  // Copy the register parameters to the failure frame.
  int arguments_length_offset = -1;
  for (int i = 0; i < param_count; ++i) {
    output_frame_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, 0,
                                 output_frame_offset);

    // Remember where the runtime argument count landed so it can be read
    // back once all parameters are translated.
    if (!arg_count_known &&
        descriptor.GetRegisterParameter(i)
            .is(descriptor.stack_parameter_count())) {
      arguments_length_offset = output_frame_offset;
    }
  }

  // The last translated value is the context.
  Object* maybe_context = value_iterator->GetRawValue();
  CHECK(maybe_context->IsContext());
  Register context_reg = StubFailureTrampolineFrame::context_register();
  value = reinterpret_cast<intptr_t>(maybe_context);
  output_frame->SetRegister(context_reg.code(), value);
  ++value_iterator;

  // Copy constant stack parameters to the failure frame. If the number of stack
  // parameters is not known in the descriptor, the arguments object is the way
  // to access them.
  for (int i = 0; i < stack_param_count; i++) {
    output_frame_offset -= kPointerSize;
    Object** stack_parameter = reinterpret_cast<Object**>(
        frame_ptr + StandardFrameConstants::kCallerSPOffset +
        (stack_param_count - i - 1) * kPointerSize);
    value = reinterpret_cast<intptr_t>(*stack_parameter);
    output_frame->SetFrameSlot(output_frame_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_frame_offset,
                         "stack parameter\n");
  }

  CHECK_EQ(0u, output_frame_offset);

  if (!arg_count_known) {
    CHECK_GE(arguments_length_offset, 0);
    // We know it's a smi because 1) the code stub guarantees the stack
    // parameter count is in smi range, and 2) the DoTranslateCommand in the
    // parameter loop above translated that to a tagged value.
    Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
        output_frame->GetFrameSlot(arguments_length_offset));
    caller_arg_count = smi_caller_arg_count->value();
    // Patch args.length and args.arguments, which were written as the-hole
    // placeholders above.
    output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
    DebugPrintOutputSlot(caller_arg_count, frame_index, length_frame_offset,
                         "args.length\n");
    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
        (caller_arg_count - 1) * kPointerSize;
    output_frame->SetFrameSlot(args_arguments_offset, value);
    DebugPrintOutputSlot(value, frame_index, args_arguments_offset,
                         "args.arguments");
  }

  // Copy the double registers from the input into the output frame.
  CopyDoubleRegisters(output_frame);

  // Fill registers containing handler and number of parameters.
  SetPlatformCompiledStubRegisters(output_frame, &descriptor);

  // Compute this frame's PC, state, and continuation.
  Code* trampoline = NULL;
  StubFunctionMode function_mode = descriptor.function_mode();
  StubFailureTrampolineStub(isolate_, function_mode)
      .FindCodeInCache(&trampoline);
  DCHECK(trampoline != NULL);
  output_frame->SetPc(reinterpret_cast<intptr_t>(
      trampoline->instruction_start()));
  if (FLAG_enable_embedded_constant_pool) {
    Register constant_pool_reg =
        StubFailureTrampolineFrame::constant_pool_pointer_register();
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
  }
  output_frame->SetState(
      Smi::FromInt(static_cast<int>(BailoutState::NO_REGISTERS)));
  Code* notify_failure =
      isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
  output_frame->SetContinuation(
      reinterpret_cast<intptr_t>(notify_failure->entry()));
}


// Writes all values recorded in values_to_materialize_ into their output
// frame slots, then drops this frame's entry from the materialized object
// store.
void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
  // Walk to the last JavaScript output frame to find out if it has
  // adapted arguments.
  for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
    if (frame_index != 0) it->Advance();
  }
  translated_state_.Prepare(it->frame()->has_adapted_arguments(),
                            reinterpret_cast<Address>(stack_fp_));

  for (auto& materialization : values_to_materialize_) {
    // GetValue() may allocate the materialized object on the heap.
    Handle<Object> value = materialization.value_->GetValue();

    if (trace_scope_ != nullptr) {
      PrintF("Materialization [0x%08" V8PRIxPTR "] <- 0x%08" V8PRIxPTR " ; ",
             reinterpret_cast<intptr_t>(materialization.output_slot_address_),
             reinterpret_cast<intptr_t>(*value));
      value->ShortPrint(trace_scope_->file());
      PrintF(trace_scope_->file(), "\n");
    }

    // Patch the previously written marker slot with the real object.
    *(reinterpret_cast<intptr_t*>(materialization.output_slot_address_)) =
        reinterpret_cast<intptr_t>(*value);
  }

  isolate_->materialized_object_store()->Remove(
      reinterpret_cast<Address>(stack_fp_));
}


// Writes the next translated value into the given output slot and advances
// both the translation iterator and input_index. Values that are still the
// arguments marker are queued in values_to_materialize_ so that
// MaterializeHeapObjects can patch them later (materialization may allocate,
// which is not allowed while frames are being built).
void Deoptimizer::WriteTranslatedValueToOutput(
    TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
    unsigned output_offset, const char* debug_hint_string,
    Address output_address_for_materialization) {
  Object* value = (*iterator)->GetRawValue();

  WriteValueToOutput(value, *input_index, frame_index, output_offset,
                     debug_hint_string);

  if (value == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    // Callers may redirect materialization to a different slot; default to
    // the slot just written.
    if (output_address_for_materialization == nullptr) {
      output_address_for_materialization = output_address;
    }
    values_to_materialize_.push_back(
        {output_address_for_materialization, *iterator});
  }

  (*iterator)++;
  (*input_index)++;
}


// Stores a raw Object pointer into an output frame slot, with optional
// tracing of the slot address, value, and input index.
void Deoptimizer::WriteValueToOutput(Object* value, int input_index,
                                     int frame_index, unsigned output_offset,
                                     const char* debug_hint_string) {
  output_[frame_index]->SetFrameSlot(output_offset,
                                     reinterpret_cast<intptr_t>(value));

  if (trace_scope_ != nullptr) {
    DebugPrintOutputSlot(reinterpret_cast<intptr_t>(value), frame_index,
                         output_offset, debug_hint_string);
    value->ShortPrint(trace_scope_->file());
    PrintF(trace_scope_->file(), " (input #%d)\n", input_index);
  }
}


// Trace helper: prints "<address>: [top + offset] <- value ; hint" for a
// single output frame slot. No-op unless tracing is enabled.
void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index,
                                       unsigned output_offset,
                                       const char* debug_hint_string) {
  if (trace_scope_ != nullptr) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s",
           reinterpret_cast<intptr_t>(output_address), output_offset, value,
           debug_hint_string == nullptr ? "" : debug_hint_string);
  }
}

// Size of the fixed input-frame part above the frame pointer: the common
// fixed slots plus, when the function slot holds a real JSFunction (not a
// Smi marker), the incoming arguments.
unsigned Deoptimizer::ComputeInputFrameAboveFpFixedSize() const {
  unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
  if (!function_->IsSmi()) {
    fixed_size += ComputeIncomingArgumentSize(function_->shared());
  }
  return fixed_size;
}

// Total size of the input frame, derived from the fp-to-sp delta recorded at
// the deopt point. Cross-checked below against the stack-slot count of the
// optimized code.
unsigned Deoptimizer::ComputeInputFrameSize() const {
  // The fp-to-sp delta already takes the context, constant pool pointer and the
  // function into account so we have to avoid double counting them.
  unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
  unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    // Sanity check: the delta-based size must agree with the size computed
    // from the code object's stack slots and outgoing arguments.
    unsigned stack_slots = compiled_code_->stack_slots();
    unsigned outgoing_size =
        ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
    CHECK_EQ(fixed_size_above_fp + (stack_slots * kPointerSize) -
                 CommonFrameConstants::kFixedFrameSizeAboveFp + outgoing_size,
             result);
  }
  return result;
}

// static
unsigned Deoptimizer::ComputeJavascriptFixedSize(SharedFunctionInfo* shared) {
  // The fixed part of the frame consists of the return address, frame
  // pointer, function, context, and all the incoming arguments.
  return ComputeIncomingArgumentSize(shared) +
         StandardFrameConstants::kFixedFrameSize;
}

// static
unsigned Deoptimizer::ComputeInterpretedFixedSize(SharedFunctionInfo* shared) {
  // The fixed part of the frame consists of the return address, frame
  // pointer, function, context, new.target, bytecode offset and all the
  // incoming arguments.
  return ComputeIncomingArgumentSize(shared) +
         InterpreterFrameConstants::kFixedFrameSize;
}

// static
// Byte size of the incoming arguments: declared formal parameters plus one
// for the receiver.
unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo* shared) {
  return (shared->internal_formal_parameter_count() + 1) * kPointerSize;
}


// static
// Byte size of the outgoing arguments at the given bailout point, looked up
// in the code's deoptimization input data.
unsigned Deoptimizer::ComputeOutgoingArgumentSize(Code* code,
                                                  unsigned bailout_id) {
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  unsigned height = data->ArgumentsStackHeight(bailout_id)->value();
  return height * kPointerSize;
}


// Fetches literal `index` from the deoptimizing code's literal array.
Object* Deoptimizer::ComputeLiteral(int index) const {
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  return literals->get(index);
}


// Ensures that the deopt entry table for `type` contains at least
// max_entry_id + 1 entries, regenerating and recommitting the table chunk if
// it has to grow.
void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                                   BailoutType type,
                                                   int max_entry_id) {
  // We cannot run this if the serializer is enabled because this will
  // cause us to emit relocation information for the external
  // references. This is fine because the deoptimizer's code section
  // isn't meant to be serialized at all.
  CHECK(type == EAGER || type == SOFT || type == LAZY);
  DeoptimizerData* data = isolate->deoptimizer_data();
  int entry_count = data->deopt_entry_code_entries_[type];
  // Fast path: the table already covers the requested entry.
  if (max_entry_id < entry_count) return;
  // Grow by doubling, starting from the minimum table size.
  entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
  while (max_entry_id >= entry_count) entry_count *= 2;
  CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);

  // Assemble the whole entry table from scratch at the new size.
  MacroAssembler masm(isolate, NULL, 16 * KB, CodeObjectRequired::kYes);
  masm.set_emit_debug_code(false);
  GenerateDeoptimizationEntries(&masm, entry_count, type);
  CodeDesc desc;
  masm.GetCode(&desc);
  // The table must be position-independent (see the serializer note above).
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  // Commit enough area in the preallocated chunk and copy the code over.
  MemoryChunk* chunk = data->deopt_entry_code_[type];
  CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
        desc.instr_size);
  if (!chunk->CommitArea(desc.instr_size)) {
    V8::FatalProcessOutOfMemory(
        "Deoptimizer::EnsureCodeForDeoptimizationEntry");
  }
  CopyBytes(chunk->area_start(), desc.buffer,
            static_cast<size_t>(desc.instr_size));
  Assembler::FlushICache(isolate, chunk->area_start(), desc.instr_size);

  data->deopt_entry_code_entries_[type] = entry_count;
}

// Zaps every register and frame slot so that uninitialized reads are easy to
// spot; real values are filled in by the DoCompute* frame builders.
FrameDescription::FrameDescription(uint32_t frame_size, int parameter_count)
    : frame_size_(frame_size),
      parameter_count_(parameter_count),
      top_(kZapUint32),
      pc_(kZapUint32),
      fp_(kZapUint32),
      context_(kZapUint32),
      constant_pool_(kZapUint32) {
  // Zap all the registers.
  for (int r = 0; r < Register::kNumRegisters; r++) {
    // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
    // isn't used before the next safepoint, the GC will try to scan it as a
    // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
    SetRegister(r, kZapUint32);
  }

  // Zap all the slots.
  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
    SetFrameSlot(o, kZapUint32);
  }
}


// Fixed (non-spill) portion of this frame, including incoming parameters.
// Interpreted frames have a larger fixed part than standard JS frames.
int FrameDescription::ComputeFixedSize() {
  if (type_ == StackFrame::INTERPRETED) {
    return InterpreterFrameConstants::kFixedFrameSize +
           parameter_count() * kPointerSize;
  } else {
    return StandardFrameConstants::kFixedFrameSize +
           parameter_count() * kPointerSize;
  }
}


// Maps a (possibly negative) slot index to a byte offset from the frame's
// top. Non-negative indices address locals/spill slots below the fixed part;
// negative indices address incoming parameters.
unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
  if (slot_index >= 0) {
    // Local or spill slots. Skip the fixed part of the frame
    // including all arguments.
    unsigned base = GetFrameSize() - ComputeFixedSize();
    return base - ((slot_index + 1) * kPointerSize);
  } else {
    // Incoming parameter.
    int arg_size = parameter_count() * kPointerSize;
    unsigned base = GetFrameSize() - arg_size;
    return base - ((slot_index + 1) * kPointerSize);
  }
}


// Appends a signed value using a variable-length encoding: the sign goes in
// the least significant bit of the magnitude, and each emitted byte uses its
// own least significant bit as a "more bytes follow" flag.
void TranslationBuffer::Add(int32_t value, Zone* zone) {
  // This wouldn't handle kMinInt correctly if it ever encountered it.
  DCHECK(value != kMinInt);
  // Encode the sign bit in the least significant bit.
  bool is_negative = (value < 0);
  uint32_t bits = ((is_negative ? -value : value) << 1) |
      static_cast<int32_t>(is_negative);
  // Encode the individual bytes using the least significant bit of
  // each byte to indicate whether or not more bytes follow.
  do {
    uint32_t next = bits >> 7;
    contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
    bits = next;
  } while (bits != 0);
}


// Decodes the next value written by TranslationBuffer::Add above.
int32_t TranslationIterator::Next() {
  // Run through the bytes until we reach one with a least significant
  // bit of zero (marks the end).
  uint32_t bits = 0;
  for (int i = 0; true; i += 7) {
    DCHECK(HasNext());
    uint8_t next = buffer_->get(index_++);
    // Each byte contributes its top 7 bits; bit 0 is the continuation flag.
    bits |= (next >> 1) << i;
    if ((next & 1) == 0) break;
  }
  // The bits encode the sign in the least significant bit.
  bool is_negative = (bits & 1) == 1;
  int32_t result = bits >> 1;
  return is_negative ? -result : result;
}


// Copies the accumulated translation bytes into a tenured ByteArray.
Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
  int length = contents_.length();
  Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
  MemCopy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
  return result;
}


// The Begin* methods below each emit one frame-start opcode followed by its
// operands (see Translation::NumberOfOperandsFor).

void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
  buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginGetterStubFrame(int literal_id) {
  buffer_->Add(GETTER_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
}


void Translation::BeginSetterStubFrame(int literal_id) {
  buffer_->Add(SETTER_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
}


void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}

void Translation::BeginTailCallerFrame(int literal_id) {
  buffer_->Add(TAIL_CALLER_FRAME, zone());
  buffer_->Add(literal_id, zone());
}

void Translation::BeginJSFrame(BailoutId node_id,
                               int literal_id,
                               unsigned height) {
  buffer_->Add(JS_FRAME, zone());
  buffer_->Add(node_id.ToInt(), zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginInterpretedFrame(BailoutId bytecode_offset, int
    literal_id, unsigned height) {
  buffer_->Add(INTERPRETED_FRAME, zone());
  buffer_->Add(bytecode_offset.ToInt(), zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginCompiledStubFrame(int height) {
  buffer_->Add(COMPILED_STUB_FRAME, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginArgumentsObject(int args_length) {
  buffer_->Add(ARGUMENTS_OBJECT, zone());
  buffer_->Add(args_length, zone());
}


void Translation::BeginCapturedObject(int length) {
  buffer_->Add(CAPTURED_OBJECT, zone());
  buffer_->Add(length, zone());
}


// References a previously described object by its index instead of
// re-describing it.
void Translation::DuplicateObject(int object_index) {
  buffer_->Add(DUPLICATED_OBJECT, zone());
  buffer_->Add(object_index, zone());
}


// The Store* methods below each record where one frame value lives
// (register or stack slot) and how it is encoded (tagged, int32, etc.).

void Translation::StoreRegister(Register reg) {
  buffer_->Add(REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreInt32Register(Register reg) {
  buffer_->Add(INT32_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreUint32Register(Register reg) {
  buffer_->Add(UINT32_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreBoolRegister(Register reg) {
  buffer_->Add(BOOL_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}

void Translation::StoreFloatRegister(FloatRegister reg) {
  buffer_->Add(FLOAT_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}

void Translation::StoreDoubleRegister(DoubleRegister reg) {
  buffer_->Add(DOUBLE_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreStackSlot(int index) {
  buffer_->Add(STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreInt32StackSlot(int index) {
  buffer_->Add(INT32_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreUint32StackSlot(int index) {
  buffer_->Add(UINT32_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreBoolStackSlot(int index) {
  buffer_->Add(BOOL_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}

void Translation::StoreFloatStackSlot(int index) {
  buffer_->Add(FLOAT_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}

void Translation::StoreDoubleStackSlot(int index) {
  buffer_->Add(DOUBLE_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreLiteral(int literal_id) {
  buffer_->Add(LITERAL, zone());
  buffer_->Add(literal_id, zone());
}


void Translation::StoreArgumentsObject(bool args_known,
                                       int args_index,
                                       int args_length) {
  buffer_->Add(ARGUMENTS_OBJECT, zone());
  buffer_->Add(args_known, zone());
  buffer_->Add(args_index, zone());
  buffer_->Add(args_length, zone());
}


// Records the function slot of the current frame as a stack-slot reference,
// expressed relative to the caller-PC slot.
void Translation::StoreJSFrameFunction() {
  StoreStackSlot((StandardFrameConstants::kCallerPCOffset -
                  StandardFrameConstants::kFunctionOffset) /
                 kPointerSize);
}

// Number of operands that follow each translation opcode in the buffer; must
// stay in sync with the Begin*/Store* emitters above.
int Translation::NumberOfOperandsFor(Opcode opcode) {
  switch (opcode) {
    case GETTER_STUB_FRAME:
    case SETTER_STUB_FRAME:
    case DUPLICATED_OBJECT:
    case ARGUMENTS_OBJECT:
    case CAPTURED_OBJECT:
    case REGISTER:
    case INT32_REGISTER:
    case UINT32_REGISTER:
    case BOOL_REGISTER:
    case FLOAT_REGISTER:
    case DOUBLE_REGISTER:
    case STACK_SLOT:
    case INT32_STACK_SLOT:
    case UINT32_STACK_SLOT:
    case BOOL_STACK_SLOT:
    case FLOAT_STACK_SLOT:
    case DOUBLE_STACK_SLOT:
    case LITERAL:
    case COMPILED_STUB_FRAME:
    case TAIL_CALLER_FRAME:
      return 1;
    case BEGIN:
    case ARGUMENTS_ADAPTOR_FRAME:
2548 case CONSTRUCT_STUB_FRAME: 2549 return 2; 2550 case JS_FRAME: 2551 case INTERPRETED_FRAME: 2552 return 3; 2553 } 2554 FATAL("Unexpected translation type"); 2555 return -1; 2556 } 2557 2558 2559 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER) 2560 2561 const char* Translation::StringFor(Opcode opcode) { 2562 #define TRANSLATION_OPCODE_CASE(item) case item: return #item; 2563 switch (opcode) { 2564 TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE) 2565 } 2566 #undef TRANSLATION_OPCODE_CASE 2567 UNREACHABLE(); 2568 return ""; 2569 } 2570 2571 #endif 2572 2573 2574 Handle<FixedArray> MaterializedObjectStore::Get(Address fp) { 2575 int index = StackIdToIndex(fp); 2576 if (index == -1) { 2577 return Handle<FixedArray>::null(); 2578 } 2579 Handle<FixedArray> array = GetStackEntries(); 2580 CHECK_GT(array->length(), index); 2581 return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate())); 2582 } 2583 2584 2585 void MaterializedObjectStore::Set(Address fp, 2586 Handle<FixedArray> materialized_objects) { 2587 int index = StackIdToIndex(fp); 2588 if (index == -1) { 2589 index = frame_fps_.length(); 2590 frame_fps_.Add(fp); 2591 } 2592 2593 Handle<FixedArray> array = EnsureStackEntries(index + 1); 2594 array->set(index, *materialized_objects); 2595 } 2596 2597 2598 bool MaterializedObjectStore::Remove(Address fp) { 2599 int index = StackIdToIndex(fp); 2600 if (index == -1) { 2601 return false; 2602 } 2603 CHECK_GE(index, 0); 2604 2605 frame_fps_.Remove(index); 2606 FixedArray* array = isolate()->heap()->materialized_objects(); 2607 CHECK_LT(index, array->length()); 2608 for (int i = index; i < frame_fps_.length(); i++) { 2609 array->set(i, array->get(i + 1)); 2610 } 2611 array->set(frame_fps_.length(), isolate()->heap()->undefined_value()); 2612 return true; 2613 } 2614 2615 2616 int MaterializedObjectStore::StackIdToIndex(Address fp) { 2617 for (int i = 0; i < frame_fps_.length(); i++) { 2618 if (frame_fps_[i] == fp) { 2619 return i; 2620 } 
  }
  return -1;
}


Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
  return Handle<FixedArray>(isolate()->heap()->materialized_objects());
}


// Ensures the materialized-objects root array can hold at least |length|
// entries, growing it (doubling, minimum 10) and padding new slots with
// undefined. Returns the (possibly new) backing array.
Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
  Handle<FixedArray> array = GetStackEntries();
  if (array->length() >= length) {
    return array;
  }

  int new_length = length > 10 ? length : 10;
  if (new_length < 2 * array->length()) {
    new_length = 2 * array->length();
  }

  Handle<FixedArray> new_array =
      isolate()->factory()->NewFixedArray(new_length, TENURED);
  for (int i = 0; i < array->length(); i++) {
    new_array->set(i, array->get(i));
  }
  for (int i = array->length(); i < length; i++) {
    new_array->set(i, isolate()->heap()->undefined_value());
  }
  isolate()->heap()->SetRootMaterializedObjects(*new_array);
  return new_array;
}

namespace {

// Returns the value for debugger inspection. Values that would require
// materialization the debugger is not allowed to perform are reported as
// undefined instead.
Handle<Object> GetValueForDebugger(TranslatedFrame::iterator it,
                                   Isolate* isolate) {
  if (it->GetRawValue() == isolate->heap()->arguments_marker()) {
    if (!it->IsMaterializableByDebugger()) {
      return isolate->factory()->undefined_value();
    }
  }
  return it->GetValue();
}

}  // namespace

// Captures a debugger-friendly snapshot (function, parameters, context and
// expression stack) of the translated frame at |frame_it|.
DeoptimizedFrameInfo::DeoptimizedFrameInfo(TranslatedState* state,
                                           TranslatedState::iterator frame_it,
                                           Isolate* isolate) {
  // If the previous frame is an adaptor frame, we will take the parameters
  // from there.
  TranslatedState::iterator parameter_frame = frame_it;
  if (parameter_frame != state->begin()) {
    parameter_frame--;
  }
  int parameter_count;
  if (parameter_frame->kind() == TranslatedFrame::kArgumentsAdaptor) {
    parameter_count = parameter_frame->height() - 1;  // Ignore the receiver.
  } else {
    parameter_frame = frame_it;
    parameter_count =
        frame_it->shared_info()->internal_formal_parameter_count();
  }
  TranslatedFrame::iterator parameter_it = parameter_frame->begin();
  parameter_it++;  // Skip the function.
  parameter_it++;  // Skip the receiver.

  // Figure out whether there is a construct stub frame on top of
  // the parameter frame.
  has_construct_stub_ =
      parameter_frame != state->begin() &&
      (parameter_frame - 1)->kind() == TranslatedFrame::kConstructStub;

  source_position_ = Deoptimizer::ComputeSourcePosition(
      *frame_it->shared_info(), frame_it->node_id());

  TranslatedFrame::iterator value_it = frame_it->begin();
  // Get the function. Note that this might materialize the function.
  // In case the debugger mutates this value, we should deoptimize
  // the function and remember the value in the materialized value store.
  function_ = Handle<JSFunction>::cast(value_it->GetValue());

  parameters_.resize(static_cast<size_t>(parameter_count));
  for (int i = 0; i < parameter_count; i++) {
    Handle<Object> parameter = GetValueForDebugger(parameter_it, isolate);
    SetParameter(i, parameter);
    parameter_it++;
  }

  // Skip the function, the receiver and the arguments.
  int skip_count =
      frame_it->shared_info()->internal_formal_parameter_count() + 2;
  TranslatedFrame::iterator stack_it = frame_it->begin();
  for (int i = 0; i < skip_count; i++) {
    stack_it++;
  }

  // Get the context.
  context_ = GetValueForDebugger(stack_it, isolate);
  stack_it++;

  // Get the expression stack.
  int stack_height = frame_it->height();
  if (frame_it->kind() == TranslatedFrame::kFunction ||
      frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
    // For full-code frames, we should not count the context.
    // For interpreter frames, we should not count the accumulator.
2729 // TODO(jarin): Clean up the indexing in translated frames. 2730 stack_height--; 2731 } 2732 expression_stack_.resize(static_cast<size_t>(stack_height)); 2733 for (int i = 0; i < stack_height; i++) { 2734 Handle<Object> expression = GetValueForDebugger(stack_it, isolate); 2735 SetExpression(i, expression); 2736 stack_it++; 2737 } 2738 2739 // For interpreter frame, skip the accumulator. 2740 if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) { 2741 stack_it++; 2742 } 2743 CHECK(stack_it == frame_it->end()); 2744 } 2745 2746 2747 const char* Deoptimizer::GetDeoptReason(DeoptReason deopt_reason) { 2748 DCHECK(deopt_reason < kLastDeoptReason); 2749 #define DEOPT_MESSAGES_TEXTS(C, T) T, 2750 static const char* deopt_messages_[] = { 2751 DEOPT_MESSAGES_LIST(DEOPT_MESSAGES_TEXTS)}; 2752 #undef DEOPT_MESSAGES_TEXTS 2753 return deopt_messages_[deopt_reason]; 2754 } 2755 2756 2757 Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, Address pc) { 2758 SourcePosition last_position = SourcePosition::Unknown(); 2759 Deoptimizer::DeoptReason last_reason = Deoptimizer::kNoReason; 2760 int last_deopt_id = Deoptimizer::DeoptInfo::kNoDeoptId; 2761 int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) | 2762 RelocInfo::ModeMask(RelocInfo::DEOPT_ID) | 2763 RelocInfo::ModeMask(RelocInfo::POSITION); 2764 for (RelocIterator it(code, mask); !it.done(); it.next()) { 2765 RelocInfo* info = it.rinfo(); 2766 if (info->pc() >= pc) { 2767 return DeoptInfo(last_position, last_reason, last_deopt_id); 2768 } 2769 if (info->rmode() == RelocInfo::POSITION) { 2770 int raw_position = static_cast<int>(info->data()); 2771 last_position = raw_position ? 
SourcePosition::FromRaw(raw_position) 2772 : SourcePosition::Unknown(); 2773 } else if (info->rmode() == RelocInfo::DEOPT_ID) { 2774 last_deopt_id = static_cast<int>(info->data()); 2775 } else if (info->rmode() == RelocInfo::DEOPT_REASON) { 2776 last_reason = static_cast<Deoptimizer::DeoptReason>(info->data()); 2777 } 2778 } 2779 return DeoptInfo(SourcePosition::Unknown(), Deoptimizer::kNoReason, -1); 2780 } 2781 2782 2783 // static 2784 int Deoptimizer::ComputeSourcePosition(SharedFunctionInfo* shared, 2785 BailoutId node_id) { 2786 if (shared->HasBytecodeArray()) { 2787 BytecodeArray* bytecodes = shared->bytecode_array(); 2788 // BailoutId points to the next bytecode in the bytecode aray. Subtract 2789 // 1 to get the end of current bytecode. 2790 return bytecodes->SourcePosition(node_id.ToInt() - 1); 2791 } else { 2792 Code* non_optimized_code = shared->code(); 2793 FixedArray* raw_data = non_optimized_code->deoptimization_data(); 2794 DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data); 2795 unsigned pc_and_state = Deoptimizer::GetOutputInfo(data, node_id, shared); 2796 unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state); 2797 return non_optimized_code->SourcePosition(pc_offset); 2798 } 2799 } 2800 2801 // static 2802 TranslatedValue TranslatedValue::NewArgumentsObject(TranslatedState* container, 2803 int length, 2804 int object_index) { 2805 TranslatedValue slot(container, kArgumentsObject); 2806 slot.materialization_info_ = {object_index, length}; 2807 return slot; 2808 } 2809 2810 2811 // static 2812 TranslatedValue TranslatedValue::NewDeferredObject(TranslatedState* container, 2813 int length, 2814 int object_index) { 2815 TranslatedValue slot(container, kCapturedObject); 2816 slot.materialization_info_ = {object_index, length}; 2817 return slot; 2818 } 2819 2820 2821 // static 2822 TranslatedValue TranslatedValue::NewDuplicateObject(TranslatedState* container, 2823 int id) { 2824 TranslatedValue slot(container, 
kDuplicatedObject);
  slot.materialization_info_ = {id, -1};
  return slot;
}


// static
TranslatedValue TranslatedValue::NewFloat(TranslatedState* container,
                                          float value) {
  TranslatedValue slot(container, kFloat);
  slot.float_value_ = value;
  return slot;
}

// static
TranslatedValue TranslatedValue::NewDouble(TranslatedState* container,
                                           double value) {
  TranslatedValue slot(container, kDouble);
  slot.double_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewInt32(TranslatedState* container,
                                          int32_t value) {
  TranslatedValue slot(container, kInt32);
  slot.int32_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container,
                                           uint32_t value) {
  TranslatedValue slot(container, kUInt32);
  slot.uint32_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewBool(TranslatedState* container,
                                         uint32_t value) {
  TranslatedValue slot(container, kBoolBit);
  slot.uint32_value_ = value;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewTagged(TranslatedState* container,
                                           Object* literal) {
  TranslatedValue slot(container, kTagged);
  slot.raw_literal_ = literal;
  return slot;
}


// static
TranslatedValue TranslatedValue::NewInvalid(TranslatedState* container) {
  return TranslatedValue(container, kInvalid);
}


Isolate* TranslatedValue::isolate() const { return container_->isolate(); }


// Kind-checked accessors for the per-kind payload union.

Object* TranslatedValue::raw_literal() const {
  DCHECK_EQ(kTagged, kind());
  return raw_literal_;
}


int32_t TranslatedValue::int32_value() const {
  DCHECK_EQ(kInt32, kind());
  return int32_value_;
}


uint32_t TranslatedValue::uint32_value() const {
  DCHECK(kind() == kUInt32 || kind() == kBoolBit);
  return uint32_value_;
}

float TranslatedValue::float_value() const {
  DCHECK_EQ(kFloat, kind());
  return float_value_;
}

double TranslatedValue::double_value() const {
  DCHECK_EQ(kDouble, kind());
  return double_value_;
}


// Number of fields of an arguments/captured object placeholder.
int TranslatedValue::object_length() const {
  DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject);
  return materialization_info_.length_;
}


// Materialization id shared by an object and its duplicates.
int TranslatedValue::object_index() const {
  DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject ||
         kind() == kDuplicatedObject);
  return materialization_info_.id_;
}


// Best-effort, allocation-free view of the value. Returns the arguments
// marker sentinel when the value cannot be produced without allocating.
Object* TranslatedValue::GetRawValue() const {
  // If we have a value, return it.
  Handle<Object> result_handle;
  if (value_.ToHandle(&result_handle)) {
    return *result_handle;
  }

  // Otherwise, do a best effort to get the value without allocation.
  switch (kind()) {
    case kTagged:
      return raw_literal();

    case kInt32: {
      bool is_smi = Smi::IsValid(int32_value());
      if (is_smi) {
        return Smi::FromInt(int32_value());
      }
      break;
    }

    case kUInt32: {
      bool is_smi = (uint32_value() <= static_cast<uintptr_t>(Smi::kMaxValue));
      if (is_smi) {
        return Smi::FromInt(static_cast<int32_t>(uint32_value()));
      }
      break;
    }

    case kBoolBit: {
      if (uint32_value() == 0) {
        return isolate()->heap()->false_value();
      } else {
        CHECK_EQ(1U, uint32_value());
        return isolate()->heap()->true_value();
      }
    }

    default:
      break;
  }

  // If we could not get the value without allocation, return the arguments
  // marker.
  return isolate()->heap()->arguments_marker();
}


// Returns the value, materializing (and caching) it if necessary. May
// allocate on the heap.
Handle<Object> TranslatedValue::GetValue() {
  Handle<Object> result;
  // If we already have a value, then get it.
  if (value_.ToHandle(&result)) return result;

  // Otherwise we have to materialize.
  switch (kind()) {
    case TranslatedValue::kTagged:
    case TranslatedValue::kInt32:
    case TranslatedValue::kUInt32:
    case TranslatedValue::kBoolBit:
    case TranslatedValue::kFloat:
    case TranslatedValue::kDouble: {
      MaterializeSimple();
      return value_.ToHandleChecked();
    }

    case TranslatedValue::kArgumentsObject:
    case TranslatedValue::kCapturedObject:
    case TranslatedValue::kDuplicatedObject:
      return container_->MaterializeObjectAt(object_index());

    case TranslatedValue::kInvalid:
      FATAL("unexpected case");
      return Handle<Object>::null();
  }

  FATAL("internal error: value missing");
  return Handle<Object>::null();
}


// Materializes scalar (non-object) values into value_, allocating a heap
// number when the raw value is not representable as a Smi or oddball.
void TranslatedValue::MaterializeSimple() {
  // If we already have materialized, return.
  if (!value_.is_null()) return;

  Object* raw_value = GetRawValue();
  if (raw_value != isolate()->heap()->arguments_marker()) {
    // We can get the value without allocation, just return it here.
    value_ = Handle<Object>(raw_value, isolate());
    return;
  }

  switch (kind()) {
    case kInt32: {
      value_ = Handle<Object>(isolate()->factory()->NewNumber(int32_value()));
      return;
    }

    case kUInt32:
      value_ = Handle<Object>(isolate()->factory()->NewNumber(uint32_value()));
      return;

    case kFloat:
      value_ = Handle<Object>(isolate()->factory()->NewNumber(float_value()));
      return;

    case kDouble:
      value_ = Handle<Object>(isolate()->factory()->NewNumber(double_value()));
      return;

    case kCapturedObject:
    case kDuplicatedObject:
    case kArgumentsObject:
    case kInvalid:
    case kTagged:
    case kBoolBit:
      FATAL("internal error: unexpected materialization.");
      break;
  }
}


bool TranslatedValue::IsMaterializedObject() const {
  switch (kind()) {
    case kCapturedObject:
    case kDuplicatedObject:
    case kArgumentsObject:
      return true;
    default:
      return false;
  }
}

bool TranslatedValue::IsMaterializableByDebugger() const {
  // At the moment, we only allow materialization of doubles.
  return (kind() == kDouble);
}

// Number of nested values that belong to this value (object fields); zero
// for scalars.
int TranslatedValue::GetChildrenCount() const {
  if (kind() == kCapturedObject || kind() == kArgumentsObject) {
    return object_length();
  } else {
    return 0;
  }
}


// Reads a 32-bit slot from the frame, compensating for where a 32-bit value
// sits inside a 64-bit slot on big-endian 64-bit targets.
uint32_t TranslatedState::GetUInt32Slot(Address fp, int slot_offset) {
  Address address = fp + slot_offset;
#if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
  return Memory::uint32_at(address + kIntSize);
#else
  return Memory::uint32_at(address);
#endif
}


// Converts the raw tagged pointer into a GC-safe handle.
void TranslatedValue::Handlify() {
  if (kind() == kTagged) {
    value_ = Handle<Object>(raw_literal(), isolate());
    raw_literal_ = nullptr;
  }
}


TranslatedFrame TranslatedFrame::JSFrame(BailoutId node_id,
                                         SharedFunctionInfo* shared_info,
                                         int height) {
  TranslatedFrame frame(kFunction, shared_info->GetIsolate(), shared_info,
                        height);
  frame.node_id_ = node_id;
  return frame;
}


TranslatedFrame TranslatedFrame::InterpretedFrame(
    BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) {
  TranslatedFrame frame(kInterpretedFunction, shared_info->GetIsolate(),
                        shared_info, height);
  frame.node_id_ = bytecode_offset;
  return frame;
}


TranslatedFrame TranslatedFrame::AccessorFrame(
    Kind kind, SharedFunctionInfo* shared_info) {
  DCHECK(kind == kSetter || kind == kGetter);
  return TranslatedFrame(kind, shared_info->GetIsolate(), shared_info);
}


TranslatedFrame TranslatedFrame::ArgumentsAdaptorFrame(
    SharedFunctionInfo* shared_info, int height) {
  return TranslatedFrame(kArgumentsAdaptor, shared_info->GetIsolate(),
                         shared_info, height);
}

TranslatedFrame TranslatedFrame::TailCallerFrame(
    SharedFunctionInfo* shared_info) {
  return TranslatedFrame(kTailCallerFunction, shared_info->GetIsolate(),
                         shared_info, 0);
}
TranslatedFrame TranslatedFrame::ConstructStubFrame(
    SharedFunctionInfo* shared_info, int height) {
  return TranslatedFrame(kConstructStub, shared_info->GetIsolate(), shared_info,
                         height);
}


// Number of TranslatedValues that the translation stream supplies for a
// frame of this kind.
int TranslatedFrame::GetValueCount() {
  switch (kind()) {
    case kFunction: {
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 1 for function.
      return height_ + parameter_count + 1;
    }

    case kInterpretedFunction: {
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 2 for function and context.
      return height_ + parameter_count + 2;
    }

    case kGetter:
      return 2;  // Function and receiver.

    case kSetter:
      return 3;  // Function, receiver and the value to set.

    case kArgumentsAdaptor:
    case kConstructStub:
      return 1 + height_;

    case kTailCallerFunction:
      return 1;  // Function.

    case kCompiledStub:
      return height_;

    case kInvalid:
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return -1;
}


// Converts raw pointers held by this frame and its values into GC-safe
// handles.
void TranslatedFrame::Handlify() {
  if (raw_shared_info_ != nullptr) {
    shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_);
    raw_shared_info_ = nullptr;
  }
  for (auto& value : values_) {
    value.Handlify();
  }
}


// Reads one frame descriptor from the translation stream and returns the
// corresponding TranslatedFrame; optionally traces what it read to
// |trace_file|. Value opcodes are handled by CreateNextTranslatedValue.
TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
    TranslationIterator* iterator, FixedArray* literal_array, Address fp,
    FILE* trace_file) {
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  switch (opcode) {
    case Translation::JS_FRAME: {
      BailoutId node_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading input frame %s", name.get());
        int arg_count = shared_info->internal_formal_parameter_count() + 1;
        PrintF(trace_file, " => node=%d, args=%d, height=%d; inputs:\n",
               node_id.ToInt(), arg_count, height);
      }
      return TranslatedFrame::JSFrame(node_id, shared_info, height);
    }

    case Translation::INTERPRETED_FRAME: {
      BailoutId bytecode_offset = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading input frame %s", name.get());
        int arg_count = shared_info->internal_formal_parameter_count() + 1;
        PrintF(trace_file,
               " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
               bytecode_offset.ToInt(), arg_count, height);
      }
      return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
                                               height);
    }

    case Translation::ARGUMENTS_ADAPTOR_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading arguments adaptor frame %s", name.get());
        PrintF(trace_file, " => height=%d; inputs:\n", height);
      }
      return TranslatedFrame::ArgumentsAdaptorFrame(shared_info, height);
    }

    case Translation::TAIL_CALLER_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading tail caller frame marker %s\n",
               name.get());
      }
      return TranslatedFrame::TailCallerFrame(shared_info);
    }

    case Translation::CONSTRUCT_STUB_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading construct stub frame %s", name.get());
        PrintF(trace_file, " => height=%d; inputs:\n", height);
      }
      return TranslatedFrame::ConstructStubFrame(shared_info, height);
    }

    case Translation::GETTER_STUB_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading getter frame %s; inputs:\n", name.get());
      }
      return TranslatedFrame::AccessorFrame(TranslatedFrame::kGetter,
                                            shared_info);
    }

    case Translation::SETTER_STUB_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      if (trace_file != nullptr) {
        base::SmartArrayPointer<char> name =
            shared_info->DebugName()->ToCString();
        PrintF(trace_file, " reading setter frame %s; inputs:\n", name.get());
      }
      return TranslatedFrame::AccessorFrame(TranslatedFrame::kSetter,
                                            shared_info);
    }

    case Translation::COMPILED_STUB_FRAME: {
      int height = iterator->Next();
      if (trace_file != nullptr) {
        PrintF(trace_file,
               " reading compiler stub frame => height=%d; inputs:\n", height);
      }
      return TranslatedFrame::CompiledStubFrame(height,
                                                literal_array->GetIsolate());
    }

    case Translation::BEGIN:
    case Translation::DUPLICATED_OBJECT:
    case Translation::ARGUMENTS_OBJECT:
    case Translation::CAPTURED_OBJECT:
    case Translation::REGISTER:
    case Translation::INT32_REGISTER:
    case Translation::UINT32_REGISTER:
    case Translation::BOOL_REGISTER:
    case Translation::FLOAT_REGISTER:
    case Translation::DOUBLE_REGISTER:
    case Translation::STACK_SLOT:
    case Translation::INT32_STACK_SLOT:
    case Translation::UINT32_STACK_SLOT:
    case Translation::BOOL_STACK_SLOT:
    case Translation::FLOAT_STACK_SLOT:
    case Translation::DOUBLE_STACK_SLOT:
    case Translation::LITERAL:
      break;
  }
  FATAL("We should never get here - unexpected deopt info.");
  return TranslatedFrame::InvalidFrame();
}


// static
// Advances |iter| past one value and all of its transitively nested
// children (object fields).
void TranslatedFrame::AdvanceIterator(
    std::deque<TranslatedValue>::iterator* iter) {
  int values_to_skip = 1;
  while (values_to_skip > 0) {
    // Consume the current element.
    values_to_skip--;
    // Add all the children.
    values_to_skip += (*iter)->GetChildrenCount();

    (*iter)++;
  }
}


// We can't intermix stack decoding and allocations because
// deoptimization infrastructure is not GC safe.
// Thus we build a temporary structure in malloced space.
3347 TranslatedValue TranslatedState::CreateNextTranslatedValue( 3348 int frame_index, int value_index, TranslationIterator* iterator, 3349 FixedArray* literal_array, Address fp, RegisterValues* registers, 3350 FILE* trace_file) { 3351 disasm::NameConverter converter; 3352 3353 Translation::Opcode opcode = 3354 static_cast<Translation::Opcode>(iterator->Next()); 3355 switch (opcode) { 3356 case Translation::BEGIN: 3357 case Translation::JS_FRAME: 3358 case Translation::INTERPRETED_FRAME: 3359 case Translation::ARGUMENTS_ADAPTOR_FRAME: 3360 case Translation::TAIL_CALLER_FRAME: 3361 case Translation::CONSTRUCT_STUB_FRAME: 3362 case Translation::GETTER_STUB_FRAME: 3363 case Translation::SETTER_STUB_FRAME: 3364 case Translation::COMPILED_STUB_FRAME: 3365 // Peeled off before getting here. 3366 break; 3367 3368 case Translation::DUPLICATED_OBJECT: { 3369 int object_id = iterator->Next(); 3370 if (trace_file != nullptr) { 3371 PrintF(trace_file, "duplicated object #%d", object_id); 3372 } 3373 object_positions_.push_back(object_positions_[object_id]); 3374 return TranslatedValue::NewDuplicateObject(this, object_id); 3375 } 3376 3377 case Translation::ARGUMENTS_OBJECT: { 3378 int arg_count = iterator->Next(); 3379 int object_index = static_cast<int>(object_positions_.size()); 3380 if (trace_file != nullptr) { 3381 PrintF(trace_file, "argumets object #%d (length = %d)", object_index, 3382 arg_count); 3383 } 3384 object_positions_.push_back({frame_index, value_index}); 3385 return TranslatedValue::NewArgumentsObject(this, arg_count, object_index); 3386 } 3387 3388 case Translation::CAPTURED_OBJECT: { 3389 int field_count = iterator->Next(); 3390 int object_index = static_cast<int>(object_positions_.size()); 3391 if (trace_file != nullptr) { 3392 PrintF(trace_file, "captured object #%d (length = %d)", object_index, 3393 field_count); 3394 } 3395 object_positions_.push_back({frame_index, value_index}); 3396 return TranslatedValue::NewDeferredObject(this, field_count, 3397 
object_index); 3398 } 3399 3400 case Translation::REGISTER: { 3401 int input_reg = iterator->Next(); 3402 if (registers == nullptr) return TranslatedValue::NewInvalid(this); 3403 intptr_t value = registers->GetRegister(input_reg); 3404 if (trace_file != nullptr) { 3405 PrintF(trace_file, "0x%08" V8PRIxPTR " ; %s ", value, 3406 converter.NameOfCPURegister(input_reg)); 3407 reinterpret_cast<Object*>(value)->ShortPrint(trace_file); 3408 } 3409 return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value)); 3410 } 3411 3412 case Translation::INT32_REGISTER: { 3413 int input_reg = iterator->Next(); 3414 if (registers == nullptr) return TranslatedValue::NewInvalid(this); 3415 intptr_t value = registers->GetRegister(input_reg); 3416 if (trace_file != nullptr) { 3417 PrintF(trace_file, "%" V8PRIdPTR " ; %s ", value, 3418 converter.NameOfCPURegister(input_reg)); 3419 } 3420 return TranslatedValue::NewInt32(this, static_cast<int32_t>(value)); 3421 } 3422 3423 case Translation::UINT32_REGISTER: { 3424 int input_reg = iterator->Next(); 3425 if (registers == nullptr) return TranslatedValue::NewInvalid(this); 3426 intptr_t value = registers->GetRegister(input_reg); 3427 if (trace_file != nullptr) { 3428 PrintF(trace_file, "%" V8PRIuPTR " ; %s (uint)", value, 3429 converter.NameOfCPURegister(input_reg)); 3430 reinterpret_cast<Object*>(value)->ShortPrint(trace_file); 3431 } 3432 return TranslatedValue::NewUInt32(this, static_cast<uint32_t>(value)); 3433 } 3434 3435 case Translation::BOOL_REGISTER: { 3436 int input_reg = iterator->Next(); 3437 if (registers == nullptr) return TranslatedValue::NewInvalid(this); 3438 intptr_t value = registers->GetRegister(input_reg); 3439 if (trace_file != nullptr) { 3440 PrintF(trace_file, "%" V8PRIdPTR " ; %s (bool)", value, 3441 converter.NameOfCPURegister(input_reg)); 3442 } 3443 return TranslatedValue::NewBool(this, static_cast<uint32_t>(value)); 3444 } 3445 3446 case Translation::FLOAT_REGISTER: { 3447 int input_reg = 
iterator->Next(); 3448 if (registers == nullptr) return TranslatedValue::NewInvalid(this); 3449 float value = registers->GetFloatRegister(input_reg); 3450 if (trace_file != nullptr) { 3451 PrintF(trace_file, "%e ; %s (float)", value, 3452 RegisterConfiguration::Crankshaft()->GetFloatRegisterName( 3453 input_reg)); 3454 } 3455 return TranslatedValue::NewFloat(this, value); 3456 } 3457 3458 case Translation::DOUBLE_REGISTER: { 3459 int input_reg = iterator->Next(); 3460 if (registers == nullptr) return TranslatedValue::NewInvalid(this); 3461 double value = registers->GetDoubleRegister(input_reg); 3462 if (trace_file != nullptr) { 3463 PrintF(trace_file, "%e ; %s (double)", value, 3464 RegisterConfiguration::Crankshaft()->GetDoubleRegisterName( 3465 input_reg)); 3466 } 3467 return TranslatedValue::NewDouble(this, value); 3468 } 3469 3470 case Translation::STACK_SLOT: { 3471 int slot_offset = 3472 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); 3473 intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset)); 3474 if (trace_file != nullptr) { 3475 PrintF(trace_file, "0x%08" V8PRIxPTR " ; [fp %c %d] ", value, 3476 slot_offset < 0 ? '-' : '+', std::abs(slot_offset)); 3477 reinterpret_cast<Object*>(value)->ShortPrint(trace_file); 3478 } 3479 return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value)); 3480 } 3481 3482 case Translation::INT32_STACK_SLOT: { 3483 int slot_offset = 3484 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); 3485 uint32_t value = GetUInt32Slot(fp, slot_offset); 3486 if (trace_file != nullptr) { 3487 PrintF(trace_file, "%d ; (int) [fp %c %d] ", 3488 static_cast<int32_t>(value), slot_offset < 0 ? 
'-' : '+', 3489 std::abs(slot_offset)); 3490 } 3491 return TranslatedValue::NewInt32(this, value); 3492 } 3493 3494 case Translation::UINT32_STACK_SLOT: { 3495 int slot_offset = 3496 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); 3497 uint32_t value = GetUInt32Slot(fp, slot_offset); 3498 if (trace_file != nullptr) { 3499 PrintF(trace_file, "%u ; (uint) [fp %c %d] ", value, 3500 slot_offset < 0 ? '-' : '+', std::abs(slot_offset)); 3501 } 3502 return TranslatedValue::NewUInt32(this, value); 3503 } 3504 3505 case Translation::BOOL_STACK_SLOT: { 3506 int slot_offset = 3507 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); 3508 uint32_t value = GetUInt32Slot(fp, slot_offset); 3509 if (trace_file != nullptr) { 3510 PrintF(trace_file, "%u ; (bool) [fp %c %d] ", value, 3511 slot_offset < 0 ? '-' : '+', std::abs(slot_offset)); 3512 } 3513 return TranslatedValue::NewBool(this, value); 3514 } 3515 3516 case Translation::FLOAT_STACK_SLOT: { 3517 int slot_offset = 3518 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); 3519 float value = ReadFloatValue(fp + slot_offset); 3520 if (trace_file != nullptr) { 3521 PrintF(trace_file, "%e ; (float) [fp %c %d] ", value, 3522 slot_offset < 0 ? '-' : '+', std::abs(slot_offset)); 3523 } 3524 return TranslatedValue::NewFloat(this, value); 3525 } 3526 3527 case Translation::DOUBLE_STACK_SLOT: { 3528 int slot_offset = 3529 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); 3530 double value = ReadDoubleValue(fp + slot_offset); 3531 if (trace_file != nullptr) { 3532 PrintF(trace_file, "%e ; (double) [fp %c %d] ", value, 3533 slot_offset < 0 ? 
'-' : '+', std::abs(slot_offset)); 3534 } 3535 return TranslatedValue::NewDouble(this, value); 3536 } 3537 3538 case Translation::LITERAL: { 3539 int literal_index = iterator->Next(); 3540 Object* value = literal_array->get(literal_index); 3541 if (trace_file != nullptr) { 3542 PrintF(trace_file, "0x%08" V8PRIxPTR " ; (literal %d) ", 3543 reinterpret_cast<intptr_t>(value), literal_index); 3544 reinterpret_cast<Object*>(value)->ShortPrint(trace_file); 3545 } 3546 3547 return TranslatedValue::NewTagged(this, value); 3548 } 3549 } 3550 3551 FATAL("We should never get here - unexpected deopt info."); 3552 return TranslatedValue(nullptr, TranslatedValue::kInvalid); 3553 } 3554 3555 3556 TranslatedState::TranslatedState(JavaScriptFrame* frame) 3557 : isolate_(nullptr), 3558 stack_frame_pointer_(nullptr), 3559 has_adapted_arguments_(false) { 3560 int deopt_index = Safepoint::kNoDeoptimizationIndex; 3561 DeoptimizationInputData* data = 3562 static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index); 3563 DCHECK(data != nullptr && deopt_index != Safepoint::kNoDeoptimizationIndex); 3564 TranslationIterator it(data->TranslationByteArray(), 3565 data->TranslationIndex(deopt_index)->value()); 3566 Init(frame->fp(), &it, data->LiteralArray(), nullptr /* registers */, 3567 nullptr /* trace file */); 3568 } 3569 3570 3571 TranslatedState::TranslatedState() 3572 : isolate_(nullptr), 3573 stack_frame_pointer_(nullptr), 3574 has_adapted_arguments_(false) {} 3575 3576 3577 void TranslatedState::Init(Address input_frame_pointer, 3578 TranslationIterator* iterator, 3579 FixedArray* literal_array, RegisterValues* registers, 3580 FILE* trace_file) { 3581 DCHECK(frames_.empty()); 3582 3583 isolate_ = literal_array->GetIsolate(); 3584 // Read out the 'header' translation. 
3585 Translation::Opcode opcode = 3586 static_cast<Translation::Opcode>(iterator->Next()); 3587 CHECK(opcode == Translation::BEGIN); 3588 3589 int count = iterator->Next(); 3590 iterator->Next(); // Drop JS frames count. 3591 3592 frames_.reserve(count); 3593 3594 std::stack<int> nested_counts; 3595 3596 // Read the frames 3597 for (int i = 0; i < count; i++) { 3598 // Read the frame descriptor. 3599 frames_.push_back(CreateNextTranslatedFrame( 3600 iterator, literal_array, input_frame_pointer, trace_file)); 3601 TranslatedFrame& frame = frames_.back(); 3602 3603 // Read the values. 3604 int values_to_process = frame.GetValueCount(); 3605 while (values_to_process > 0 || !nested_counts.empty()) { 3606 if (trace_file != nullptr) { 3607 if (nested_counts.empty()) { 3608 // For top level values, print the value number. 3609 PrintF(trace_file, " %3i: ", 3610 frame.GetValueCount() - values_to_process); 3611 } else { 3612 // Take care of indenting for nested values. 3613 PrintF(trace_file, " "); 3614 for (size_t j = 0; j < nested_counts.size(); j++) { 3615 PrintF(trace_file, " "); 3616 } 3617 } 3618 } 3619 3620 TranslatedValue value = CreateNextTranslatedValue( 3621 i, static_cast<int>(frame.values_.size()), iterator, literal_array, 3622 input_frame_pointer, registers, trace_file); 3623 frame.Add(value); 3624 3625 if (trace_file != nullptr) { 3626 PrintF(trace_file, "\n"); 3627 } 3628 3629 // Update the value count and resolve the nesting. 
3630 values_to_process--; 3631 int children_count = value.GetChildrenCount(); 3632 if (children_count > 0) { 3633 nested_counts.push(values_to_process); 3634 values_to_process = children_count; 3635 } else { 3636 while (values_to_process == 0 && !nested_counts.empty()) { 3637 values_to_process = nested_counts.top(); 3638 nested_counts.pop(); 3639 } 3640 } 3641 } 3642 } 3643 3644 CHECK(!iterator->HasNext() || 3645 static_cast<Translation::Opcode>(iterator->Next()) == 3646 Translation::BEGIN); 3647 } 3648 3649 3650 void TranslatedState::Prepare(bool has_adapted_arguments, 3651 Address stack_frame_pointer) { 3652 for (auto& frame : frames_) frame.Handlify(); 3653 3654 stack_frame_pointer_ = stack_frame_pointer; 3655 has_adapted_arguments_ = has_adapted_arguments; 3656 3657 UpdateFromPreviouslyMaterializedObjects(); 3658 } 3659 3660 3661 Handle<Object> TranslatedState::MaterializeAt(int frame_index, 3662 int* value_index) { 3663 TranslatedFrame* frame = &(frames_[frame_index]); 3664 CHECK(static_cast<size_t>(*value_index) < frame->values_.size()); 3665 3666 TranslatedValue* slot = &(frame->values_[*value_index]); 3667 (*value_index)++; 3668 3669 switch (slot->kind()) { 3670 case TranslatedValue::kTagged: 3671 case TranslatedValue::kInt32: 3672 case TranslatedValue::kUInt32: 3673 case TranslatedValue::kBoolBit: 3674 case TranslatedValue::kFloat: 3675 case TranslatedValue::kDouble: { 3676 slot->MaterializeSimple(); 3677 Handle<Object> value = slot->GetValue(); 3678 if (value->IsMutableHeapNumber()) { 3679 HeapNumber::cast(*value)->set_map(isolate()->heap()->heap_number_map()); 3680 } 3681 return value; 3682 } 3683 3684 case TranslatedValue::kArgumentsObject: { 3685 int length = slot->GetChildrenCount(); 3686 Handle<JSObject> arguments; 3687 if (GetAdaptedArguments(&arguments, frame_index)) { 3688 // Store the materialized object and consume the nested values. 
3689 for (int i = 0; i < length; ++i) { 3690 MaterializeAt(frame_index, value_index); 3691 } 3692 } else { 3693 Handle<JSFunction> function = 3694 Handle<JSFunction>::cast(frame->front().GetValue()); 3695 arguments = isolate_->factory()->NewArgumentsObject(function, length); 3696 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length); 3697 DCHECK_EQ(array->length(), length); 3698 arguments->set_elements(*array); 3699 for (int i = 0; i < length; ++i) { 3700 Handle<Object> value = MaterializeAt(frame_index, value_index); 3701 array->set(i, *value); 3702 } 3703 } 3704 slot->value_ = arguments; 3705 return arguments; 3706 } 3707 case TranslatedValue::kCapturedObject: { 3708 int length = slot->GetChildrenCount(); 3709 3710 // The map must be a tagged object. 3711 CHECK(frame->values_[*value_index].kind() == TranslatedValue::kTagged); 3712 3713 Handle<Object> result; 3714 if (slot->value_.ToHandle(&result)) { 3715 // This has been previously materialized, return the previous value. 3716 // We still need to skip all the nested objects. 3717 for (int i = 0; i < length; i++) { 3718 MaterializeAt(frame_index, value_index); 3719 } 3720 3721 return result; 3722 } 3723 3724 Handle<Object> map_object = MaterializeAt(frame_index, value_index); 3725 Handle<Map> map = 3726 Map::GeneralizeAllFieldRepresentations(Handle<Map>::cast(map_object)); 3727 switch (map->instance_type()) { 3728 case MUTABLE_HEAP_NUMBER_TYPE: 3729 case HEAP_NUMBER_TYPE: { 3730 // Reuse the HeapNumber value directly as it is already properly 3731 // tagged and skip materializing the HeapNumber explicitly. 3732 Handle<Object> object = MaterializeAt(frame_index, value_index); 3733 slot->value_ = object; 3734 // On 32-bit architectures, there is an extra slot there because 3735 // the escape analysis calculates the number of slots as 3736 // object-size/pointer-size. To account for this, we read out 3737 // any extra slots. 
3738 for (int i = 0; i < length - 2; i++) { 3739 MaterializeAt(frame_index, value_index); 3740 } 3741 return object; 3742 } 3743 case JS_OBJECT_TYPE: 3744 case JS_ERROR_TYPE: 3745 case JS_ARGUMENTS_TYPE: { 3746 Handle<JSObject> object = 3747 isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED); 3748 slot->value_ = object; 3749 Handle<Object> properties = MaterializeAt(frame_index, value_index); 3750 Handle<Object> elements = MaterializeAt(frame_index, value_index); 3751 object->set_properties(FixedArray::cast(*properties)); 3752 object->set_elements(FixedArrayBase::cast(*elements)); 3753 for (int i = 0; i < length - 3; ++i) { 3754 Handle<Object> value = MaterializeAt(frame_index, value_index); 3755 FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i); 3756 object->FastPropertyAtPut(index, *value); 3757 } 3758 return object; 3759 } 3760 case JS_ARRAY_TYPE: { 3761 Handle<JSArray> object = 3762 isolate_->factory()->NewJSArray(0, map->elements_kind()); 3763 slot->value_ = object; 3764 Handle<Object> properties = MaterializeAt(frame_index, value_index); 3765 Handle<Object> elements = MaterializeAt(frame_index, value_index); 3766 Handle<Object> length = MaterializeAt(frame_index, value_index); 3767 object->set_properties(FixedArray::cast(*properties)); 3768 object->set_elements(FixedArrayBase::cast(*elements)); 3769 object->set_length(*length); 3770 return object; 3771 } 3772 case JS_FUNCTION_TYPE: { 3773 Handle<JSFunction> object = 3774 isolate_->factory()->NewFunctionFromSharedFunctionInfo( 3775 handle(isolate_->object_function()->shared()), 3776 handle(isolate_->context())); 3777 slot->value_ = object; 3778 // We temporarily allocated a JSFunction for the {Object} function 3779 // within the current context, to break cycles in the object graph. 3780 // The correct function and context will be set below once available. 
3781 Handle<Object> properties = MaterializeAt(frame_index, value_index); 3782 Handle<Object> elements = MaterializeAt(frame_index, value_index); 3783 Handle<Object> prototype = MaterializeAt(frame_index, value_index); 3784 Handle<Object> shared = MaterializeAt(frame_index, value_index); 3785 Handle<Object> context = MaterializeAt(frame_index, value_index); 3786 Handle<Object> literals = MaterializeAt(frame_index, value_index); 3787 Handle<Object> entry = MaterializeAt(frame_index, value_index); 3788 Handle<Object> next_link = MaterializeAt(frame_index, value_index); 3789 object->ReplaceCode(*isolate_->builtins()->CompileLazy()); 3790 object->set_map(*map); 3791 object->set_properties(FixedArray::cast(*properties)); 3792 object->set_elements(FixedArrayBase::cast(*elements)); 3793 object->set_prototype_or_initial_map(*prototype); 3794 object->set_shared(SharedFunctionInfo::cast(*shared)); 3795 object->set_context(Context::cast(*context)); 3796 object->set_literals(LiteralsArray::cast(*literals)); 3797 CHECK(entry->IsNumber()); // Entry to compile lazy stub. 3798 CHECK(next_link->IsUndefined(isolate_)); 3799 return object; 3800 } 3801 case FIXED_ARRAY_TYPE: { 3802 Handle<Object> lengthObject = MaterializeAt(frame_index, value_index); 3803 int32_t length = 0; 3804 CHECK(lengthObject->ToInt32(&length)); 3805 Handle<FixedArray> object = 3806 isolate_->factory()->NewFixedArray(length); 3807 // We need to set the map, because the fixed array we are 3808 // materializing could be a context or an arguments object, 3809 // in which case we must retain that information. 
3810 object->set_map(*map); 3811 slot->value_ = object; 3812 for (int i = 0; i < length; ++i) { 3813 Handle<Object> value = MaterializeAt(frame_index, value_index); 3814 object->set(i, *value); 3815 } 3816 return object; 3817 } 3818 case FIXED_DOUBLE_ARRAY_TYPE: { 3819 DCHECK_EQ(*map, isolate_->heap()->fixed_double_array_map()); 3820 Handle<Object> lengthObject = MaterializeAt(frame_index, value_index); 3821 int32_t length = 0; 3822 CHECK(lengthObject->ToInt32(&length)); 3823 Handle<FixedArrayBase> object = 3824 isolate_->factory()->NewFixedDoubleArray(length); 3825 slot->value_ = object; 3826 if (length > 0) { 3827 Handle<FixedDoubleArray> double_array = 3828 Handle<FixedDoubleArray>::cast(object); 3829 for (int i = 0; i < length; ++i) { 3830 Handle<Object> value = MaterializeAt(frame_index, value_index); 3831 CHECK(value->IsNumber()); 3832 double_array->set(i, value->Number()); 3833 } 3834 } 3835 return object; 3836 } 3837 default: 3838 PrintF(stderr, "[couldn't handle instance type %d]\n", 3839 map->instance_type()); 3840 FATAL("unreachable"); 3841 return Handle<Object>::null(); 3842 } 3843 UNREACHABLE(); 3844 break; 3845 } 3846 3847 case TranslatedValue::kDuplicatedObject: { 3848 int object_index = slot->object_index(); 3849 TranslatedState::ObjectPosition pos = object_positions_[object_index]; 3850 3851 // Make sure the duplicate is refering to a previous object. 3852 CHECK(pos.frame_index_ < frame_index || 3853 (pos.frame_index_ == frame_index && 3854 pos.value_index_ < *value_index - 1)); 3855 3856 Handle<Object> object = 3857 frames_[pos.frame_index_].values_[pos.value_index_].GetValue(); 3858 3859 // The object should have a (non-sentinel) value. 
3860 CHECK(!object.is_null() && 3861 !object.is_identical_to(isolate_->factory()->arguments_marker())); 3862 3863 slot->value_ = object; 3864 return object; 3865 } 3866 3867 case TranslatedValue::kInvalid: 3868 UNREACHABLE(); 3869 break; 3870 } 3871 3872 FATAL("We should never get here - unexpected deopt slot kind."); 3873 return Handle<Object>::null(); 3874 } 3875 3876 3877 Handle<Object> TranslatedState::MaterializeObjectAt(int object_index) { 3878 TranslatedState::ObjectPosition pos = object_positions_[object_index]; 3879 return MaterializeAt(pos.frame_index_, &(pos.value_index_)); 3880 } 3881 3882 3883 bool TranslatedState::GetAdaptedArguments(Handle<JSObject>* result, 3884 int frame_index) { 3885 if (frame_index == 0) { 3886 // Top level frame -> we need to go to the parent frame on the stack. 3887 if (!has_adapted_arguments_) return false; 3888 3889 // This is top level frame, so we need to go to the stack to get 3890 // this function's argument. (Note that this relies on not inlining 3891 // recursive functions!) 3892 Handle<JSFunction> function = 3893 Handle<JSFunction>::cast(frames_[frame_index].front().GetValue()); 3894 *result = Accessors::FunctionGetArguments(function); 3895 return true; 3896 } else { 3897 TranslatedFrame* previous_frame = &(frames_[frame_index]); 3898 if (previous_frame->kind() != TranslatedFrame::kArgumentsAdaptor) { 3899 return false; 3900 } 3901 // We get the adapted arguments from the parent translation. 3902 int length = previous_frame->height(); 3903 Handle<JSFunction> function = 3904 Handle<JSFunction>::cast(previous_frame->front().GetValue()); 3905 Handle<JSObject> arguments = 3906 isolate_->factory()->NewArgumentsObject(function, length); 3907 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length); 3908 arguments->set_elements(*array); 3909 TranslatedFrame::iterator arg_iterator = previous_frame->begin(); 3910 arg_iterator++; // Skip function. 
3911 for (int i = 0; i < length; ++i) { 3912 Handle<Object> value = arg_iterator->GetValue(); 3913 array->set(i, *value); 3914 arg_iterator++; 3915 } 3916 CHECK(arg_iterator == previous_frame->end()); 3917 *result = arguments; 3918 return true; 3919 } 3920 } 3921 3922 3923 TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex( 3924 int jsframe_index, int* args_count) { 3925 for (size_t i = 0; i < frames_.size(); i++) { 3926 if (frames_[i].kind() == TranslatedFrame::kFunction || 3927 frames_[i].kind() == TranslatedFrame::kInterpretedFunction) { 3928 if (jsframe_index > 0) { 3929 jsframe_index--; 3930 } else { 3931 // We have the JS function frame, now check if it has arguments adaptor. 3932 if (i > 0 && 3933 frames_[i - 1].kind() == TranslatedFrame::kArgumentsAdaptor) { 3934 *args_count = frames_[i - 1].height(); 3935 return &(frames_[i - 1]); 3936 } 3937 *args_count = 3938 frames_[i].shared_info()->internal_formal_parameter_count() + 1; 3939 return &(frames_[i]); 3940 } 3941 } 3942 } 3943 return nullptr; 3944 } 3945 3946 3947 void TranslatedState::StoreMaterializedValuesAndDeopt() { 3948 MaterializedObjectStore* materialized_store = 3949 isolate_->materialized_object_store(); 3950 Handle<FixedArray> previously_materialized_objects = 3951 materialized_store->Get(stack_frame_pointer_); 3952 3953 Handle<Object> marker = isolate_->factory()->arguments_marker(); 3954 3955 int length = static_cast<int>(object_positions_.size()); 3956 bool new_store = false; 3957 if (previously_materialized_objects.is_null()) { 3958 previously_materialized_objects = 3959 isolate_->factory()->NewFixedArray(length); 3960 for (int i = 0; i < length; i++) { 3961 previously_materialized_objects->set(i, *marker); 3962 } 3963 new_store = true; 3964 } 3965 3966 CHECK_EQ(length, previously_materialized_objects->length()); 3967 3968 bool value_changed = false; 3969 for (int i = 0; i < length; i++) { 3970 TranslatedState::ObjectPosition pos = object_positions_[i]; 3971 TranslatedValue* 
value_info = 3972 &(frames_[pos.frame_index_].values_[pos.value_index_]); 3973 3974 CHECK(value_info->IsMaterializedObject()); 3975 3976 Handle<Object> value(value_info->GetRawValue(), isolate_); 3977 3978 if (!value.is_identical_to(marker)) { 3979 if (previously_materialized_objects->get(i) == *marker) { 3980 previously_materialized_objects->set(i, *value); 3981 value_changed = true; 3982 } else { 3983 CHECK(previously_materialized_objects->get(i) == *value); 3984 } 3985 } 3986 } 3987 if (new_store && value_changed) { 3988 materialized_store->Set(stack_frame_pointer_, 3989 previously_materialized_objects); 3990 CHECK(frames_[0].kind() == TranslatedFrame::kFunction || 3991 frames_[0].kind() == TranslatedFrame::kInterpretedFunction || 3992 frames_[0].kind() == TranslatedFrame::kTailCallerFunction); 3993 Object* const function = frames_[0].front().GetRawValue(); 3994 Deoptimizer::DeoptimizeFunction(JSFunction::cast(function)); 3995 } 3996 } 3997 3998 3999 void TranslatedState::UpdateFromPreviouslyMaterializedObjects() { 4000 MaterializedObjectStore* materialized_store = 4001 isolate_->materialized_object_store(); 4002 Handle<FixedArray> previously_materialized_objects = 4003 materialized_store->Get(stack_frame_pointer_); 4004 4005 // If we have no previously materialized objects, there is nothing to do. 4006 if (previously_materialized_objects.is_null()) return; 4007 4008 Handle<Object> marker = isolate_->factory()->arguments_marker(); 4009 4010 int length = static_cast<int>(object_positions_.size()); 4011 CHECK_EQ(length, previously_materialized_objects->length()); 4012 4013 for (int i = 0; i < length; i++) { 4014 // For a previously materialized objects, inject their value into the 4015 // translated values. 
4016 if (previously_materialized_objects->get(i) != *marker) { 4017 TranslatedState::ObjectPosition pos = object_positions_[i]; 4018 TranslatedValue* value_info = 4019 &(frames_[pos.frame_index_].values_[pos.value_index_]); 4020 CHECK(value_info->IsMaterializedObject()); 4021 4022 value_info->value_ = 4023 Handle<Object>(previously_materialized_objects->get(i), isolate_); 4024 } 4025 } 4026 } 4027 4028 } // namespace internal 4029 } // namespace v8 4030