// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 28 #include "v8.h" 29 30 #include "codegen.h" 31 #include "deoptimizer.h" 32 #include "disasm.h" 33 #include "full-codegen.h" 34 #include "global-handles.h" 35 #include "macro-assembler.h" 36 #include "prettyprinter.h" 37 38 39 namespace v8 { 40 namespace internal { 41 42 DeoptimizerData::DeoptimizerData() { 43 eager_deoptimization_entry_code_ = NULL; 44 lazy_deoptimization_entry_code_ = NULL; 45 current_ = NULL; 46 deoptimizing_code_list_ = NULL; 47 #ifdef ENABLE_DEBUGGER_SUPPORT 48 deoptimized_frame_info_ = NULL; 49 #endif 50 } 51 52 53 DeoptimizerData::~DeoptimizerData() { 54 if (eager_deoptimization_entry_code_ != NULL) { 55 Isolate::Current()->memory_allocator()->Free( 56 eager_deoptimization_entry_code_); 57 eager_deoptimization_entry_code_ = NULL; 58 } 59 if (lazy_deoptimization_entry_code_ != NULL) { 60 Isolate::Current()->memory_allocator()->Free( 61 lazy_deoptimization_entry_code_); 62 lazy_deoptimization_entry_code_ = NULL; 63 } 64 } 65 66 67 #ifdef ENABLE_DEBUGGER_SUPPORT 68 void DeoptimizerData::Iterate(ObjectVisitor* v) { 69 if (deoptimized_frame_info_ != NULL) { 70 deoptimized_frame_info_->Iterate(v); 71 } 72 } 73 #endif 74 75 76 // We rely on this function not causing a GC. It is called from generated code 77 // without having a real stack frame in place. 
78 Deoptimizer* Deoptimizer::New(JSFunction* function, 79 BailoutType type, 80 unsigned bailout_id, 81 Address from, 82 int fp_to_sp_delta, 83 Isolate* isolate) { 84 ASSERT(isolate == Isolate::Current()); 85 Deoptimizer* deoptimizer = new Deoptimizer(isolate, 86 function, 87 type, 88 bailout_id, 89 from, 90 fp_to_sp_delta, 91 NULL); 92 ASSERT(isolate->deoptimizer_data()->current_ == NULL); 93 isolate->deoptimizer_data()->current_ = deoptimizer; 94 return deoptimizer; 95 } 96 97 98 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) { 99 ASSERT(isolate == Isolate::Current()); 100 Deoptimizer* result = isolate->deoptimizer_data()->current_; 101 ASSERT(result != NULL); 102 result->DeleteFrameDescriptions(); 103 isolate->deoptimizer_data()->current_ = NULL; 104 return result; 105 } 106 107 108 int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) { 109 if (jsframe_index == 0) return 0; 110 111 int frame_index = 0; 112 while (jsframe_index >= 0) { 113 FrameDescription* frame = output_[frame_index]; 114 if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) { 115 jsframe_index--; 116 } 117 frame_index++; 118 } 119 120 return frame_index - 1; 121 } 122 123 124 #ifdef ENABLE_DEBUGGER_SUPPORT 125 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame( 126 JavaScriptFrame* frame, 127 int jsframe_index, 128 Isolate* isolate) { 129 ASSERT(isolate == Isolate::Current()); 130 ASSERT(frame->is_optimized()); 131 ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL); 132 133 // Get the function and code from the frame. 134 JSFunction* function = JSFunction::cast(frame->function()); 135 Code* code = frame->LookupCode(); 136 137 // Locate the deoptimization point in the code. As we are at a call the 138 // return address must be at a place in the code with deoptimization support. 
139 SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc()); 140 int deoptimization_index = safepoint_entry.deoptimization_index(); 141 ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex); 142 143 // Always use the actual stack slots when calculating the fp to sp 144 // delta adding two for the function and context. 145 unsigned stack_slots = code->stack_slots(); 146 unsigned fp_to_sp_delta = ((stack_slots + 2) * kPointerSize); 147 148 Deoptimizer* deoptimizer = new Deoptimizer(isolate, 149 function, 150 Deoptimizer::DEBUGGER, 151 deoptimization_index, 152 frame->pc(), 153 fp_to_sp_delta, 154 code); 155 Address tos = frame->fp() - fp_to_sp_delta; 156 deoptimizer->FillInputFrame(tos, frame); 157 158 // Calculate the output frames. 159 Deoptimizer::ComputeOutputFrames(deoptimizer); 160 161 // Create the GC safe output frame information and register it for GC 162 // handling. 163 ASSERT_LT(jsframe_index, deoptimizer->jsframe_count()); 164 165 // Convert JS frame index into frame index. 166 int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index); 167 168 bool has_arguments_adaptor = 169 frame_index > 0 && 170 deoptimizer->output_[frame_index - 1]->GetFrameType() == 171 StackFrame::ARGUMENTS_ADAPTOR; 172 173 int construct_offset = has_arguments_adaptor ? 2 : 1; 174 bool has_construct_stub = 175 frame_index >= construct_offset && 176 deoptimizer->output_[frame_index - construct_offset]->GetFrameType() == 177 StackFrame::CONSTRUCT; 178 179 DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer, 180 frame_index, 181 has_arguments_adaptor, 182 has_construct_stub); 183 isolate->deoptimizer_data()->deoptimized_frame_info_ = info; 184 185 // Get the "simulated" top and size for the requested frame. 186 FrameDescription* parameters_frame = 187 deoptimizer->output_[ 188 has_arguments_adaptor ? 
(frame_index - 1) : frame_index]; 189 190 uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize; 191 Address parameters_top = reinterpret_cast<Address>( 192 parameters_frame->GetTop() + (parameters_frame->GetFrameSize() - 193 parameters_size)); 194 195 uint32_t expressions_size = info->expression_count() * kPointerSize; 196 Address expressions_top = reinterpret_cast<Address>( 197 deoptimizer->output_[frame_index]->GetTop()); 198 199 // Done with the GC-unsafe frame descriptions. This re-enables allocation. 200 deoptimizer->DeleteFrameDescriptions(); 201 202 // Allocate a heap number for the doubles belonging to this frame. 203 deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame( 204 parameters_top, parameters_size, expressions_top, expressions_size, info); 205 206 // Finished using the deoptimizer instance. 207 delete deoptimizer; 208 209 return info; 210 } 211 212 213 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info, 214 Isolate* isolate) { 215 ASSERT(isolate == Isolate::Current()); 216 ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info); 217 delete info; 218 isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL; 219 } 220 #endif 221 222 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, 223 int count, 224 BailoutType type) { 225 TableEntryGenerator generator(masm, type, count); 226 generator.Generate(); 227 } 228 229 230 class DeoptimizingVisitor : public OptimizedFunctionVisitor { 231 public: 232 virtual void EnterContext(Context* context) { 233 if (FLAG_trace_deopt) { 234 PrintF("[deoptimize context: %" V8PRIxPTR "]\n", 235 reinterpret_cast<intptr_t>(context)); 236 } 237 } 238 239 virtual void VisitFunction(JSFunction* function) { 240 Deoptimizer::DeoptimizeFunction(function); 241 } 242 243 virtual void LeaveContext(Context* context) { 244 context->ClearOptimizedFunctions(); 245 } 246 }; 247 248 249 void Deoptimizer::DeoptimizeAll() { 250 AssertNoAllocation 
no_allocation; 251 252 if (FLAG_trace_deopt) { 253 PrintF("[deoptimize all contexts]\n"); 254 } 255 256 DeoptimizingVisitor visitor; 257 VisitAllOptimizedFunctions(&visitor); 258 } 259 260 261 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) { 262 AssertNoAllocation no_allocation; 263 264 DeoptimizingVisitor visitor; 265 VisitAllOptimizedFunctionsForGlobalObject(object, &visitor); 266 } 267 268 269 void Deoptimizer::VisitAllOptimizedFunctionsForContext( 270 Context* context, OptimizedFunctionVisitor* visitor) { 271 AssertNoAllocation no_allocation; 272 273 ASSERT(context->IsGlobalContext()); 274 275 visitor->EnterContext(context); 276 // Run through the list of optimized functions and deoptimize them. 277 Object* element = context->OptimizedFunctionsListHead(); 278 while (!element->IsUndefined()) { 279 JSFunction* element_function = JSFunction::cast(element); 280 // Get the next link before deoptimizing as deoptimizing will clear the 281 // next link. 282 element = element_function->next_function_link(); 283 visitor->VisitFunction(element_function); 284 } 285 visitor->LeaveContext(context); 286 } 287 288 289 void Deoptimizer::VisitAllOptimizedFunctionsForGlobalObject( 290 JSObject* object, OptimizedFunctionVisitor* visitor) { 291 AssertNoAllocation no_allocation; 292 293 if (object->IsJSGlobalProxy()) { 294 Object* proto = object->GetPrototype(); 295 ASSERT(proto->IsJSGlobalObject()); 296 VisitAllOptimizedFunctionsForContext( 297 GlobalObject::cast(proto)->global_context(), visitor); 298 } else if (object->IsGlobalObject()) { 299 VisitAllOptimizedFunctionsForContext( 300 GlobalObject::cast(object)->global_context(), visitor); 301 } 302 } 303 304 305 void Deoptimizer::VisitAllOptimizedFunctions( 306 OptimizedFunctionVisitor* visitor) { 307 AssertNoAllocation no_allocation; 308 309 // Run through the list of all global contexts and deoptimize. 
310 Object* context = Isolate::Current()->heap()->global_contexts_list(); 311 while (!context->IsUndefined()) { 312 // GC can happen when the context is not fully initialized, 313 // so the global field of the context can be undefined. 314 Object* global = Context::cast(context)->get(Context::GLOBAL_INDEX); 315 if (!global->IsUndefined()) { 316 VisitAllOptimizedFunctionsForGlobalObject(JSObject::cast(global), 317 visitor); 318 } 319 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 320 } 321 } 322 323 324 void Deoptimizer::HandleWeakDeoptimizedCode( 325 v8::Persistent<v8::Value> obj, void* data) { 326 DeoptimizingCodeListNode* node = 327 reinterpret_cast<DeoptimizingCodeListNode*>(data); 328 RemoveDeoptimizingCode(*node->code()); 329 #ifdef DEBUG 330 node = Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; 331 while (node != NULL) { 332 ASSERT(node != reinterpret_cast<DeoptimizingCodeListNode*>(data)); 333 node = node->next(); 334 } 335 #endif 336 } 337 338 339 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) { 340 deoptimizer->DoComputeOutputFrames(); 341 } 342 343 344 Deoptimizer::Deoptimizer(Isolate* isolate, 345 JSFunction* function, 346 BailoutType type, 347 unsigned bailout_id, 348 Address from, 349 int fp_to_sp_delta, 350 Code* optimized_code) 351 : isolate_(isolate), 352 function_(function), 353 bailout_id_(bailout_id), 354 bailout_type_(type), 355 from_(from), 356 fp_to_sp_delta_(fp_to_sp_delta), 357 input_(NULL), 358 output_count_(0), 359 jsframe_count_(0), 360 output_(NULL), 361 deferred_heap_numbers_(0) { 362 if (FLAG_trace_deopt && type != OSR) { 363 if (type == DEBUGGER) { 364 PrintF("**** DEOPT FOR DEBUGGER: "); 365 } else { 366 PrintF("**** DEOPT: "); 367 } 368 function->PrintName(); 369 PrintF(" at bailout #%u, address 0x%" V8PRIxPTR ", frame size %d\n", 370 bailout_id, 371 reinterpret_cast<intptr_t>(from), 372 fp_to_sp_delta - (2 * kPointerSize)); 373 } else if (FLAG_trace_osr && type == OSR) { 374 
PrintF("**** OSR: "); 375 function->PrintName(); 376 PrintF(" at ast id #%u, address 0x%" V8PRIxPTR ", frame size %d\n", 377 bailout_id, 378 reinterpret_cast<intptr_t>(from), 379 fp_to_sp_delta - (2 * kPointerSize)); 380 } 381 // Find the optimized code. 382 if (type == EAGER) { 383 ASSERT(from == NULL); 384 optimized_code_ = function_->code(); 385 if (FLAG_trace_deopt && FLAG_code_comments) { 386 // Print instruction associated with this bailout. 387 const char* last_comment = NULL; 388 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT) 389 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); 390 for (RelocIterator it(optimized_code_, mask); !it.done(); it.next()) { 391 RelocInfo* info = it.rinfo(); 392 if (info->rmode() == RelocInfo::COMMENT) { 393 last_comment = reinterpret_cast<const char*>(info->data()); 394 } 395 if (info->rmode() == RelocInfo::RUNTIME_ENTRY) { 396 unsigned id = Deoptimizer::GetDeoptimizationId( 397 info->target_address(), Deoptimizer::EAGER); 398 if (id == bailout_id && last_comment != NULL) { 399 PrintF(" %s\n", last_comment); 400 break; 401 } 402 } 403 } 404 } 405 } else if (type == LAZY) { 406 optimized_code_ = FindDeoptimizingCodeFromAddress(from); 407 ASSERT(optimized_code_ != NULL); 408 } else if (type == OSR) { 409 // The function has already been optimized and we're transitioning 410 // from the unoptimized shared version to the optimized one in the 411 // function. The return address (from) points to unoptimized code. 
412 optimized_code_ = function_->code(); 413 ASSERT(optimized_code_->kind() == Code::OPTIMIZED_FUNCTION); 414 ASSERT(!optimized_code_->contains(from)); 415 } else if (type == DEBUGGER) { 416 optimized_code_ = optimized_code; 417 ASSERT(optimized_code_->contains(from)); 418 } 419 ASSERT(HEAP->allow_allocation(false)); 420 unsigned size = ComputeInputFrameSize(); 421 input_ = new(size) FrameDescription(size, function); 422 input_->SetFrameType(StackFrame::JAVA_SCRIPT); 423 } 424 425 426 Deoptimizer::~Deoptimizer() { 427 ASSERT(input_ == NULL && output_ == NULL); 428 } 429 430 431 void Deoptimizer::DeleteFrameDescriptions() { 432 delete input_; 433 for (int i = 0; i < output_count_; ++i) { 434 if (output_[i] != input_) delete output_[i]; 435 } 436 delete[] output_; 437 input_ = NULL; 438 output_ = NULL; 439 ASSERT(!HEAP->allow_allocation(true)); 440 } 441 442 443 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) { 444 ASSERT(id >= 0); 445 if (id >= kNumberOfEntries) return NULL; 446 MemoryChunk* base = NULL; 447 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); 448 if (type == EAGER) { 449 if (data->eager_deoptimization_entry_code_ == NULL) { 450 data->eager_deoptimization_entry_code_ = CreateCode(type); 451 } 452 base = data->eager_deoptimization_entry_code_; 453 } else { 454 if (data->lazy_deoptimization_entry_code_ == NULL) { 455 data->lazy_deoptimization_entry_code_ = CreateCode(type); 456 } 457 base = data->lazy_deoptimization_entry_code_; 458 } 459 return 460 static_cast<Address>(base->area_start()) + (id * table_entry_size_); 461 } 462 463 464 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { 465 MemoryChunk* base = NULL; 466 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); 467 if (type == EAGER) { 468 base = data->eager_deoptimization_entry_code_; 469 } else { 470 base = data->lazy_deoptimization_entry_code_; 471 } 472 if (base == NULL || 473 addr < base->area_start() || 474 addr >= 
base->area_start() + 475 (kNumberOfEntries * table_entry_size_)) { 476 return kNotDeoptimizationEntry; 477 } 478 ASSERT_EQ(0, 479 static_cast<int>(addr - base->area_start()) % table_entry_size_); 480 return static_cast<int>(addr - base->area_start()) / table_entry_size_; 481 } 482 483 484 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, 485 unsigned id, 486 SharedFunctionInfo* shared) { 487 // TODO(kasperl): For now, we do a simple linear search for the PC 488 // offset associated with the given node id. This should probably be 489 // changed to a binary search. 490 int length = data->DeoptPoints(); 491 Smi* smi_id = Smi::FromInt(id); 492 for (int i = 0; i < length; i++) { 493 if (data->AstId(i) == smi_id) { 494 return data->PcAndState(i)->value(); 495 } 496 } 497 PrintF("[couldn't find pc offset for node=%u]\n", id); 498 PrintF("[method: %s]\n", *shared->DebugName()->ToCString()); 499 // Print the source code if available. 500 HeapStringAllocator string_allocator; 501 StringStream stream(&string_allocator); 502 shared->SourceCodePrint(&stream, -1); 503 PrintF("[source:\n%s\n]", *stream.ToCString()); 504 505 UNREACHABLE(); 506 return -1; 507 } 508 509 510 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { 511 int length = 0; 512 DeoptimizingCodeListNode* node = 513 isolate->deoptimizer_data()->deoptimizing_code_list_; 514 while (node != NULL) { 515 length++; 516 node = node->next(); 517 } 518 return length; 519 } 520 521 522 // We rely on this function not causing a GC. It is called from generated code 523 // without having a real stack frame in place. 524 void Deoptimizer::DoComputeOutputFrames() { 525 if (bailout_type_ == OSR) { 526 DoComputeOsrOutputFrame(); 527 return; 528 } 529 530 // Print some helpful diagnostic information. 531 int64_t start = OS::Ticks(); 532 if (FLAG_trace_deopt) { 533 PrintF("[deoptimizing%s: begin 0x%08" V8PRIxPTR " ", 534 (bailout_type_ == LAZY ? 
" (lazy)" : ""), 535 reinterpret_cast<intptr_t>(function_)); 536 function_->PrintName(); 537 PrintF(" @%d]\n", bailout_id_); 538 } 539 540 // Determine basic deoptimization information. The optimized frame is 541 // described by the input data. 542 DeoptimizationInputData* input_data = 543 DeoptimizationInputData::cast(optimized_code_->deoptimization_data()); 544 unsigned node_id = input_data->AstId(bailout_id_)->value(); 545 ByteArray* translations = input_data->TranslationByteArray(); 546 unsigned translation_index = 547 input_data->TranslationIndex(bailout_id_)->value(); 548 549 // Do the input frame to output frame(s) translation. 550 TranslationIterator iterator(translations, translation_index); 551 Translation::Opcode opcode = 552 static_cast<Translation::Opcode>(iterator.Next()); 553 ASSERT(Translation::BEGIN == opcode); 554 USE(opcode); 555 // Read the number of output frames and allocate an array for their 556 // descriptions. 557 int count = iterator.Next(); 558 iterator.Next(); // Drop JS frames count. 559 ASSERT(output_ == NULL); 560 output_ = new FrameDescription*[count]; 561 for (int i = 0; i < count; ++i) { 562 output_[i] = NULL; 563 } 564 output_count_ = count; 565 566 // Translate each output frame. 567 for (int i = 0; i < count; ++i) { 568 // Read the ast node id, function, and frame height for this output frame. 569 Translation::Opcode opcode = 570 static_cast<Translation::Opcode>(iterator.Next()); 571 switch (opcode) { 572 case Translation::JS_FRAME: 573 DoComputeJSFrame(&iterator, i); 574 jsframe_count_++; 575 break; 576 case Translation::ARGUMENTS_ADAPTOR_FRAME: 577 DoComputeArgumentsAdaptorFrame(&iterator, i); 578 break; 579 case Translation::CONSTRUCT_STUB_FRAME: 580 DoComputeConstructStubFrame(&iterator, i); 581 break; 582 default: 583 UNREACHABLE(); 584 break; 585 } 586 } 587 588 // Print some helpful diagnostic information. 
589 if (FLAG_trace_deopt) { 590 double ms = static_cast<double>(OS::Ticks() - start) / 1000; 591 int index = output_count_ - 1; // Index of the topmost frame. 592 JSFunction* function = output_[index]->GetFunction(); 593 PrintF("[deoptimizing: end 0x%08" V8PRIxPTR " ", 594 reinterpret_cast<intptr_t>(function)); 595 function->PrintName(); 596 PrintF(" => node=%u, pc=0x%08" V8PRIxPTR ", state=%s, took %0.3f ms]\n", 597 node_id, 598 output_[index]->GetPc(), 599 FullCodeGenerator::State2String( 600 static_cast<FullCodeGenerator::State>( 601 output_[index]->GetState()->value())), 602 ms); 603 } 604 } 605 606 607 void Deoptimizer::MaterializeHeapNumbers() { 608 ASSERT_NE(DEBUGGER, bailout_type_); 609 for (int i = 0; i < deferred_heap_numbers_.length(); i++) { 610 HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i]; 611 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); 612 if (FLAG_trace_deopt) { 613 PrintF("Materializing a new heap number %p [%e] in slot %p\n", 614 reinterpret_cast<void*>(*num), 615 d.value(), 616 d.slot_address()); 617 } 618 619 Memory::Object_at(d.slot_address()) = *num; 620 } 621 } 622 623 624 #ifdef ENABLE_DEBUGGER_SUPPORT 625 void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame( 626 Address parameters_top, 627 uint32_t parameters_size, 628 Address expressions_top, 629 uint32_t expressions_size, 630 DeoptimizedFrameInfo* info) { 631 ASSERT_EQ(DEBUGGER, bailout_type_); 632 Address parameters_bottom = parameters_top + parameters_size; 633 Address expressions_bottom = expressions_top + expressions_size; 634 for (int i = 0; i < deferred_heap_numbers_.length(); i++) { 635 HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i]; 636 637 // Check of the heap number to materialize actually belong to the frame 638 // being extracted. 
639 Address slot = d.slot_address(); 640 if (parameters_top <= slot && slot < parameters_bottom) { 641 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); 642 643 int index = (info->parameters_count() - 1) - 644 static_cast<int>(slot - parameters_top) / kPointerSize; 645 646 if (FLAG_trace_deopt) { 647 PrintF("Materializing a new heap number %p [%e] in slot %p" 648 "for parameter slot #%d\n", 649 reinterpret_cast<void*>(*num), 650 d.value(), 651 d.slot_address(), 652 index); 653 } 654 655 info->SetParameter(index, *num); 656 } else if (expressions_top <= slot && slot < expressions_bottom) { 657 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); 658 659 int index = info->expression_count() - 1 - 660 static_cast<int>(slot - expressions_top) / kPointerSize; 661 662 if (FLAG_trace_deopt) { 663 PrintF("Materializing a new heap number %p [%e] in slot %p" 664 "for expression slot #%d\n", 665 reinterpret_cast<void*>(*num), 666 d.value(), 667 d.slot_address(), 668 index); 669 } 670 671 info->SetExpression(index, *num); 672 } 673 } 674 } 675 #endif 676 677 678 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator, 679 int frame_index, 680 unsigned output_offset) { 681 disasm::NameConverter converter; 682 // A GC-safe temporary placeholder that we can put in the output frame. 683 const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0)); 684 685 // Ignore commands marked as duplicate and act on the first non-duplicate. 
686 Translation::Opcode opcode = 687 static_cast<Translation::Opcode>(iterator->Next()); 688 while (opcode == Translation::DUPLICATE) { 689 opcode = static_cast<Translation::Opcode>(iterator->Next()); 690 iterator->Skip(Translation::NumberOfOperandsFor(opcode)); 691 opcode = static_cast<Translation::Opcode>(iterator->Next()); 692 } 693 694 switch (opcode) { 695 case Translation::BEGIN: 696 case Translation::JS_FRAME: 697 case Translation::ARGUMENTS_ADAPTOR_FRAME: 698 case Translation::CONSTRUCT_STUB_FRAME: 699 case Translation::DUPLICATE: 700 UNREACHABLE(); 701 return; 702 703 case Translation::REGISTER: { 704 int input_reg = iterator->Next(); 705 intptr_t input_value = input_->GetRegister(input_reg); 706 if (FLAG_trace_deopt) { 707 PrintF( 708 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ", 709 output_[frame_index]->GetTop() + output_offset, 710 output_offset, 711 input_value, 712 converter.NameOfCPURegister(input_reg)); 713 reinterpret_cast<Object*>(input_value)->ShortPrint(); 714 PrintF("\n"); 715 } 716 output_[frame_index]->SetFrameSlot(output_offset, input_value); 717 return; 718 } 719 720 case Translation::INT32_REGISTER: { 721 int input_reg = iterator->Next(); 722 intptr_t value = input_->GetRegister(input_reg); 723 bool is_smi = Smi::IsValid(value); 724 if (FLAG_trace_deopt) { 725 PrintF( 726 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n", 727 output_[frame_index]->GetTop() + output_offset, 728 output_offset, 729 value, 730 converter.NameOfCPURegister(input_reg), 731 is_smi ? "smi" : "heap number"); 732 } 733 if (is_smi) { 734 intptr_t tagged_value = 735 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 736 output_[frame_index]->SetFrameSlot(output_offset, tagged_value); 737 } else { 738 // We save the untagged value on the side and store a GC-safe 739 // temporary placeholder in the frame. 
740 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, 741 static_cast<double>(static_cast<int32_t>(value))); 742 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 743 } 744 return; 745 } 746 747 case Translation::DOUBLE_REGISTER: { 748 int input_reg = iterator->Next(); 749 double value = input_->GetDoubleRegister(input_reg); 750 if (FLAG_trace_deopt) { 751 PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n", 752 output_[frame_index]->GetTop() + output_offset, 753 output_offset, 754 value, 755 DoubleRegister::AllocationIndexToString(input_reg)); 756 } 757 // We save the untagged value on the side and store a GC-safe 758 // temporary placeholder in the frame. 759 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value); 760 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 761 return; 762 } 763 764 case Translation::STACK_SLOT: { 765 int input_slot_index = iterator->Next(); 766 unsigned input_offset = 767 input_->GetOffsetFromSlotIndex(input_slot_index); 768 intptr_t input_value = input_->GetFrameSlot(input_offset); 769 if (FLAG_trace_deopt) { 770 PrintF(" 0x%08" V8PRIxPTR ": ", 771 output_[frame_index]->GetTop() + output_offset); 772 PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d] ", 773 output_offset, 774 input_value, 775 input_offset); 776 reinterpret_cast<Object*>(input_value)->ShortPrint(); 777 PrintF("\n"); 778 } 779 output_[frame_index]->SetFrameSlot(output_offset, input_value); 780 return; 781 } 782 783 case Translation::INT32_STACK_SLOT: { 784 int input_slot_index = iterator->Next(); 785 unsigned input_offset = 786 input_->GetOffsetFromSlotIndex(input_slot_index); 787 intptr_t value = input_->GetFrameSlot(input_offset); 788 bool is_smi = Smi::IsValid(value); 789 if (FLAG_trace_deopt) { 790 PrintF(" 0x%08" V8PRIxPTR ": ", 791 output_[frame_index]->GetTop() + output_offset); 792 PrintF("[top + %d] <- %" V8PRIdPTR " ; [esp + %d] (%s)\n", 793 output_offset, 794 value, 795 input_offset, 796 is_smi ? 
"smi" : "heap number"); 797 } 798 if (is_smi) { 799 intptr_t tagged_value = 800 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value))); 801 output_[frame_index]->SetFrameSlot(output_offset, tagged_value); 802 } else { 803 // We save the untagged value on the side and store a GC-safe 804 // temporary placeholder in the frame. 805 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, 806 static_cast<double>(static_cast<int32_t>(value))); 807 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 808 } 809 return; 810 } 811 812 case Translation::DOUBLE_STACK_SLOT: { 813 int input_slot_index = iterator->Next(); 814 unsigned input_offset = 815 input_->GetOffsetFromSlotIndex(input_slot_index); 816 double value = input_->GetDoubleFrameSlot(input_offset); 817 if (FLAG_trace_deopt) { 818 PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [esp + %d]\n", 819 output_[frame_index]->GetTop() + output_offset, 820 output_offset, 821 value, 822 input_offset); 823 } 824 // We save the untagged value on the side and store a GC-safe 825 // temporary placeholder in the frame. 826 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value); 827 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder); 828 return; 829 } 830 831 case Translation::LITERAL: { 832 Object* literal = ComputeLiteral(iterator->Next()); 833 if (FLAG_trace_deopt) { 834 PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- ", 835 output_[frame_index]->GetTop() + output_offset, 836 output_offset); 837 literal->ShortPrint(); 838 PrintF(" ; literal\n"); 839 } 840 intptr_t value = reinterpret_cast<intptr_t>(literal); 841 output_[frame_index]->SetFrameSlot(output_offset, value); 842 return; 843 } 844 845 case Translation::ARGUMENTS_OBJECT: { 846 // Use the arguments marker value as a sentinel and fill in the arguments 847 // object after the deoptimized frame is built. 
      // Materialization of the arguments object is deferred: store the
      // arguments marker sentinel in the output slot instead of a real object.
      if (FLAG_trace_deopt) {
        PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- ",
               output_[frame_index]->GetTop() + output_offset,
               output_offset);
        isolate_->heap()->arguments_marker()->ShortPrint();
        PrintF(" ; arguments object\n");
      }
      intptr_t value = reinterpret_cast<intptr_t>(
          isolate_->heap()->arguments_marker());
      output_[frame_index]->SetFrameSlot(output_offset, value);
      return;
    }
  }
}


// Translate one command for OSR (on-stack replacement): copy the value at
// [sp + *input_offset] of the unoptimized (input) frame into the location
// in the optimized (output) frame named by the translation command.
// Returns false to abort OSR when a value cannot be represented in the
// form the optimized code expects (e.g. a non-number where an int32 or
// double is required).
bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
                                        int* input_offset) {
  disasm::NameConverter converter;
  FrameDescription* output = output_[0];

  // The input values are all part of the unoptimized frame so they
  // are all tagged pointers.
  uintptr_t input_value = input_->GetFrameSlot(*input_offset);
  Object* input_object = reinterpret_cast<Object*>(input_value);

  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  // A DUPLICATE prefix means the same input slot is consumed again by the
  // next command, so the input offset must not be advanced afterwards.
  bool duplicate = (opcode == Translation::DUPLICATE);
  if (duplicate) {
    opcode = static_cast<Translation::Opcode>(iterator->Next());
  }

  switch (opcode) {
    case Translation::BEGIN:
    case Translation::JS_FRAME:
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
    case Translation::CONSTRUCT_STUB_FRAME:
    case Translation::DUPLICATE:
      UNREACHABLE();  // Malformed input.
      return false;

    case Translation::REGISTER: {
      int output_reg = iterator->Next();
      if (FLAG_trace_osr) {
        PrintF(" %s <- 0x%08" V8PRIxPTR " ; [sp + %d]\n",
               converter.NameOfCPURegister(output_reg),
               input_value,
               *input_offset);
      }
      output->SetRegister(output_reg, input_value);
      break;
    }

    case Translation::INT32_REGISTER: {
      // Abort OSR if we don't have a number.
      if (!input_object->IsNumber()) return false;

      int output_reg = iterator->Next();
      int int32_value = input_object->IsSmi()
          ? Smi::cast(input_object)->value()
          : FastD2I(input_object->Number());
      // Abort the translation if the conversion lost information.
      if (!input_object->IsSmi() &&
          FastI2D(int32_value) != input_object->Number()) {
        if (FLAG_trace_osr) {
          PrintF("**** %g could not be converted to int32 ****\n",
                 input_object->Number());
        }
        return false;
      }
      if (FLAG_trace_osr) {
        PrintF(" %s <- %d (int32) ; [sp + %d]\n",
               converter.NameOfCPURegister(output_reg),
               int32_value,
               *input_offset);
      }
      output->SetRegister(output_reg, int32_value);
      break;
    }

    case Translation::DOUBLE_REGISTER: {
      // Abort OSR if we don't have a number.
      if (!input_object->IsNumber()) return false;

      int output_reg = iterator->Next();
      double double_value = input_object->Number();
      if (FLAG_trace_osr) {
        PrintF(" %s <- %g (double) ; [sp + %d]\n",
               DoubleRegister::AllocationIndexToString(output_reg),
               double_value,
               *input_offset);
      }
      output->SetDoubleRegister(output_reg, double_value);
      break;
    }

    case Translation::STACK_SLOT: {
      int output_index = iterator->Next();
      unsigned output_offset =
          output->GetOffsetFromSlotIndex(output_index);
      if (FLAG_trace_osr) {
        PrintF(" [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
               output_offset,
               input_value,
               *input_offset);
        reinterpret_cast<Object*>(input_value)->ShortPrint();
        PrintF("\n");
      }
      output->SetFrameSlot(output_offset, input_value);
      break;
    }

    case Translation::INT32_STACK_SLOT: {
      // Abort OSR if we don't have a number.
      if (!input_object->IsNumber()) return false;

      int output_index = iterator->Next();
      unsigned output_offset =
          output->GetOffsetFromSlotIndex(output_index);
      // NOTE(review): this case converts with DoubleToInt32 while the
      // INT32_REGISTER case above uses FastD2I -- presumably equivalent for
      // values that survive the lossless-conversion check below; verify.
      int int32_value = input_object->IsSmi()
          ? Smi::cast(input_object)->value()
          : DoubleToInt32(input_object->Number());
      // Abort the translation if the conversion lost information.
      if (!input_object->IsSmi() &&
          FastI2D(int32_value) != input_object->Number()) {
        if (FLAG_trace_osr) {
          PrintF("**** %g could not be converted to int32 ****\n",
                 input_object->Number());
        }
        return false;
      }
      if (FLAG_trace_osr) {
        PrintF(" [sp + %d] <- %d (int32) ; [sp + %d]\n",
               output_offset,
               int32_value,
               *input_offset);
      }
      output->SetFrameSlot(output_offset, int32_value);
      break;
    }

    case Translation::DOUBLE_STACK_SLOT: {
      // A double occupies two pointer-sized slots (lower word first).
      static const int kLowerOffset = 0 * kPointerSize;
      static const int kUpperOffset = 1 * kPointerSize;

      // Abort OSR if we don't have a number.
      if (!input_object->IsNumber()) return false;

      int output_index = iterator->Next();
      unsigned output_offset =
          output->GetOffsetFromSlotIndex(output_index);
      double double_value = input_object->Number();
      uint64_t int_value = BitCast<uint64_t, double>(double_value);
      int32_t lower = static_cast<int32_t>(int_value);
      int32_t upper = static_cast<int32_t>(int_value >> kBitsPerInt);
      if (FLAG_trace_osr) {
        PrintF(" [sp + %d] <- 0x%08x (upper bits of %g) ; [sp + %d]\n",
               output_offset + kUpperOffset,
               upper,
               double_value,
               *input_offset);
        PrintF(" [sp + %d] <- 0x%08x (lower bits of %g) ; [sp + %d]\n",
               output_offset + kLowerOffset,
               lower,
               double_value,
               *input_offset);
      }
      output->SetFrameSlot(output_offset + kLowerOffset, lower);
      output->SetFrameSlot(output_offset + kUpperOffset, upper);
      break;
    }

    case Translation::LITERAL: {
      // Just ignore non-materialized literals.
      iterator->Next();
      break;
    }

    case Translation::ARGUMENTS_OBJECT: {
      // Optimized code assumes that the argument object has not been
      // materialized and so bypasses it when doing arguments access.
      // We should have bailed out before starting the frame
      // translation.
      UNREACHABLE();
      return false;
    }
  }

  // Advance to the next input slot unless this command shares its input
  // slot with the next one (DUPLICATE prefix).
  if (!duplicate) *input_offset -= kPointerSize;
  return true;
}


void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
                                      Code* check_code,
                                      Code* replacement_code) {
  // Iterate over the stack check table and patch every stack check
  // call to an unconditional call to the replacement code.
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Address stack_check_cursor = unoptimized_code->instruction_start() +
      unoptimized_code->stack_check_table_offset();
  // The table starts with a 32-bit length, followed by pairs of 32-bit
  // words; the pc offset is the second word of each pair.
  uint32_t table_length = Memory::uint32_at(stack_check_cursor);
  stack_check_cursor += kIntSize;
  for (uint32_t i = 0; i < table_length; ++i) {
    uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize);
    Address pc_after = unoptimized_code->instruction_start() + pc_offset;
    PatchStackCheckCodeAt(unoptimized_code,
                          pc_after,
                          check_code,
                          replacement_code);
    stack_check_cursor += 2 * kIntSize;
  }
}


void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code,
                                       Code* check_code,
                                       Code* replacement_code) {
  // Iterate over the stack check table and revert the patched
  // stack check calls.
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Address stack_check_cursor = unoptimized_code->instruction_start() +
      unoptimized_code->stack_check_table_offset();
  // Same table layout as in PatchStackCheckCode: a 32-bit length followed
  // by pairs of 32-bit words; the pc offset is the second word of a pair.
  uint32_t table_length = Memory::uint32_at(stack_check_cursor);
  stack_check_cursor += kIntSize;
  for (uint32_t i = 0; i < table_length; ++i) {
    uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize);
    Address pc_after = unoptimized_code->instruction_start() + pc_offset;
    RevertStackCheckCodeAt(unoptimized_code,
                           pc_after,
                           check_code,
                           replacement_code);
    stack_check_cursor += 2 * kIntSize;
  }
}


// Size in bytes of the topmost (input) frame at the point of
// deoptimization, derived from the fixed frame size and the fp-to-sp
// delta recorded when the deoptimizer was created.
unsigned Deoptimizer::ComputeInputFrameSize() const {
  unsigned fixed_size = ComputeFixedSize(function_);
  // The fp-to-sp delta already takes the context and the function
  // into account so we have to avoid double counting them (-2).
  unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize);
#ifdef DEBUG
  if (bailout_type_ == OSR) {
    // TODO(kasperl): It would be nice if we could verify that the
    // size matches with the stack height we can compute based on the
    // environment at the OSR entry. The code for that is built into
    // the DoComputeOsrOutputFrame function for now.
  } else {
    unsigned stack_slots = optimized_code_->stack_slots();
    unsigned outgoing_size = ComputeOutgoingArgumentSize();
    ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
  }
#endif
  return result;
}


unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
  // The fixed part of the frame consists of the return address, frame
  // pointer, function, context, and all the incoming arguments.
  return ComputeIncomingArgumentSize(function) +
      StandardFrameConstants::kFixedFrameSize;
}


unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
  // The incoming arguments is the values for formal parameters and
  // the receiver. Every slot contains a pointer.
  unsigned arguments = function->shared()->formal_parameter_count() + 1;
  return arguments * kPointerSize;
}


// Size in bytes of the outgoing arguments pushed by the optimized frame at
// the deoptimization point, as recorded in the deoptimization input data.
unsigned Deoptimizer::ComputeOutgoingArgumentSize() const {
  DeoptimizationInputData* data = DeoptimizationInputData::cast(
      optimized_code_->deoptimization_data());
  unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
  return height * kPointerSize;
}


// Fetch a literal from the optimized code's deoptimization literal array.
Object* Deoptimizer::ComputeLiteral(int index) const {
  DeoptimizationInputData* data = DeoptimizationInputData::cast(
      optimized_code_->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  return literals->get(index);
}


// Record a double value that must later be materialized as a heap number
// into the given frame slot, once allocation is safe again.
void Deoptimizer::AddDoubleValue(intptr_t slot_address,
                                 double value) {
  HeapNumberMaterializationDescriptor value_desc(
      reinterpret_cast<Address>(slot_address), value);
  deferred_heap_numbers_.Add(value_desc);
}


MemoryChunk* Deoptimizer::CreateCode(BailoutType type) {
  // We cannot run this if the serializer is enabled because this will
  // cause us to emit relocation information for the external
  // references. This is fine because the deoptimizer's code section
  // isn't meant to be serialized at all.
1152 ASSERT(!Serializer::enabled()); 1153 1154 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); 1155 masm.set_emit_debug_code(false); 1156 GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type); 1157 CodeDesc desc; 1158 masm.GetCode(&desc); 1159 ASSERT(desc.reloc_size == 0); 1160 1161 MemoryChunk* chunk = 1162 Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size, 1163 EXECUTABLE, 1164 NULL); 1165 ASSERT(chunk->area_size() >= desc.instr_size); 1166 if (chunk == NULL) { 1167 V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table"); 1168 } 1169 memcpy(chunk->area_start(), desc.buffer, desc.instr_size); 1170 CPU::FlushICache(chunk->area_start(), desc.instr_size); 1171 return chunk; 1172 } 1173 1174 1175 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { 1176 DeoptimizingCodeListNode* node = 1177 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; 1178 while (node != NULL) { 1179 if (node->code()->contains(addr)) return *node->code(); 1180 node = node->next(); 1181 } 1182 return NULL; 1183 } 1184 1185 1186 void Deoptimizer::RemoveDeoptimizingCode(Code* code) { 1187 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); 1188 ASSERT(data->deoptimizing_code_list_ != NULL); 1189 // Run through the code objects to find this one and remove it. 1190 DeoptimizingCodeListNode* prev = NULL; 1191 DeoptimizingCodeListNode* current = data->deoptimizing_code_list_; 1192 while (current != NULL) { 1193 if (*current->code() == code) { 1194 // Unlink from list. If prev is NULL we are looking at the first element. 1195 if (prev == NULL) { 1196 data->deoptimizing_code_list_ = current->next(); 1197 } else { 1198 prev->set_next(current->next()); 1199 } 1200 delete current; 1201 return; 1202 } 1203 // Move to next in list. 1204 prev = current; 1205 current = current->next(); 1206 } 1207 // Deoptimizing code is removed through weak callback. Each object is expected 1208 // to be removed once and only once. 
  UNREACHABLE();
}


FrameDescription::FrameDescription(uint32_t frame_size,
                                   JSFunction* function)
    : frame_size_(frame_size),
      function_(function),
      top_(kZapUint32),
      pc_(kZapUint32),
      fp_(kZapUint32),
      context_(kZapUint32) {
  // Zap all the registers.
  for (int r = 0; r < Register::kNumRegisters; r++) {
    SetRegister(r, kZapUint32);
  }

  // Zap all the slots.
  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
    SetFrameSlot(o, kZapUint32);
  }
}


// Size in bytes of the fixed part of the frame: the standard frame header
// plus the incoming parameters and the receiver.
int FrameDescription::ComputeFixedSize() {
  return StandardFrameConstants::kFixedFrameSize +
      (ComputeParametersCount() + 1) * kPointerSize;
}


// Map a translation slot index to a byte offset from the top of the frame.
// Non-negative indexes denote local/spill slots; negative indexes denote
// incoming parameters.
unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
  if (slot_index >= 0) {
    // Local or spill slots. Skip the fixed part of the frame
    // including all arguments.
    unsigned base = GetFrameSize() - ComputeFixedSize();
    return base - ((slot_index + 1) * kPointerSize);
  } else {
    // Incoming parameter.
    int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
    unsigned base = GetFrameSize() - arg_size;
    return base - ((slot_index + 1) * kPointerSize);
  }
}


int FrameDescription::ComputeParametersCount() {
  switch (type_) {
    case StackFrame::JAVA_SCRIPT:
      return function_->shared()->formal_parameter_count();
    case StackFrame::ARGUMENTS_ADAPTOR: {
      // Last slot contains number of incoming arguments as a smi.
      // Can't use GetExpression(0) because it would cause infinite recursion.
      return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
    }
    default:
      UNREACHABLE();
      return 0;
  }
}


Object* FrameDescription::GetParameter(int index) {
  ASSERT(index >= 0);
  ASSERT(index < ComputeParametersCount());
  // The slot indexes for incoming arguments are negative.
  unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
}


// Number of expression stack slots beyond the fixed part of the frame.
unsigned FrameDescription::GetExpressionCount() {
  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
  unsigned size = GetFrameSize() - ComputeFixedSize();
  return size / kPointerSize;
}


Object* FrameDescription::GetExpression(int index) {
  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
  unsigned offset = GetOffsetFromSlotIndex(index);
  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
}


// Append a signed value using a variable-length encoding.
// NOTE(review): negating kMinInt below overflows; presumably such extreme
// values never reach this encoder -- verify at call sites.
void TranslationBuffer::Add(int32_t value) {
  // Encode the sign bit in the least significant bit.
  bool is_negative = (value < 0);
  uint32_t bits = ((is_negative ? -value : value) << 1) |
      static_cast<int32_t>(is_negative);
  // Encode the individual bytes using the least significant bit of
  // each byte to indicate whether or not more bytes follow.
  do {
    uint32_t next = bits >> 7;
    contents_.Add(((bits << 1) & 0xFF) | (next != 0));
    bits = next;
  } while (bits != 0);
}


// Decode the next value written by TranslationBuffer::Add.
int32_t TranslationIterator::Next() {
  // Run through the bytes until we reach one with a least significant
  // bit of zero (marks the end).
  uint32_t bits = 0;
  for (int i = 0; true; i += 7) {
    ASSERT(HasNext());
    uint8_t next = buffer_->get(index_++);
    bits |= (next >> 1) << i;
    if ((next & 1) == 0) break;
  }
  // The bits encode the sign in the least significant bit.
  bool is_negative = (bits & 1) == 1;
  int32_t result = bits >> 1;
  return is_negative ? -result : result;
}


// Copy the accumulated translation bytes into a tenured ByteArray.
Handle<ByteArray> TranslationBuffer::CreateByteArray() {
  int length = contents_.length();
  Handle<ByteArray> result =
      Isolate::Current()->factory()->NewByteArray(length, TENURED);
  memcpy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
  return result;
}


void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
  buffer_->Add(CONSTRUCT_STUB_FRAME);
  buffer_->Add(literal_id);
  buffer_->Add(height);
}


void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
  buffer_->Add(literal_id);
  buffer_->Add(height);
}


void Translation::BeginJSFrame(int node_id, int literal_id, unsigned height) {
  buffer_->Add(JS_FRAME);
  buffer_->Add(node_id);
  buffer_->Add(literal_id);
  buffer_->Add(height);
}


void Translation::StoreRegister(Register reg) {
  buffer_->Add(REGISTER);
  buffer_->Add(reg.code());
}


void Translation::StoreInt32Register(Register reg) {
  buffer_->Add(INT32_REGISTER);
  buffer_->Add(reg.code());
}


void Translation::StoreDoubleRegister(DoubleRegister reg) {
  buffer_->Add(DOUBLE_REGISTER);
  buffer_->Add(DoubleRegister::ToAllocationIndex(reg));
}


void Translation::StoreStackSlot(int index) {
  buffer_->Add(STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreInt32StackSlot(int index) {
  buffer_->Add(INT32_STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreDoubleStackSlot(int index) {
  buffer_->Add(DOUBLE_STACK_SLOT);
  buffer_->Add(index);
}


void Translation::StoreLiteral(int literal_id) {
  buffer_->Add(LITERAL);
  buffer_->Add(literal_id);
}


void Translation::StoreArgumentsObject() {
  buffer_->Add(ARGUMENTS_OBJECT);
}


void Translation::MarkDuplicate() {
  buffer_->Add(DUPLICATE);
}


// Number of operands following each opcode in the translation stream.
int Translation::NumberOfOperandsFor(Opcode opcode) {
  switch (opcode) {
    case ARGUMENTS_OBJECT:
    case DUPLICATE:
      return 0;
    case REGISTER:
    case INT32_REGISTER:
    case DOUBLE_REGISTER:
    case STACK_SLOT:
    case INT32_STACK_SLOT:
    case DOUBLE_STACK_SLOT:
    case LITERAL:
      return 1;
    case BEGIN:
    case ARGUMENTS_ADAPTOR_FRAME:
    case CONSTRUCT_STUB_FRAME:
      return 2;
    case JS_FRAME:
      return 3;
  }
  UNREACHABLE();
  return -1;
}


#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)

// Human-readable name for an opcode, used by the printer/disassembler.
const char* Translation::StringFor(Opcode opcode) {
  switch (opcode) {
    case BEGIN:
      return "BEGIN";
    case JS_FRAME:
      return "JS_FRAME";
    case ARGUMENTS_ADAPTOR_FRAME:
      return "ARGUMENTS_ADAPTOR_FRAME";
    case CONSTRUCT_STUB_FRAME:
      return "CONSTRUCT_STUB_FRAME";
    case REGISTER:
      return "REGISTER";
    case INT32_REGISTER:
      return "INT32_REGISTER";
    case DOUBLE_REGISTER:
      return "DOUBLE_REGISTER";
    case STACK_SLOT:
      return "STACK_SLOT";
    case INT32_STACK_SLOT:
      return "INT32_STACK_SLOT";
    case DOUBLE_STACK_SLOT:
      return "DOUBLE_STACK_SLOT";
    case LITERAL:
      return "LITERAL";
    case ARGUMENTS_OBJECT:
      return "ARGUMENTS_OBJECT";
    case DUPLICATE:
      return "DUPLICATE";
  }
  UNREACHABLE();
  return "";
}

#endif


DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
  GlobalHandles* global_handles = Isolate::Current()->global_handles();
  // Globalize the code object and make it weak.
  code_ = Handle<Code>::cast(global_handles->Create(code));
  global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),
                           this,
                           Deoptimizer::HandleWeakDeoptimizedCode);
}


DeoptimizingCodeListNode::~DeoptimizingCodeListNode() {
  // Release the weak global handle created in the constructor.
  GlobalHandles* global_handles = Isolate::Current()->global_handles();
  global_handles->Destroy(reinterpret_cast<Object**>(code_.location()));
}


// We can't intermix stack decoding and allocations because
// deoptimization infrastructure is not GC safe.
// Thus we build a temporary structure in malloced space.
SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame) {
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());

  switch (opcode) {
    case Translation::BEGIN:
    case Translation::JS_FRAME:
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
    case Translation::CONSTRUCT_STUB_FRAME:
      // Peeled off before getting here.
      break;

    case Translation::ARGUMENTS_OBJECT:
      // This can be only emitted for local slots not for argument slots.
      break;

    case Translation::REGISTER:
    case Translation::INT32_REGISTER:
    case Translation::DOUBLE_REGISTER:
    case Translation::DUPLICATE:
      // We are at safepoint which corresponds to call. All registers are
      // saved by caller so there would be no live registers at this
      // point. Thus these translation commands should not be used.
      break;

    case Translation::STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::TAGGED);
    }

    case Translation::INT32_STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::INT32);
    }

    case Translation::DOUBLE_STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::DOUBLE);
    }

    case Translation::LITERAL: {
      int literal_index = iterator->Next();
      return SlotRef(data->LiteralArray()->get(literal_index));
    }
  }

  // All non-slot commands fall through to here and yield a default
  // (unknown) SlotRef; reaching this point is otherwise malformed input.
  UNREACHABLE();
  return SlotRef();
}


void SlotRef::ComputeSlotsForArguments(Vector<SlotRef>* args_slots,
                                       TranslationIterator* it,
                                       DeoptimizationInputData* data,
                                       JavaScriptFrame* frame) {
  // Process the translation commands for the arguments.

  // Skip the translation command for the receiver.
  it->Skip(Translation::NumberOfOperandsFor(
      static_cast<Translation::Opcode>(it->Next())));

  // Compute slots for arguments.
  for (int i = 0; i < args_slots->length(); ++i) {
    (*args_slots)[i] = ComputeSlotForNextArgument(it, data, frame);
  }
}


// Compute SlotRefs for the arguments of the inlined function identified by
// |inlined_jsframe_index| inside the optimized |frame|.  The returned
// vector is created with Vector::New; presumably the caller disposes it --
// verify at call sites.
Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
    JavaScriptFrame* frame,
    int inlined_jsframe_index,
    int formal_parameter_count) {
  AssertNoAllocation no_gc;
  int deopt_index = AstNode::kNoNumber;
  DeoptimizationInputData* data =
      static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();
  USE(jsframe_count);
  ASSERT(jsframe_count > inlined_jsframe_index);
  int jsframes_to_skip = inlined_jsframe_index;
  while (true) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
      if (jsframes_to_skip == 0) {
        ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);

        it.Skip(1);  // literal id
        int height = it.Next();

        // We reached the arguments adaptor frame corresponding to the
        // inlined function in question. Number of arguments is height - 1.
        Vector<SlotRef> args_slots =
            Vector<SlotRef>::New(height - 1);  // Minus receiver.
        ComputeSlotsForArguments(&args_slots, &it, data, frame);
        return args_slots;
      }
    } else if (opcode == Translation::JS_FRAME) {
      if (jsframes_to_skip == 0) {
        // Skip over operands to advance to the next opcode.
        it.Skip(Translation::NumberOfOperandsFor(opcode));

        // We reached the frame corresponding to the inlined function
        // in question. Process the translation commands for the
        // arguments. Number of arguments is equal to the formal
        // parameter count.
        Vector<SlotRef> args_slots =
            Vector<SlotRef>::New(formal_parameter_count);
        ComputeSlotsForArguments(&args_slots, &it, data, frame);
        return args_slots;
      }
      jsframes_to_skip--;
    }

    // Skip over operands to advance to the next opcode.
    it.Skip(Translation::NumberOfOperandsFor(opcode));
  }

  UNREACHABLE();
  return Vector<SlotRef>();
}

#ifdef ENABLE_DEBUGGER_SUPPORT

// Snapshot the state of one deoptimized frame for the debugger: function,
// source position, parameters and expression stack are copied out of the
// deoptimizer's output frame descriptions.
DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                                           int frame_index,
                                           bool has_arguments_adaptor,
                                           bool has_construct_stub) {
  FrameDescription* output_frame = deoptimizer->output_[frame_index];
  function_ = output_frame->GetFunction();
  has_construct_stub_ = has_construct_stub;
  expression_count_ = output_frame->GetExpressionCount();
  expression_stack_ = new Object*[expression_count_];
  // Get the source position using the unoptimized code.
  Address pc = reinterpret_cast<Address>(output_frame->GetPc());
  Code* code = Code::cast(Isolate::Current()->heap()->FindCodeObject(pc));
  source_position_ = code->SourcePosition(pc);

  for (int i = 0; i < expression_count_; i++) {
    SetExpression(i, output_frame->GetExpression(i));
  }

  if (has_arguments_adaptor) {
    // Parameters live in the arguments adaptor frame below this one.
    output_frame = deoptimizer->output_[frame_index - 1];
    ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
  }

  parameters_count_ = output_frame->ComputeParametersCount();
  parameters_ = new Object*[parameters_count_];
  for (int i = 0; i < parameters_count_; i++) {
    SetParameter(i, output_frame->GetParameter(i));
  }
}


DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
  delete[] expression_stack_;
  delete[] parameters_;
}


// Visit the GC roots held by this frame snapshot.
void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
  v->VisitPointer(BitCast<Object**>(&function_));
  v->VisitPointers(parameters_, parameters_ + parameters_count_);
  v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}

#endif  // ENABLE_DEBUGGER_SUPPORT

} }  // namespace v8::internal