1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "v8.h"

#include "ast.h"
#include "deoptimizer.h"
#include "frames-inl.h"
#include "full-codegen.h"
#include "lazy-instance.h"
#include "mark-compact.h"
#include "safepoint-table.h"
#include "scopeinfo.h"
#include "string-stream.h"
#include "vm-state-inl.h"

#include "allocation-inl.h"

namespace v8 {
namespace internal {


// Process-wide hook for rewriting return-address slots; stays NULL until
// installed once via StackFrame::SetReturnAddressLocationResolver and is
// consulted by ResolveReturnAddressLocation.
ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;


// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    ASSERT(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  // Done when the chain is exhausted or the next handler lies above the
  // frame's fp, i.e. belongs to an older frame on the stack.
  bool done() {
    return handler_ == NULL || handler_->address() > limit_;
  }
  void Advance() {
    ASSERT(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;  // fp of the frame whose handlers we walk.
  StackHandler* handler_;
};


// -------------------------------------------------------------------------


// Constructs the iterator base and initializes one embedded singleton frame
// object per frame type (expanded from STACK_FRAME_TYPE_LIST); SingletonFor
// hands these out during iteration so no frame objects are heap-allocated.
#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
      frame_(NULL), handler_(NULL),
      can_access_heap_objects_(can_access_heap_objects) {
}
#undef INITIALIZE_SINGLETON


StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIteratorBase(isolate, true) {
  Reset(isolate->thread_local_top());
}


StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}


void StackFrameIterator::Advance() {
  ASSERT(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  ASSERT(!done() || handler_ == NULL);
}


// Re-anchors the iterator at the topmost exit frame and handler recorded in
// the given thread state; leaves the iterator done() if there is none.
void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
}


// Returns the embedded singleton frame for |type| with its state_ filled in
// from |state|; NULL for StackFrame::NONE.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  if (type == StackFrame::NONE) return NULL;
  StackFrame* result = SingletonFor(type);
  ASSERT(result != NULL);
  result->state_ = *state;
  return result;
}


// Maps a frame type to its embedded singleton member; NULL for NONE and for
// any value outside STACK_FRAME_TYPE_LIST (default case leaves result NULL).
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: result = &field##_; break;

  StackFrame* result = NULL;
  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return result;

#undef FRAME_TYPE_CASE
}


// -------------------------------------------------------------------------
// Advances from the top of the stack until the frame with the given id is
// current. Note: the id check happens after the first Advance(), so the
// iterator starts at the first JavaScript frame at or below |id|.
JavaScriptFrameIterator::JavaScriptFrameIterator(
    Isolate* isolate, StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}


// Skips over non-JavaScript frames (exit, entry, internal, ...).
void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}


// If the current frame has adapted arguments, steps onto the arguments
// adaptor frame beneath it; otherwise leaves the iterator unchanged.
void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  ASSERT(iterator_.frame()->is_arguments_adaptor());
}


// -------------------------------------------------------------------------


StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : JavaScriptFrameIterator(isolate) {
  // Skip an initial frame that should not appear in stack traces.
  if (!done() && !IsValidFrame()) Advance();
}


void StackTraceFrameIterator::Advance() {
  while (true) {
    JavaScriptFrameIterator::Advance();
    if (done()) return;
    if (IsValidFrame()) return;
  }
}


// A frame is shown in a stack trace only if it has a real JSFunction whose
// script is a non-native Script object.
bool StackTraceFrameIterator::IsValidFrame() {
  if (!frame()->function()->IsJSFunction()) return false;
  Object* script = frame()->function()->shared()->script();
  // Don't show functions from native scripts to user.
  return (script->IsScript() &&
          Script::TYPE_NATIVE != Script::cast(script)->type()->value());
}


// -------------------------------------------------------------------------


// "Safe" iterator used when the stack may be in an arbitrary state (e.g.
// sampled asynchronously): every pointer is bounds-checked against
// [low_bound_, high_bound_) before being dereferenced, and iteration simply
// stops (frame_ == NULL) instead of crashing on invalid data.
SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    // Trust the recorded c_entry_fp: start from the registered exit frame.
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    ASSERT(fp != NULL);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // we check only that kMarkerOffset is within the stack bounds and do
    // compile time check that kContextOffset slot is pushed on the stack before
    // kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame anyways will be skipped.
      type = StackFrame::JAVA_SCRIPT;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    // Neither the thread state nor the supplied fp is usable; stay done().
    return;
  }
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  Advance();

  if (frame_ != NULL && !frame_->is_exit() &&
      external_callback_scope_ != NULL &&
      external_callback_scope_->scope_address() < frame_->fp()) {
    // Skip top ExternalCallbackScope if we already advanced to a JS frame
    // under it. Sampler will anyways take this top external callback.
    external_callback_scope_ = external_callback_scope_->previous();
  }
}


// The recorded thread state is usable only if its c_entry_fp points at a
// valid exit frame and a JS_ENTRY handler exists below it.
bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}


// Moves to the caller frame, setting frame_ to NULL (done) whenever any
// validity test fails or the walk does not move monotonically up the stack.
void SafeStackFrameIterator::AdvanceOneFrame() {
  ASSERT(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = NULL;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
    frame_ = NULL;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


// Validates the data that computing |frame|'s caller state would read,
// then checks the resulting caller state itself.
bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on stack as Smi. We need to check
    // that it really an Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
}


// Checks that fp, the derived sp, and the pc slot all lie within the stack
// bounds and that the saved pc is non-NULL.
bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
    return false;
  }
  return *state.pc_address != NULL;
}


// Advances to the next JavaScript frame, or to an EXIT frame that has an
// ExternalCallbackScope on top of it (rewriting its pc to the callback so
// the sample is attributed to the external callback).
void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) return;
    if (frame_->is_java_script()) return;
    if (frame_->is_exit() && external_callback_scope_) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (external_callback_scope_->scope_address() < frame_->fp()) {
        Address* callback_address =
            external_callback_scope_->callback_address();
        if (*callback_address != NULL) {
          frame_->state_.pc_address = callback_address;
        }
        external_callback_scope_ = external_callback_scope_->previous();
        ASSERT(external_callback_scope_ == NULL ||
               external_callback_scope_->scope_address() > frame_->fp());
        return;
      }
    }
  }
}


// -------------------------------------------------------------------------


// Looks up (and memoizes in the inner-pointer-to-code cache) the safepoint
// entry and stack-slot count for the code containing |inner_pointer|.
Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!entry->safepoint_entry.is_valid()) {
    entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
    ASSERT(entry->safepoint_entry.is_valid());
  } else {
    ASSERT(entry->safepoint_entry.Equals(
        entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = entry->code;
  *safepoint_entry = entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}


bool StackFrame::HasHandler() const {
  StackHandlerIterator it(this, top_handler());
  return !it.done();
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* object, Address addr);
#endif


// Visits the code object holding the return address; if the visitor moved
// the code object, rewrites *pc_address to the same offset in the new copy.
void StackFrame::IteratePc(ObjectVisitor* v,
                           Address* pc_address,
                           Code* holder) {
  Address pc = *pc_address;
  ASSERT(GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
  }
}


// One-shot installation: asserts no resolver was registered before.
void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  ASSERT(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}


StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  ASSERT(state->fp != NULL);
  if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
    return ARGUMENTS_ADAPTOR;
  }
  // The marker and function offsets overlap. If the marker isn't a
  // smi then the frame is a JavaScript frame -- and the marker is
  // really the function.
  const int offset = StandardFrameConstants::kMarkerOffset;
  Object* marker = Memory::Object_at(state->fp + offset);
  if (!marker->IsSmi()) {
    // If we're using a "safe" stack iterator, we treat optimized
    // frames as normal JavaScript frames to avoid having to look
    // into the heap to determine the state. This is safe as long
    // as nobody tries to GC...
    if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
    Code::Kind kind = GetContainingCode(iterator->isolate(),
                                        *(state->pc_address))->kind();
    ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
    return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
  }
  // Smi marker encodes the frame type directly.
  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


// On ia32, optimized frames may carry one word of dynamic alignment padding;
// this returns the fp as if no padding had been pushed.
Address StackFrame::UnpaddedFP() const {
#if V8_TARGET_ARCH_IA32
  if (!is_optimized()) return fp();
  int32_t alignment_state = Memory::int32_at(
      fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);

  return (alignment_state == kAlignmentPaddingPushed) ?
      (fp() + kPointerSize) : fp();
#else
  return fp();
#endif
}


Code* EntryFrame::unchecked_code() const {
  return HEAP->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


void EntryFrame::SetCallerFp(Address caller_fp) {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Memory::Address_at(this->fp() + offset) = caller_fp;
}


// The caller of an entry frame is the exit frame whose fp was saved in the
// entry frame's caller-fp slot.
StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory::Address_at(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}


Code* EntryConstructFrame::unchecked_code() const {
  return HEAP->js_construct_entry_code();
}


// Returns a reference to the code slot in the exit frame so it can be both
// read and updated in place (see ExitFrame::Iterate).
Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory::Object_at(fp() + offset);
}
Code* ExitFrame::unchecked_code() const { 526 return reinterpret_cast<Code*>(code_slot()); 527 } 528 529 530 void ExitFrame::ComputeCallerState(State* state) const { 531 // Set up the caller state. 532 state->sp = caller_sp(); 533 state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset); 534 state->pc_address = ResolveReturnAddressLocation( 535 reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset)); 536 } 537 538 539 void ExitFrame::SetCallerFp(Address caller_fp) { 540 Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp; 541 } 542 543 544 void ExitFrame::Iterate(ObjectVisitor* v) const { 545 // The arguments are traversed as part of the expression stack of 546 // the calling frame. 547 IteratePc(v, pc_address(), LookupCode()); 548 v->VisitPointer(&code_slot()); 549 } 550 551 552 Address ExitFrame::GetCallerStackPointer() const { 553 return fp() + ExitFrameConstants::kCallerSPDisplacement; 554 } 555 556 557 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) { 558 if (fp == 0) return NONE; 559 Address sp = ComputeStackPointer(fp); 560 FillState(fp, sp, state); 561 ASSERT(*state->pc_address != NULL); 562 return EXIT; 563 } 564 565 566 Address ExitFrame::ComputeStackPointer(Address fp) { 567 return Memory::Address_at(fp + ExitFrameConstants::kSPOffset); 568 } 569 570 571 void ExitFrame::FillState(Address fp, Address sp, State* state) { 572 state->sp = sp; 573 state->fp = fp; 574 state->pc_address = ResolveReturnAddressLocation( 575 reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize)); 576 } 577 578 579 Address StandardFrame::GetExpressionAddress(int n) const { 580 const int offset = StandardFrameConstants::kExpressionsOffset; 581 return fp() + offset - n * kPointerSize; 582 } 583 584 585 Object* StandardFrame::GetExpression(Address fp, int index) { 586 return Memory::Object_at(GetExpressionAddress(fp, index)); 587 } 588 589 590 Address StandardFrame::GetExpressionAddress(Address fp, 
int n) { 591 const int offset = StandardFrameConstants::kExpressionsOffset; 592 return fp + offset - n * kPointerSize; 593 } 594 595 596 int StandardFrame::ComputeExpressionsCount() const { 597 const int offset = 598 StandardFrameConstants::kExpressionsOffset + kPointerSize; 599 Address base = fp() + offset; 600 Address limit = sp(); 601 ASSERT(base >= limit); // stack grows downwards 602 // Include register-allocated locals in number of expressions. 603 return static_cast<int>((base - limit) / kPointerSize); 604 } 605 606 607 void StandardFrame::ComputeCallerState(State* state) const { 608 state->sp = caller_sp(); 609 state->fp = caller_fp(); 610 state->pc_address = ResolveReturnAddressLocation( 611 reinterpret_cast<Address*>(ComputePCAddress(fp()))); 612 } 613 614 615 void StandardFrame::SetCallerFp(Address caller_fp) { 616 Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) = 617 caller_fp; 618 } 619 620 621 bool StandardFrame::IsExpressionInsideHandler(int n) const { 622 Address address = GetExpressionAddress(n); 623 for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) { 624 if (it.handler()->includes(address)) return true; 625 } 626 return false; 627 } 628 629 630 void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const { 631 // Make sure that we're not doing "safe" stack frame iteration. We cannot 632 // possibly find pointers in optimized frames in that state. 633 ASSERT(can_access_heap_objects()); 634 635 // Compute the safepoint information. 636 unsigned stack_slots = 0; 637 SafepointEntry safepoint_entry; 638 Code* code = StackFrame::GetSafepointData( 639 isolate(), pc(), &safepoint_entry, &stack_slots); 640 unsigned slot_space = stack_slots * kPointerSize; 641 642 // Visit the outgoing parameters. 
643 Object** parameters_base = &Memory::Object_at(sp()); 644 Object** parameters_limit = &Memory::Object_at( 645 fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space); 646 647 // Visit the parameters that may be on top of the saved registers. 648 if (safepoint_entry.argument_count() > 0) { 649 v->VisitPointers(parameters_base, 650 parameters_base + safepoint_entry.argument_count()); 651 parameters_base += safepoint_entry.argument_count(); 652 } 653 654 // Skip saved double registers. 655 if (safepoint_entry.has_doubles()) { 656 // Number of doubles not known at snapshot time. 657 ASSERT(!Serializer::enabled()); 658 parameters_base += DoubleRegister::NumAllocatableRegisters() * 659 kDoubleSize / kPointerSize; 660 } 661 662 // Visit the registers that contain pointers if any. 663 if (safepoint_entry.HasRegisters()) { 664 for (int i = kNumSafepointRegisters - 1; i >=0; i--) { 665 if (safepoint_entry.HasRegisterAt(i)) { 666 int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i); 667 v->VisitPointer(parameters_base + reg_stack_index); 668 } 669 } 670 // Skip the words containing the register values. 671 parameters_base += kNumSafepointRegisters; 672 } 673 674 // We're done dealing with the register bits. 675 uint8_t* safepoint_bits = safepoint_entry.bits(); 676 safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2; 677 678 // Visit the rest of the parameters. 679 v->VisitPointers(parameters_base, parameters_limit); 680 681 // Visit pointer spill slots and locals. 682 for (unsigned index = 0; index < stack_slots; index++) { 683 int byte_index = index >> kBitsPerByteLog2; 684 int bit_index = index & (kBitsPerByte - 1); 685 if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) { 686 v->VisitPointer(parameters_limit + index); 687 } 688 } 689 690 // Visit the return address in the callee and incoming arguments. 691 IteratePc(v, pc_address(), code); 692 693 // Visit the context in stub frame and JavaScript frame. 
694 // Visit the function in JavaScript frame. 695 Object** fixed_base = &Memory::Object_at( 696 fp() + StandardFrameConstants::kMarkerOffset); 697 Object** fixed_limit = &Memory::Object_at(fp()); 698 v->VisitPointers(fixed_base, fixed_limit); 699 } 700 701 702 void StubFrame::Iterate(ObjectVisitor* v) const { 703 IterateCompiledFrame(v); 704 } 705 706 707 Code* StubFrame::unchecked_code() const { 708 return static_cast<Code*>(isolate()->FindCodeObject(pc())); 709 } 710 711 712 Address StubFrame::GetCallerStackPointer() const { 713 return fp() + ExitFrameConstants::kCallerSPDisplacement; 714 } 715 716 717 int StubFrame::GetNumberOfIncomingArguments() const { 718 return 0; 719 } 720 721 722 void OptimizedFrame::Iterate(ObjectVisitor* v) const { 723 #ifdef DEBUG 724 // Make sure that optimized frames do not contain any stack handlers. 725 StackHandlerIterator it(this, top_handler()); 726 ASSERT(it.done()); 727 #endif 728 729 IterateCompiledFrame(v); 730 } 731 732 733 void JavaScriptFrame::SetParameterValue(int index, Object* value) const { 734 Memory::Object_at(GetParameterSlot(index)) = value; 735 } 736 737 738 bool JavaScriptFrame::IsConstructor() const { 739 Address fp = caller_fp(); 740 if (has_adapted_arguments()) { 741 // Skip the arguments adaptor frame and look at the real caller. 742 fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset); 743 } 744 return IsConstructFrame(fp); 745 } 746 747 748 int JavaScriptFrame::GetArgumentsLength() const { 749 // If there is an arguments adaptor frame get the arguments length from it. 
750 if (has_adapted_arguments()) { 751 return Smi::cast(GetExpression(caller_fp(), 0))->value(); 752 } else { 753 return GetNumberOfIncomingArguments(); 754 } 755 } 756 757 758 Code* JavaScriptFrame::unchecked_code() const { 759 return function()->code(); 760 } 761 762 763 int JavaScriptFrame::GetNumberOfIncomingArguments() const { 764 ASSERT(can_access_heap_objects() && 765 isolate()->heap()->gc_state() == Heap::NOT_IN_GC); 766 767 return function()->shared()->formal_parameter_count(); 768 } 769 770 771 Address JavaScriptFrame::GetCallerStackPointer() const { 772 return fp() + StandardFrameConstants::kCallerSPOffset; 773 } 774 775 776 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) { 777 ASSERT(functions->length() == 0); 778 functions->Add(function()); 779 } 780 781 782 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) { 783 ASSERT(functions->length() == 0); 784 Code* code_pointer = LookupCode(); 785 int offset = static_cast<int>(pc() - code_pointer->address()); 786 FrameSummary summary(receiver(), 787 function(), 788 code_pointer, 789 offset, 790 IsConstructor()); 791 functions->Add(summary); 792 } 793 794 795 void JavaScriptFrame::PrintTop(Isolate* isolate, 796 FILE* file, 797 bool print_args, 798 bool print_line_number) { 799 // constructor calls 800 HandleScope scope(isolate); 801 DisallowHeapAllocation no_allocation; 802 JavaScriptFrameIterator it(isolate); 803 while (!it.done()) { 804 if (it.frame()->is_java_script()) { 805 JavaScriptFrame* frame = it.frame(); 806 if (frame->IsConstructor()) PrintF(file, "new "); 807 // function name 808 JSFunction* fun = frame->function(); 809 fun->PrintName(); 810 Code* js_code = frame->unchecked_code(); 811 Address pc = frame->pc(); 812 int code_offset = 813 static_cast<int>(pc - js_code->instruction_start()); 814 PrintF("+%d", code_offset); 815 SharedFunctionInfo* shared = fun->shared(); 816 if (print_line_number) { 817 Code* code = Code::cast( 818 
v8::internal::Isolate::Current()->FindCodeObject(pc)); 819 int source_pos = code->SourcePosition(pc); 820 Object* maybe_script = shared->script(); 821 if (maybe_script->IsScript()) { 822 Handle<Script> script(Script::cast(maybe_script)); 823 int line = GetScriptLineNumberSafe(script, source_pos) + 1; 824 Object* script_name_raw = script->name(); 825 if (script_name_raw->IsString()) { 826 String* script_name = String::cast(script->name()); 827 SmartArrayPointer<char> c_script_name = 828 script_name->ToCString(DISALLOW_NULLS, 829 ROBUST_STRING_TRAVERSAL); 830 PrintF(file, " at %s:%d", *c_script_name, line); 831 } else { 832 PrintF(file, " at <unknown>:%d", line); 833 } 834 } else { 835 PrintF(file, " at <unknown>:<unknown>"); 836 } 837 } 838 839 if (print_args) { 840 // function arguments 841 // (we are intentionally only printing the actually 842 // supplied parameters, not all parameters required) 843 PrintF(file, "(this="); 844 frame->receiver()->ShortPrint(file); 845 const int length = frame->ComputeParametersCount(); 846 for (int i = 0; i < length; i++) { 847 PrintF(file, ", "); 848 frame->GetParameter(i)->ShortPrint(file); 849 } 850 PrintF(file, ")"); 851 } 852 break; 853 } 854 it.Advance(); 855 } 856 } 857 858 859 void JavaScriptFrame::SaveOperandStack(FixedArray* store, 860 int* stack_handler_index) const { 861 int operands_count = store->length(); 862 ASSERT_LE(operands_count, ComputeOperandsCount()); 863 864 // Visit the stack in LIFO order, saving operands and stack handlers into the 865 // array. The saved stack handlers store a link to the next stack handler, 866 // which will allow RestoreOperandStack to rewind the handlers. 867 StackHandlerIterator it(this, top_handler()); 868 int i = operands_count - 1; 869 *stack_handler_index = -1; 870 for (; !it.done(); it.Advance()) { 871 StackHandler* handler = it.handler(); 872 // Save operands pushed after the handler was pushed. 
873 for (; GetOperandSlot(i) < handler->address(); i--) { 874 store->set(i, GetOperand(i)); 875 } 876 ASSERT_GE(i + 1, StackHandlerConstants::kSlotCount); 877 ASSERT_EQ(handler->address(), GetOperandSlot(i)); 878 int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount; 879 handler->Unwind(isolate(), store, next_stack_handler_index, 880 *stack_handler_index); 881 *stack_handler_index = next_stack_handler_index; 882 i -= StackHandlerConstants::kSlotCount; 883 } 884 885 // Save any remaining operands. 886 for (; i >= 0; i--) { 887 store->set(i, GetOperand(i)); 888 } 889 } 890 891 892 void JavaScriptFrame::RestoreOperandStack(FixedArray* store, 893 int stack_handler_index) { 894 int operands_count = store->length(); 895 ASSERT_LE(operands_count, ComputeOperandsCount()); 896 int i = 0; 897 while (i <= stack_handler_index) { 898 if (i < stack_handler_index) { 899 // An operand. 900 ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value()); 901 Memory::Object_at(GetOperandSlot(i)) = store->get(i); 902 i++; 903 } else { 904 // A stack handler. 905 ASSERT_EQ(i, stack_handler_index); 906 // The FixedArray store grows up. The stack grows down. So the operand 907 // slot for i actually points to the bottom of the top word in the 908 // handler. The base of the StackHandler* is the address of the bottom 909 // word, which will be the last slot that is in the handler. 
910 int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1; 911 StackHandler *handler = 912 StackHandler::FromAddress(GetOperandSlot(handler_slot_index)); 913 stack_handler_index = handler->Rewind(isolate(), store, i, fp()); 914 i += StackHandlerConstants::kSlotCount; 915 } 916 } 917 918 for (; i < operands_count; i++) { 919 ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value()); 920 Memory::Object_at(GetOperandSlot(i)) = store->get(i); 921 } 922 } 923 924 925 void FrameSummary::Print() { 926 PrintF("receiver: "); 927 receiver_->ShortPrint(); 928 PrintF("\nfunction: "); 929 function_->shared()->DebugName()->ShortPrint(); 930 PrintF("\ncode: "); 931 code_->ShortPrint(); 932 if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT"); 933 if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT"); 934 PrintF("\npc: %d\n", offset_); 935 } 936 937 938 JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array, 939 int literal_id) { 940 if (literal_id == Translation::kSelfLiteralId) { 941 return function(); 942 } 943 944 return JSFunction::cast(literal_array->get(literal_id)); 945 } 946 947 948 void OptimizedFrame::Summarize(List<FrameSummary>* frames) { 949 ASSERT(frames->length() == 0); 950 ASSERT(is_optimized()); 951 952 int deopt_index = Safepoint::kNoDeoptimizationIndex; 953 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index); 954 FixedArray* literal_array = data->LiteralArray(); 955 956 // BUG(3243555): Since we don't have a lazy-deopt registered at 957 // throw-statements, we can't use the translation at the call-site of 958 // throw. An entry with no deoptimization index indicates a call-site 959 // without a lazy-deopt. As a consequence we are not allowed to inline 960 // functions containing throw. 
  // No lazy deopt at this site: fall back to the generic JS-frame summary
  // (no inlined frames can be present here — see the BUG note above).
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    JavaScriptFrame::Summarize(frames);
    return;
  }

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  int i = jsframe_count;
  while (i > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      i--;
      BailoutId ast_id = BailoutId(it.Next());
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.

      // The translation commands are ordered and the receiver is always
      // at the first position. Since we are always at a call when we need
      // to construct a stack trace, the receiver is always in a stack slot.
      opcode = static_cast<Translation::Opcode>(it.Next());
      ASSERT(opcode == Translation::STACK_SLOT ||
             opcode == Translation::LITERAL);
      int index = it.Next();

      // Get the correct receiver in the optimized frame.
      Object* receiver = NULL;
      if (opcode == Translation::LITERAL) {
        receiver = data->LiteralArray()->get(index);
      } else {
        // Positive index means the value is spilled to the locals
        // area. Negative means it is stored in the incoming parameter
        // area.
        if (index >= 0) {
          receiver = GetExpression(index);
        } else {
          // Index -1 overlaps with last parameter, -n with the first parameter,
          // (-n - 1) with the receiver with n being the number of parameters
          // of the outermost, optimized frame.
          int parameter_count = ComputeParametersCount();
          int parameter_index = index + parameter_count;
          receiver = (parameter_index == -1)
              ? this->receiver()
              : this->GetParameter(parameter_index);
        }
      }

      // Map the bailout point back to a pc offset inside the unoptimized
      // code object so the summary can report a meaningful position.
      Code* code = function->shared()->code();
      DeoptimizationOutputData* output_data =
          DeoptimizationOutputData::cast(code->deoptimization_data());
      unsigned entry = Deoptimizer::GetOutputInfo(output_data,
                                                  ast_id,
                                                  function->shared());
      unsigned pc_offset =
          FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
      ASSERT(pc_offset > 0);

      FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
      ASSERT(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
  ASSERT(!is_constructor);
}


// Returns the DeoptimizationInputData for this frame's optimized code object
// and stores the safepoint's deoptimization index in |*deopt_index|.
DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) {
  ASSERT(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  }
  ASSERT(code != NULL);
  ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex);

  return DeoptimizationInputData::cast(code->deoptimization_data());
}


// Returns the number of JS frames (including inlined ones) represented by
// this optimized frame, read from the deoptimization translation header.
int OptimizedFrame::GetInlineCount() {
  ASSERT(is_optimized());

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  USE(opcode);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();
  return jsframe_count;
}


// Collects the JSFunction of every (possibly inlined) JS frame represented
// by this optimized frame into |functions|.
void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
  ASSERT(functions->length() == 0);
  ASSERT(is_optimized());

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      jsframe_count--;
      it.Next();  // Skip ast id.
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.
      functions->Add(function);
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
}


// The adaptor frame records the actual argument count in its first
// expression slot as a Smi.
int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}


Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments. The stack pointer of the
  // caller is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


// Adaptor frames always run the arguments-adaptor trampoline builtin.
Code* ArgumentsAdaptorFrame::unchecked_code() const {
  return isolate()->builtins()->builtin(
      Builtins::kArgumentsAdaptorTrampoline);
}


// Reads the code object slot directly off the frame; the value is not
// type-checked (hence "unchecked"), as it may be accessed during GC.
Code* InternalFrame::unchecked_code() const {
  const int offset = InternalFrameConstants::kCodeOffset;
  Object* code = Memory::Object_at(fp() + offset);
  ASSERT(code != NULL);
  return reinterpret_cast<Code*>(code);
}


// Prints the frame index prefix: right-aligned "%5d: " in OVERVIEW mode,
// compact "[%d]: " otherwise.
void StackFrame::PrintIndex(StringStream* accumulator,
                            PrintMode mode,
                            int index) {
  accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
}


// Pretty-prints this JavaScript frame: function name, script position,
// receiver, parameters and (in non-OVERVIEW mode) locals and the expression
// stack.
void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  HandleScope scope(isolate());
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate()));

  Handle<SharedFunctionInfo> shared(function->shared());
  scope_info = Handle<ScopeInfo>(shared->scope_info());
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Handle<Script> script(Script::cast(script_obj));
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    // Prefer the exact source line for the current pc; fall back to the
    // function's start position (marked with '~') when pc is unusable.
    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      int source_pos = code->SourcePosition(pc);
      int line = GetScriptLineNumberSafe(script, source_pos) + 1;
      accumulator->Add(":%d", line);
    } else {
      int function_start_pos = shared->start_position();
      int line = GetScriptLineNumberSafe(script, function_start_pos) + 1;
      accumulator->Add(":~%d", line);
    }

    accumulator->Add("] ");
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it.
    // Nameless parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add("  // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add("  // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      if (i < context->length()) {
        accumulator->Add("%o", context->get(Context::MIN_CONTEXT_SLOTS + i));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add("  // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    // Slots that belong to a pushed stack handler are skipped.
    if (IsExpressionInsideHandler(i)) continue;
    accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
  }

  // Print details about the function.
  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
    SharedFunctionInfo* shared = function->shared();
    accumulator->Add("--------- s o u r c e   c o d e ---------\n");
    shared->SourceCodePrint(accumulator, FLAG_max_stack_trace_source_length);
    accumulator->Add("\n-----------------------------------------\n");
  }

  accumulator->Add("}\n\n");
}


// Pretty-prints an arguments adaptor frame, showing the actual->expected
// argument-count mapping and (in non-OVERVIEW mode) each actual argument.
void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add("  // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add("  [%02d] : %o", i, GetParameter(i));
    if (expected != -1 && i >= expected) {
      accumulator->Add("  // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}


// GC support: visits the single JS-entry stack handler of an entry frame and
// the frame's return address.
void EntryFrame::Iterate(ObjectVisitor* v) const {
  StackHandlerIterator it(this, top_handler());
  ASSERT(!it.done());
  StackHandler* handler = it.handler();
  ASSERT(handler->is_js_entry());
  handler->Iterate(v, LookupCode());
#ifdef DEBUG
  // Make sure that the entry frame does not contain more than one
  // stack handler.
  it.Advance();
  ASSERT(it.done());
#endif
  IteratePc(v, pc_address(), LookupCode());
}


// GC support: visits every object pointer on this frame's expression stack
// (sp up to and including the context slot), letting each stack handler in
// the chain visit its own slots instead of treating them as raw operands.
void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
    StackHandler* handler = it.handler();
    // Traverse pointers down to - but not including - the next
    // handler in the handler chain. Update the base to skip the
    // handler and allow the handler to traverse its own pointers.
    const Address address = handler->address();
    v->VisitPointers(base, reinterpret_cast<Object**>(address));
    base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
    // Traverse the pointers in the handler itself.
    handler->Iterate(v, LookupCode());
  }
  v->VisitPointers(base, limit);
}


// GC support: a JavaScript frame is its expression stack plus the pc.
void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), LookupCode());
}


void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), LookupCode());
}


// GC support: visits the register-parameter area and the standard frame
// slots (marker through context) separately, skipping the gap between them.
void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() +
                                      kFirstRegisterParameterFrameOffset);
  v->VisitPointers(base, limit);
  base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
  const int offset = StandardFrameConstants::kContextOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), LookupCode());
}


Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


// Finds the trampoline code object containing pc(); tries the non-JS-function
// variant first, then the JS-function variant.
Code* StubFailureTrampolineFrame::unchecked_code() const {
  Code* trampoline;
  StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline, isolate());
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline, isolate());
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


// Returns the n-th JavaScript frame on the stack (0 = topmost). Must only be
// called when at least n+1 JavaScript frames exist.
JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  ASSERT(n >= 0);
  for (int i = 0; i <= n; i++) {
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (i == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


// Reads the map of a code-space object in a way that is safe during GC:
// if the object has already been relocated, follow the forwarding address.
static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ?
      map_word.ToForwardingAddress()->map() : map_word.ToMap();
}


// Size of a code-space object, computed from its GC-safe map.
static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}


#ifdef DEBUG
// Debug check: |addr| lies within the extent of the code object, using only
// GC-safe accesses.
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  ASSERT(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif


// Casts |object| to Code without touching its map (safe during GC); in debug
// builds verifies that |inner_pointer| really lies inside the object.
Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  ASSERT(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}


// Finds the Code object containing |inner_pointer| by scanning the code
// space (or the large-object space) without relying on maps being intact —
// callable while a GC is in progress.
Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();
  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  // The skip list lets us start the scan close to the target address.
  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

  while (true) {
    // Skip the unused allocation gap between top and limit.
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}


// Returns the (possibly freshly computed) cache entry mapping
// |inner_pointer| to its containing Code object. The cache is a direct-mapped
// hash table indexed by the hashed pointer value.
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
      v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


// Saves this handler's five slots into |array| starting at |offset| and pops
// the handler off the isolate's live handler chain.
void StackHandler::Unwind(Isolate* isolate,
                          FixedArray* array,
                          int offset,
                          int previous_handler_offset) const {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  ASSERT_LE(0, offset);
  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  // Unwinding a stack handler into an array chains it in the opposite
  // direction, re-using the "next" slot as a "previous" link, so that stack
  // handlers can be later re-wound in the correct order. Decode the "state"
  // slot into "index" and "kind" and store them separately, using the fp slot.
  array->set(offset, Smi::FromInt(previous_handler_offset));  // next
  array->set(offset + 1, *code_address());  // code
  array->set(offset + 2, Smi::FromInt(static_cast<int>(index())));  // state
  array->set(offset + 3, *context_address());  // context
  array->set(offset + 4, Smi::FromInt(static_cast<int>(kind())));  // fp

  *isolate->handler_address() = next()->address();
}


// Reconstructs this handler in place from the slots saved by Unwind (reading
// |array| at |offset|), pushes it back onto the isolate's handler chain, and
// returns the saved offset of the previously unwound handler.
int StackHandler::Rewind(Isolate* isolate,
                         FixedArray* array,
                         int offset,
                         Address fp) {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  ASSERT_LE(0, offset);
  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  Smi* prev_handler_offset = Smi::cast(array->get(offset));
  Code* code = Code::cast(array->get(offset + 1));
  Smi* smi_index = Smi::cast(array->get(offset + 2));
  Object* context = array->get(offset + 3);
  Smi* smi_kind = Smi::cast(array->get(offset + 4));

  // Re-encode the state word from the separately stored kind and index.
  unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
      IndexField::encode(static_cast<unsigned>(smi_index->value()));

  // Write the handler's slots back onto the stack and relink it as the
  // current (innermost) handler of the isolate.
  Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
      *isolate->handler_address();
  Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
  Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
  Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
      context;
  SetFp(address() + StackHandlerConstants::kFPOffset, fp);

  *isolate->handler_address() = address();

  return prev_handler_offset->value();
}


// -------------------------------------------------------------------------

// Number of registers set in |reglist|.
int NumRegs(RegList reglist) {
  return CompilerIntrinsics::CountSetBits(reglist);
}


// Table mapping a dense index [0, kNumJSCallerSaved) to the architecture
// register code of each JS caller-saved register.
struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;

// Fills caller_saved_code_data from the kJSCallerSaved register mask. Must
// run before JSCallerSavedCode is used.
void SetUpJSCallerSavedCodeData() {
  int i = 0;
  for (int r = 0; r < kNumRegs; r++)
    if ((kJSCallerSaved & (1 << r)) != 0)
      caller_saved_code_data.reg_code[i++] = r;

  ASSERT(i == kNumJSCallerSaved);
}


// Returns the register code of the n-th JS caller-saved register.
int JSCallerSavedCode(int n) {
  ASSERT(0 <= n && n < kNumJSCallerSaved);
  return caller_saved_code_data.reg_code[n];
}


// Generates one zone-allocated wrapper class per frame type so a stack frame
// can be copied by value into a Zone (see AllocateFrameCopy below).
#define DEFINE_WRAPPER(type, field)                              \
class field##_Wrapper : public ZoneObject {                      \
 public:  /* NOLINT */                                           \
  field##_Wrapper(const field& original) : frame_(original) {    \
  }                                                              \
  field frame_;                                                  \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER

// Copies |frame| into |zone| using the wrapper class matching its dynamic
// type and returns a pointer to the embedded copy.
static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: { \
    field##_Wrapper* wrapper = \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_; \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}


// Snapshots the isolate's entire stack into zone-allocated frame copies,
// returned top-to-bottom in iteration order.
Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> list(10, zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    list.Add(frame, zone);
  }
  return list.ToVector();
}


} }  // namespace v8::internal