1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include <stdlib.h>

#include "v8.h"

#include "allocation-inl.h"
#include "ast.h"
#include "bootstrapper.h"
#include "codegen.h"
#include "compilation-cache.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "deoptimizer.h"
#include "heap-profiler.h"
#include "hydrogen.h"
#include "isolate.h"
#include "lithium-allocator.h"
#include "log.h"
#include "marking-thread.h"
#include "messages.h"
#include "platform.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "sampler.h"
#include "scopeinfo.h"
#include "serialize.h"
#include "simulator.h"
#include "spaces.h"
#include "stub-cache.h"
#include "sweeper-thread.h"
#include "version.h"
#include "vm-state-inl.h"


namespace v8 {
namespace internal {

// Process-wide source of thread ids.  0 is never handed out: it is the
// "no id assigned yet" sentinel checked in GetCurrentThreadId() below.
Atomic32 ThreadId::highest_thread_id_ = 0;

// Returns a fresh, process-unique thread id (>= 1, since the atomic
// increment returns the post-increment value).
int ThreadId::AllocateThreadId() {
  int new_id = NoBarrier_AtomicIncrement(&highest_thread_id_, 1);
  return new_id;
}


// Returns the calling thread's id, allocating one and caching it in
// thread-local storage on the thread's first call.
int ThreadId::GetCurrentThreadId() {
  int thread_id = Thread::GetThreadLocalInt(Isolate::thread_id_key_);
  if (thread_id == 0) {
    thread_id = AllocateThreadId();
    Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
  }
  return thread_id;
}


ThreadLocalTop::ThreadLocalTop() {
  InitializeInternal();
  // This flag may be set using v8::V8::IgnoreOutOfMemoryException()
  // before an isolate is initialized. The initialize methods below do
  // not touch it to preserve its value.
  ignore_out_of_memory_ = false;
}


// Resets every field except ignore_out_of_memory_ (see constructor note).
// Shared by the constructor and by Initialize(), which runs when a thread
// (re-)enters an isolate.
void ThreadLocalTop::InitializeInternal() {
  c_entry_fp_ = 0;
  handler_ = 0;
#ifdef USE_SIMULATOR
  simulator_ = NULL;
#endif
  js_entry_sp_ = NULL;
  external_callback_scope_ = NULL;
  current_vm_state_ = EXTERNAL;
  try_catch_handler_address_ = NULL;
  context_ = NULL;
  thread_id_ = ThreadId::Invalid();
  external_caught_exception_ = false;
  failed_access_check_callback_ = NULL;
  save_context_ = NULL;
  catcher_ = NULL;
  top_lookup_result_ = NULL;

  // These members are re-initialized later after deserialization
  // is complete.
  pending_exception_ = NULL;
  has_pending_message_ = false;
  rethrowing_message_ = false;
  pending_message_obj_ = NULL;
  pending_message_script_ = NULL;
  scheduled_exception_ = NULL;
}


// Full per-thread initialization: resets all fields, then binds the
// simulator (on simulated targets) and the current thread id.
void ThreadLocalTop::Initialize() {
  InitializeInternal();
#ifdef USE_SIMULATOR
#if V8_TARGET_ARCH_ARM
  simulator_ = Simulator::current(isolate_);
#elif V8_TARGET_ARCH_MIPS
  simulator_ = Simulator::current(isolate_);
#endif
#endif
  thread_id_ = ThreadId::Current();
}


// Decodes the stored handler address back into the v8::TryCatch object.
v8::TryCatch* ThreadLocalTop::TryCatchHandler() {
  return TRY_CATCH_FROM_ADDRESS(try_catch_handler_address());
}


// Decides how many helper threads a parallel GC component may use, based
// on the core count (capped at kMaxThreads).  Returns 0 on single-core
// machines, i.e. the component should not go parallel at all.
int SystemThreadManager::NumberOfParallelSystemThreads(
    ParallelSystemComponent type) {
  int number_of_threads = Min(OS::NumberOfCores(), kMaxThreads);
  ASSERT(number_of_threads > 0);
  if (number_of_threads == 1) {
    return 0;
  }
  if (type == PARALLEL_SWEEPING) {
    return number_of_threads;
  } else if (type == CONCURRENT_SWEEPING) {
    // Leave one core for the main thread, which keeps running concurrently.
    return number_of_threads - 1;
  } else if (type == PARALLEL_MARKING) {
    return number_of_threads;
  }
  return 1;
}


// Create a dummy thread that will wait forever on a semaphore. The only
// purpose for this thread is to have some stack area to save essential data
// into for use by a stacks only core dump (aka minidump).
class PreallocatedMemoryThread: public Thread {
 public:
  // Returns the published buffer.  Blocks until Run() has published it;
  // the first caller consumes (and deletes) the data-ready semaphore.
  char* data() {
    if (data_ready_semaphore_ != NULL) {
      // Initial access is guarded until the data has been published.
      data_ready_semaphore_->Wait();
      delete data_ready_semaphore_;
      data_ready_semaphore_ = NULL;
    }
    return data_;
  }

  // Returns the published buffer length, with the same first-access
  // blocking behavior as data().
  unsigned length() {
    if (data_ready_semaphore_ != NULL) {
      // Initial access is guarded until the data has been published.
      data_ready_semaphore_->Wait();
      delete data_ready_semaphore_;
      data_ready_semaphore_ = NULL;
    }
    return length_;
  }

  // Stop the PreallocatedMemoryThread and release its resources.
  void StopThread() {
    keep_running_ = false;
    wait_for_ever_semaphore_->Signal();

    // Wait for the thread to terminate.
    Join();

    // The data-ready semaphore still exists if data()/length() were never
    // called; reclaim it here.
    if (data_ready_semaphore_ != NULL) {
      delete data_ready_semaphore_;
      data_ready_semaphore_ = NULL;
    }

    delete wait_for_ever_semaphore_;
    wait_for_ever_semaphore_ = NULL;
  }

 protected:
  // When the thread starts running it will allocate a fixed number of bytes
  // on the stack and publish the location of this memory for others to use.
  void Run() {
    EmbeddedVector<char, 15 * 1024> local_buffer;

    // Initialize the buffer with a known good value.
    OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
                local_buffer.length());

    // Publish the local buffer and signal its availability.
    data_ = local_buffer.start();
    length_ = local_buffer.length();
    data_ready_semaphore_->Signal();

    while (keep_running_) {
      // This thread will wait here until the end of time.
      wait_for_ever_semaphore_->Wait();
    }

    // Make sure we access the buffer after the wait to remove all possibility
    // of it being optimized away.
    OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
                local_buffer.length());
  }


 private:
  // Construction is private; only Isolate (a friend) creates instances.
  PreallocatedMemoryThread()
      : Thread("v8:PreallocMem"),
        keep_running_(true),
        wait_for_ever_semaphore_(OS::CreateSemaphore(0)),
        data_ready_semaphore_(OS::CreateSemaphore(0)),
        data_(NULL),
        length_(0) {
  }

  // Used to make sure that the thread keeps looping even for spurious wakeups.
  bool keep_running_;

  // This semaphore is used by the PreallocatedMemoryThread to wait for ever.
  Semaphore* wait_for_ever_semaphore_;
  // Semaphore to signal that the data has been initialized.
  Semaphore* data_ready_semaphore_;

  // Location and size of the preallocated memory block.
  char* data_;
  unsigned length_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
};


// Starts the minidump-stack thread; idempotent if already started.
void Isolate::PreallocatedMemoryThreadStart() {
  if (preallocated_memory_thread_ != NULL) return;
  preallocated_memory_thread_ = new PreallocatedMemoryThread();
  preallocated_memory_thread_->Start();
}


// Stops and destroys the minidump-stack thread; no-op if never started.
void Isolate::PreallocatedMemoryThreadStop() {
  if (preallocated_memory_thread_ == NULL) return;
  preallocated_memory_thread_->StopThread();
  // Done with the thread entirely.
  delete preallocated_memory_thread_;
  preallocated_memory_thread_ = NULL;
}


// Seeds the preallocated-storage freelist with one chunk of `size` bytes.
// free_list_ is a circular doubly-linked sentinel node; both ASSERTs check
// that the list is still empty.  The raw char[] block is intentionally
// owned by the freelist for the isolate's lifetime.
void Isolate::PreallocatedStorageInit(size_t size) {
  ASSERT(free_list_.next_ == &free_list_);
  ASSERT(free_list_.previous_ == &free_list_);
  PreallocatedStorage* free_chunk =
      reinterpret_cast<PreallocatedStorage*>(new char[size]);
  free_list_.next_ = free_list_.previous_ = free_chunk;
  free_chunk->next_ = free_chunk->previous_ = &free_list_;
  // The chunk header lives at the front of the block; size_ is the payload.
  free_chunk->size_ = size - sizeof(PreallocatedStorage);
  preallocated_storage_preallocated_ = true;
}


// Allocates `size` bytes from the preallocated freelist (exact fit first,
// then first fit with a split), falling back to the normal free-store
// allocator when preallocation was never set up.  Returns the payload
// pointer, i.e. the address just past the PreallocatedStorage header.
void* Isolate::PreallocatedStorageNew(size_t size) {
  if (!preallocated_storage_preallocated_) {
    return FreeStoreAllocationPolicy().New(size);
  }
  ASSERT(free_list_.next_ != &free_list_);
  ASSERT(free_list_.previous_ != &free_list_);

  // Round the request up to pointer alignment.
  size = (size + kPointerSize - 1) & ~(kPointerSize - 1);
  // Search for exact fit.
  for (PreallocatedStorage* storage = free_list_.next_;
       storage != &free_list_;
       storage = storage->next_) {
    if (storage->size_ == size) {
      storage->Unlink();
      storage->LinkTo(&in_use_list_);
      return reinterpret_cast<void*>(storage + 1);
    }
  }
  // Search for first fit.
  for (PreallocatedStorage* storage = free_list_.next_;
       storage != &free_list_;
       storage = storage->next_) {
    // Need room for the requested payload plus a header for the remainder.
    if (storage->size_ >= size + sizeof(PreallocatedStorage)) {
      storage->Unlink();
      storage->LinkTo(&in_use_list_);
      // Carve the tail of this chunk into a new free chunk.
      PreallocatedStorage* left_over =
          reinterpret_cast<PreallocatedStorage*>(
              reinterpret_cast<char*>(storage + 1) + size);
      left_over->size_ = storage->size_ - size - sizeof(PreallocatedStorage);
      ASSERT(size + left_over->size_ + sizeof(PreallocatedStorage) ==
             storage->size_);
      storage->size_ = size;
      left_over->LinkTo(&free_list_);
      return reinterpret_cast<void*>(storage + 1);
    }
  }
  // Allocation failure.
  ASSERT(false);
  return NULL;
}


// We don't attempt to coalesce.
void Isolate::PreallocatedStorageDelete(void* p) {
  if (p == NULL) {
    return;
  }
  if (!preallocated_storage_preallocated_) {
    FreeStoreAllocationPolicy::Delete(p);
    return;
  }
  // Step back from the payload pointer to the chunk header and sanity-check
  // the list links before moving the chunk back onto the freelist.
  PreallocatedStorage* storage = reinterpret_cast<PreallocatedStorage*>(p) - 1;
  ASSERT(storage->next_->previous_ == storage);
  ASSERT(storage->previous_->next_ == storage);
  storage->Unlink();
  storage->LinkTo(&free_list_);
}

// Process-wide isolate bookkeeping.  process_wide_mutex_ is created during
// static initialization and guards the thread-data table and the keys below.
Isolate* Isolate::default_isolate_ = NULL;
Thread::LocalStorageKey Isolate::isolate_key_;
Thread::LocalStorageKey Isolate::thread_id_key_;
Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
#ifdef DEBUG
Thread::LocalStorageKey PerThreadAssertScopeBase::thread_local_key;
#endif  // DEBUG
Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex();
Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;
Atomic32 Isolate::isolate_counter_ = 0;

// Creates the per-(isolate, thread) data record and registers it in the
// global table under the process-wide lock.  The caller must ensure no
// record exists yet for this pair (ASSERTed below).
Isolate::PerIsolateThreadData* Isolate::AllocatePerIsolateThreadData(
    ThreadId thread_id) {
  ASSERT(!thread_id.Equals(ThreadId::Invalid()));
  PerIsolateThreadData* per_thread = new PerIsolateThreadData(this, thread_id);
  {
    ScopedLock lock(process_wide_mutex_);
    ASSERT(thread_data_table_->Lookup(this, thread_id) == NULL);
    thread_data_table_->Insert(per_thread);
    ASSERT(thread_data_table_->Lookup(this, thread_id) == per_thread);
  }
  return per_thread;
}


// Looks up this thread's data record for this isolate, creating it on
// first use.  Lookup and (possible) allocation happen under one lock
// acquisition so concurrent callers cannot double-allocate.
Isolate::PerIsolateThreadData*
    Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = NULL;
  {
    ScopedLock lock(process_wide_mutex_);
    per_thread = thread_data_table_->Lookup(this, thread_id);
    if (per_thread == NULL) {
      per_thread = AllocatePerIsolateThreadData(thread_id);
    }
  }
  return per_thread;
}
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() { 380 ThreadId thread_id = ThreadId::Current(); 381 return FindPerThreadDataForThread(thread_id); 382 } 383 384 385 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread( 386 ThreadId thread_id) { 387 PerIsolateThreadData* per_thread = NULL; 388 { 389 ScopedLock lock(process_wide_mutex_); 390 per_thread = thread_data_table_->Lookup(this, thread_id); 391 } 392 return per_thread; 393 } 394 395 396 void Isolate::EnsureDefaultIsolate() { 397 ScopedLock lock(process_wide_mutex_); 398 if (default_isolate_ == NULL) { 399 isolate_key_ = Thread::CreateThreadLocalKey(); 400 thread_id_key_ = Thread::CreateThreadLocalKey(); 401 per_isolate_thread_data_key_ = Thread::CreateThreadLocalKey(); 402 #ifdef DEBUG 403 PerThreadAssertScopeBase::thread_local_key = Thread::CreateThreadLocalKey(); 404 #endif // DEBUG 405 thread_data_table_ = new Isolate::ThreadDataTable(); 406 default_isolate_ = new Isolate(); 407 } 408 // Can't use SetIsolateThreadLocals(default_isolate_, NULL) here 409 // because a non-null thread data may be already set. 410 if (Thread::GetThreadLocal(isolate_key_) == NULL) { 411 Thread::SetThreadLocal(isolate_key_, default_isolate_); 412 } 413 } 414 415 struct StaticInitializer { 416 StaticInitializer() { 417 Isolate::EnsureDefaultIsolate(); 418 } 419 } static_initializer; 420 421 #ifdef ENABLE_DEBUGGER_SUPPORT 422 Debugger* Isolate::GetDefaultIsolateDebugger() { 423 EnsureDefaultIsolate(); 424 return default_isolate_->debugger(); 425 } 426 #endif 427 428 429 StackGuard* Isolate::GetDefaultIsolateStackGuard() { 430 EnsureDefaultIsolate(); 431 return default_isolate_->stack_guard(); 432 } 433 434 435 void Isolate::EnterDefaultIsolate() { 436 EnsureDefaultIsolate(); 437 ASSERT(default_isolate_ != NULL); 438 439 PerIsolateThreadData* data = CurrentPerIsolateThreadData(); 440 // If not yet in default isolate - enter it. 
441 if (data == NULL || data->isolate() != default_isolate_) { 442 default_isolate_->Enter(); 443 } 444 } 445 446 447 v8::Isolate* Isolate::GetDefaultIsolateForLocking() { 448 EnsureDefaultIsolate(); 449 return reinterpret_cast<v8::Isolate*>(default_isolate_); 450 } 451 452 453 Address Isolate::get_address_from_id(Isolate::AddressId id) { 454 return isolate_addresses_[id]; 455 } 456 457 458 char* Isolate::Iterate(ObjectVisitor* v, char* thread_storage) { 459 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage); 460 Iterate(v, thread); 461 return thread_storage + sizeof(ThreadLocalTop); 462 } 463 464 465 void Isolate::IterateThread(ThreadVisitor* v, char* t) { 466 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t); 467 v->VisitThread(this, thread); 468 } 469 470 471 void Isolate::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) { 472 // Visit the roots from the top for a given thread. 473 Object* pending; 474 // The pending exception can sometimes be a failure. We can't show 475 // that to the GC, which only understands objects. 476 if (thread->pending_exception_->ToObject(&pending)) { 477 v->VisitPointer(&pending); 478 thread->pending_exception_ = pending; // In case GC updated it. 479 } 480 v->VisitPointer(&(thread->pending_message_obj_)); 481 v->VisitPointer(BitCast<Object**>(&(thread->pending_message_script_))); 482 v->VisitPointer(BitCast<Object**>(&(thread->context_))); 483 Object* scheduled; 484 if (thread->scheduled_exception_->ToObject(&scheduled)) { 485 v->VisitPointer(&scheduled); 486 thread->scheduled_exception_ = scheduled; 487 } 488 489 for (v8::TryCatch* block = thread->TryCatchHandler(); 490 block != NULL; 491 block = TRY_CATCH_FROM_ADDRESS(block->next_)) { 492 v->VisitPointer(BitCast<Object**>(&(block->exception_))); 493 v->VisitPointer(BitCast<Object**>(&(block->message_obj_))); 494 v->VisitPointer(BitCast<Object**>(&(block->message_script_))); 495 } 496 497 // Iterate over pointers on native execution stack. 
498 for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) { 499 it.frame()->Iterate(v); 500 } 501 502 // Iterate pointers in live lookup results. 503 thread->top_lookup_result_->Iterate(v); 504 } 505 506 507 void Isolate::Iterate(ObjectVisitor* v) { 508 ThreadLocalTop* current_t = thread_local_top(); 509 Iterate(v, current_t); 510 } 511 512 513 void Isolate::IterateDeferredHandles(ObjectVisitor* visitor) { 514 for (DeferredHandles* deferred = deferred_handles_head_; 515 deferred != NULL; 516 deferred = deferred->next_) { 517 deferred->Iterate(visitor); 518 } 519 } 520 521 522 #ifdef DEBUG 523 bool Isolate::IsDeferredHandle(Object** handle) { 524 // Each DeferredHandles instance keeps the handles to one job in the 525 // parallel recompilation queue, containing a list of blocks. Each block 526 // contains kHandleBlockSize handles except for the first block, which may 527 // not be fully filled. 528 // We iterate through all the blocks to see whether the argument handle 529 // belongs to one of the blocks. If so, it is deferred. 530 for (DeferredHandles* deferred = deferred_handles_head_; 531 deferred != NULL; 532 deferred = deferred->next_) { 533 List<Object**>* blocks = &deferred->blocks_; 534 for (int i = 0; i < blocks->length(); i++) { 535 Object** block_limit = (i == 0) ? deferred->first_block_limit_ 536 : blocks->at(i) + kHandleBlockSize; 537 if (blocks->at(i) <= handle && handle < block_limit) return true; 538 } 539 } 540 return false; 541 } 542 #endif // DEBUG 543 544 545 void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) { 546 // The ARM simulator has a separate JS stack. We therefore register 547 // the C++ try catch handler with the simulator and get back an 548 // address that can be used for comparisons with addresses into the 549 // JS stack. When running without the simulator, the address 550 // returned will be the address of the C++ try catch handler itself. 
551 Address address = reinterpret_cast<Address>( 552 SimulatorStack::RegisterCTryCatch(reinterpret_cast<uintptr_t>(that))); 553 thread_local_top()->set_try_catch_handler_address(address); 554 } 555 556 557 void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) { 558 ASSERT(thread_local_top()->TryCatchHandler() == that); 559 thread_local_top()->set_try_catch_handler_address( 560 reinterpret_cast<Address>(that->next_)); 561 thread_local_top()->catcher_ = NULL; 562 SimulatorStack::UnregisterCTryCatch(); 563 } 564 565 566 Handle<String> Isolate::StackTraceString() { 567 if (stack_trace_nesting_level_ == 0) { 568 stack_trace_nesting_level_++; 569 HeapStringAllocator allocator; 570 StringStream::ClearMentionedObjectCache(); 571 StringStream accumulator(&allocator); 572 incomplete_message_ = &accumulator; 573 PrintStack(&accumulator); 574 Handle<String> stack_trace = accumulator.ToString(); 575 incomplete_message_ = NULL; 576 stack_trace_nesting_level_ = 0; 577 return stack_trace; 578 } else if (stack_trace_nesting_level_ == 1) { 579 stack_trace_nesting_level_++; 580 OS::PrintError( 581 "\n\nAttempt to print stack while printing stack (double fault)\n"); 582 OS::PrintError( 583 "If you are lucky you may find a partial stack dump on stdout.\n\n"); 584 incomplete_message_->OutputToStdOut(); 585 return factory()->empty_string(); 586 } else { 587 OS::Abort(); 588 // Unreachable 589 return factory()->empty_string(); 590 } 591 } 592 593 594 void Isolate::PushStackTraceAndDie(unsigned int magic, 595 Object* object, 596 Map* map, 597 unsigned int magic2) { 598 const int kMaxStackTraceSize = 8192; 599 Handle<String> trace = StackTraceString(); 600 uint8_t buffer[kMaxStackTraceSize]; 601 int length = Min(kMaxStackTraceSize - 1, trace->length()); 602 String::WriteToFlat(*trace, buffer, 0, length); 603 buffer[length] = '\0'; 604 // TODO(dcarney): convert buffer to utf8? 
605 OS::PrintError("Stacktrace (%x-%x) %p %p: %s\n", 606 magic, magic2, 607 static_cast<void*>(object), static_cast<void*>(map), 608 reinterpret_cast<char*>(buffer)); 609 OS::Abort(); 610 } 611 612 613 // Determines whether the given stack frame should be displayed in 614 // a stack trace. The caller is the error constructor that asked 615 // for the stack trace to be collected. The first time a construct 616 // call to this function is encountered it is skipped. The seen_caller 617 // in/out parameter is used to remember if the caller has been seen 618 // yet. 619 static bool IsVisibleInStackTrace(StackFrame* raw_frame, 620 Object* caller, 621 bool* seen_caller) { 622 // Only display JS frames. 623 if (!raw_frame->is_java_script()) return false; 624 JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame); 625 JSFunction* fun = frame->function(); 626 if ((fun == caller) && !(*seen_caller)) { 627 *seen_caller = true; 628 return false; 629 } 630 // Skip all frames until we've seen the caller. 631 if (!(*seen_caller)) return false; 632 // Also, skip non-visible built-in functions and any call with the builtins 633 // object as receiver, so as to not reveal either the builtins object or 634 // an internal function. 635 // The --builtins-in-stack-traces command line flag allows including 636 // internal call sites in the stack trace for debugging purposes. 637 if (!FLAG_builtins_in_stack_traces) { 638 if (frame->receiver()->IsJSBuiltinsObject() || 639 (fun->IsBuiltin() && !fun->shared()->native())) { 640 return false; 641 } 642 } 643 return true; 644 } 645 646 647 Handle<JSArray> Isolate::CaptureSimpleStackTrace(Handle<JSObject> error_object, 648 Handle<Object> caller, 649 int limit) { 650 limit = Max(limit, 0); // Ensure that limit is not negative. 
651 int initial_size = Min(limit, 10); 652 Handle<FixedArray> elements = 653 factory()->NewFixedArrayWithHoles(initial_size * 4 + 1); 654 655 // If the caller parameter is a function we skip frames until we're 656 // under it before starting to collect. 657 bool seen_caller = !caller->IsJSFunction(); 658 // First element is reserved to store the number of non-strict frames. 659 int cursor = 1; 660 int frames_seen = 0; 661 int non_strict_frames = 0; 662 bool encountered_strict_function = false; 663 for (StackFrameIterator iter(this); 664 !iter.done() && frames_seen < limit; 665 iter.Advance()) { 666 StackFrame* raw_frame = iter.frame(); 667 if (IsVisibleInStackTrace(raw_frame, *caller, &seen_caller)) { 668 frames_seen++; 669 JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame); 670 // Set initial size to the maximum inlining level + 1 for the outermost 671 // function. 672 List<FrameSummary> frames(FLAG_max_inlining_levels + 1); 673 frame->Summarize(&frames); 674 for (int i = frames.length() - 1; i >= 0; i--) { 675 if (cursor + 4 > elements->length()) { 676 int new_capacity = JSObject::NewElementsCapacity(elements->length()); 677 Handle<FixedArray> new_elements = 678 factory()->NewFixedArrayWithHoles(new_capacity); 679 for (int i = 0; i < cursor; i++) { 680 new_elements->set(i, elements->get(i)); 681 } 682 elements = new_elements; 683 } 684 ASSERT(cursor + 4 <= elements->length()); 685 686 Handle<Object> recv = frames[i].receiver(); 687 Handle<JSFunction> fun = frames[i].function(); 688 Handle<Code> code = frames[i].code(); 689 Handle<Smi> offset(Smi::FromInt(frames[i].offset()), this); 690 // The stack trace API should not expose receivers and function 691 // objects on frames deeper than the top-most one with a strict 692 // mode function. The number of non-strict frames is stored as 693 // first element in the result array. 
694 if (!encountered_strict_function) { 695 if (!fun->shared()->is_classic_mode()) { 696 encountered_strict_function = true; 697 } else { 698 non_strict_frames++; 699 } 700 } 701 elements->set(cursor++, *recv); 702 elements->set(cursor++, *fun); 703 elements->set(cursor++, *code); 704 elements->set(cursor++, *offset); 705 } 706 } 707 } 708 elements->set(0, Smi::FromInt(non_strict_frames)); 709 Handle<JSArray> result = factory()->NewJSArrayWithElements(elements); 710 result->set_length(Smi::FromInt(cursor)); 711 return result; 712 } 713 714 715 void Isolate::CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object) { 716 if (capture_stack_trace_for_uncaught_exceptions_) { 717 // Capture stack trace for a detailed exception message. 718 Handle<String> key = factory()->hidden_stack_trace_string(); 719 Handle<JSArray> stack_trace = CaptureCurrentStackTrace( 720 stack_trace_for_uncaught_exceptions_frame_limit_, 721 stack_trace_for_uncaught_exceptions_options_); 722 JSObject::SetHiddenProperty(error_object, key, stack_trace); 723 } 724 } 725 726 727 Handle<JSArray> Isolate::CaptureCurrentStackTrace( 728 int frame_limit, StackTrace::StackTraceOptions options) { 729 // Ensure no negative values. 
730 int limit = Max(frame_limit, 0); 731 Handle<JSArray> stack_trace = factory()->NewJSArray(frame_limit); 732 733 Handle<String> column_key = 734 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("column")); 735 Handle<String> line_key = 736 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("lineNumber")); 737 Handle<String> script_key = 738 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("scriptName")); 739 Handle<String> script_name_or_source_url_key = 740 factory()->InternalizeOneByteString( 741 STATIC_ASCII_VECTOR("scriptNameOrSourceURL")); 742 Handle<String> function_key = 743 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("functionName")); 744 Handle<String> eval_key = 745 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("isEval")); 746 Handle<String> constructor_key = 747 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("isConstructor")); 748 749 StackTraceFrameIterator it(this); 750 int frames_seen = 0; 751 while (!it.done() && (frames_seen < limit)) { 752 JavaScriptFrame* frame = it.frame(); 753 // Set initial size to the maximum inlining level + 1 for the outermost 754 // function. 755 List<FrameSummary> frames(FLAG_max_inlining_levels + 1); 756 frame->Summarize(&frames); 757 for (int i = frames.length() - 1; i >= 0 && frames_seen < limit; i--) { 758 // Create a JSObject to hold the information for the StackFrame. 759 Handle<JSObject> stack_frame = factory()->NewJSObject(object_function()); 760 761 Handle<JSFunction> fun = frames[i].function(); 762 Handle<Script> script(Script::cast(fun->shared()->script())); 763 764 if (options & StackTrace::kLineNumber) { 765 int script_line_offset = script->line_offset()->value(); 766 int position = frames[i].code()->SourcePosition(frames[i].pc()); 767 int line_number = GetScriptLineNumber(script, position); 768 // line_number is already shifted by the script_line_offset. 
769 int relative_line_number = line_number - script_line_offset; 770 if (options & StackTrace::kColumnOffset && relative_line_number >= 0) { 771 Handle<FixedArray> line_ends(FixedArray::cast(script->line_ends())); 772 int start = (relative_line_number == 0) ? 0 : 773 Smi::cast(line_ends->get(relative_line_number - 1))->value() + 1; 774 int column_offset = position - start; 775 if (relative_line_number == 0) { 776 // For the case where the code is on the same line as the script 777 // tag. 778 column_offset += script->column_offset()->value(); 779 } 780 CHECK_NOT_EMPTY_HANDLE( 781 this, 782 JSObject::SetLocalPropertyIgnoreAttributes( 783 stack_frame, column_key, 784 Handle<Smi>(Smi::FromInt(column_offset + 1), this), NONE)); 785 } 786 CHECK_NOT_EMPTY_HANDLE( 787 this, 788 JSObject::SetLocalPropertyIgnoreAttributes( 789 stack_frame, line_key, 790 Handle<Smi>(Smi::FromInt(line_number + 1), this), NONE)); 791 } 792 793 if (options & StackTrace::kScriptName) { 794 Handle<Object> script_name(script->name(), this); 795 CHECK_NOT_EMPTY_HANDLE(this, 796 JSObject::SetLocalPropertyIgnoreAttributes( 797 stack_frame, script_key, script_name, NONE)); 798 } 799 800 if (options & StackTrace::kScriptNameOrSourceURL) { 801 Handle<Object> result = GetScriptNameOrSourceURL(script); 802 CHECK_NOT_EMPTY_HANDLE(this, 803 JSObject::SetLocalPropertyIgnoreAttributes( 804 stack_frame, script_name_or_source_url_key, 805 result, NONE)); 806 } 807 808 if (options & StackTrace::kFunctionName) { 809 Handle<Object> fun_name(fun->shared()->name(), this); 810 if (!fun_name->BooleanValue()) { 811 fun_name = Handle<Object>(fun->shared()->inferred_name(), this); 812 } 813 CHECK_NOT_EMPTY_HANDLE(this, 814 JSObject::SetLocalPropertyIgnoreAttributes( 815 stack_frame, function_key, fun_name, NONE)); 816 } 817 818 if (options & StackTrace::kIsEval) { 819 Handle<Object> is_eval = 820 script->compilation_type() == Script::COMPILATION_TYPE_EVAL ? 
821 factory()->true_value() : factory()->false_value(); 822 CHECK_NOT_EMPTY_HANDLE(this, 823 JSObject::SetLocalPropertyIgnoreAttributes( 824 stack_frame, eval_key, is_eval, NONE)); 825 } 826 827 if (options & StackTrace::kIsConstructor) { 828 Handle<Object> is_constructor = (frames[i].is_constructor()) ? 829 factory()->true_value() : factory()->false_value(); 830 CHECK_NOT_EMPTY_HANDLE(this, 831 JSObject::SetLocalPropertyIgnoreAttributes( 832 stack_frame, constructor_key, 833 is_constructor, NONE)); 834 } 835 836 FixedArray::cast(stack_trace->elements())->set(frames_seen, *stack_frame); 837 frames_seen++; 838 } 839 it.Advance(); 840 } 841 842 stack_trace->set_length(Smi::FromInt(frames_seen)); 843 return stack_trace; 844 } 845 846 847 void Isolate::PrintStack() { 848 PrintStack(stdout); 849 } 850 851 852 void Isolate::PrintStack(FILE* out) { 853 if (stack_trace_nesting_level_ == 0) { 854 stack_trace_nesting_level_++; 855 856 StringAllocator* allocator; 857 if (preallocated_message_space_ == NULL) { 858 allocator = new HeapStringAllocator(); 859 } else { 860 allocator = preallocated_message_space_; 861 } 862 863 StringStream::ClearMentionedObjectCache(); 864 StringStream accumulator(allocator); 865 incomplete_message_ = &accumulator; 866 PrintStack(&accumulator); 867 accumulator.OutputToFile(out); 868 InitializeLoggingAndCounters(); 869 accumulator.Log(); 870 incomplete_message_ = NULL; 871 stack_trace_nesting_level_ = 0; 872 if (preallocated_message_space_ == NULL) { 873 // Remove the HeapStringAllocator created above. 
874 delete allocator; 875 } 876 } else if (stack_trace_nesting_level_ == 1) { 877 stack_trace_nesting_level_++; 878 OS::PrintError( 879 "\n\nAttempt to print stack while printing stack (double fault)\n"); 880 OS::PrintError( 881 "If you are lucky you may find a partial stack dump on stdout.\n\n"); 882 incomplete_message_->OutputToFile(out); 883 } 884 } 885 886 887 static void PrintFrames(Isolate* isolate, 888 StringStream* accumulator, 889 StackFrame::PrintMode mode) { 890 StackFrameIterator it(isolate); 891 for (int i = 0; !it.done(); it.Advance()) { 892 it.frame()->Print(accumulator, mode, i++); 893 } 894 } 895 896 897 void Isolate::PrintStack(StringStream* accumulator) { 898 if (!IsInitialized()) { 899 accumulator->Add( 900 "\n==== JS stack trace is not available =======================\n\n"); 901 accumulator->Add( 902 "\n==== Isolate for the thread is not initialized =============\n\n"); 903 return; 904 } 905 // The MentionedObjectCache is not GC-proof at the moment. 906 DisallowHeapAllocation no_gc; 907 ASSERT(StringStream::IsMentionedObjectCacheClear()); 908 909 // Avoid printing anything if there are no frames. 
910 if (c_entry_fp(thread_local_top()) == 0) return; 911 912 accumulator->Add( 913 "\n==== JS stack trace =========================================\n\n"); 914 PrintFrames(this, accumulator, StackFrame::OVERVIEW); 915 916 accumulator->Add( 917 "\n==== Details ================================================\n\n"); 918 PrintFrames(this, accumulator, StackFrame::DETAILS); 919 920 accumulator->PrintMentionedObjectCache(); 921 accumulator->Add("=====================\n\n"); 922 } 923 924 925 void Isolate::SetFailedAccessCheckCallback( 926 v8::FailedAccessCheckCallback callback) { 927 thread_local_top()->failed_access_check_callback_ = callback; 928 } 929 930 931 void Isolate::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) { 932 if (!thread_local_top()->failed_access_check_callback_) return; 933 934 ASSERT(receiver->IsAccessCheckNeeded()); 935 ASSERT(context()); 936 937 // Get the data object from access check info. 938 JSFunction* constructor = JSFunction::cast(receiver->map()->constructor()); 939 if (!constructor->shared()->IsApiFunction()) return; 940 Object* data_obj = 941 constructor->shared()->get_api_func_data()->access_check_info(); 942 if (data_obj == heap_.undefined_value()) return; 943 944 HandleScope scope(this); 945 Handle<JSObject> receiver_handle(receiver); 946 Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this); 947 { VMState<EXTERNAL> state(this); 948 thread_local_top()->failed_access_check_callback_( 949 v8::Utils::ToLocal(receiver_handle), 950 type, 951 v8::Utils::ToLocal(data)); 952 } 953 } 954 955 956 enum MayAccessDecision { 957 YES, NO, UNKNOWN 958 }; 959 960 961 static MayAccessDecision MayAccessPreCheck(Isolate* isolate, 962 JSObject* receiver, 963 v8::AccessType type) { 964 // During bootstrapping, callback functions are not enabled yet. 
  if (isolate->bootstrapper()->IsActive()) return YES;

  if (receiver->IsJSGlobalProxy()) {
    Object* receiver_context = JSGlobalProxy::cast(receiver)->native_context();
    if (!receiver_context->IsContext()) return NO;

    // Get the native context of current top context.
    // avoid using Isolate::native_context() because it uses Handle.
    Context* native_context =
        isolate->context()->global_object()->native_context();
    if (receiver_context == native_context) return YES;

    if (Context::cast(receiver_context)->security_token() ==
        native_context->security_token())
      return YES;
  }

  return UNKNOWN;
}


// Returns true if the current lexical context may access the named
// property |key| on |receiver|. Hidden-property access is always allowed;
// otherwise tries the pre-check and finally defers to the embedder's
// named security callback (denying when none is installed).
bool Isolate::MayNamedAccess(JSObject* receiver, Object* key,
                             v8::AccessType type) {
  ASSERT(receiver->IsAccessCheckNeeded());

  // The callers of this method are not expecting a GC.
  DisallowHeapAllocation no_gc;

  // Skip checks for hidden properties access. Note, we do not
  // require existence of a context in this case.
  if (key == heap_.hidden_string()) return true;

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.
  ASSERT(context());

  MayAccessDecision decision = MayAccessPreCheck(this, receiver, type);
  if (decision != UNKNOWN) return decision == YES;

  // Get named access check callback
  JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
  if (!constructor->shared()->IsApiFunction()) return false;

  Object* data_obj =
      constructor->shared()->get_api_func_data()->access_check_info();
  if (data_obj == heap_.undefined_value()) return false;

  Object* fun_obj = AccessCheckInfo::cast(data_obj)->named_callback();
  v8::NamedSecurityCallback callback =
      v8::ToCData<v8::NamedSecurityCallback>(fun_obj);

  if (!callback) return false;

  HandleScope scope(this);
  Handle<JSObject> receiver_handle(receiver, this);
  Handle<Object> key_handle(key, this);
  Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
  LOG(this, ApiNamedSecurityCheck(key));
  bool result = false;
  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    result = callback(v8::Utils::ToLocal(receiver_handle),
                      v8::Utils::ToLocal(key_handle),
                      type,
                      v8::Utils::ToLocal(data));
  }
  return result;
}


// Returns true if the current lexical context may access the indexed
// property |index| on |receiver|. Mirrors MayNamedAccess but uses the
// embedder's indexed security callback.
bool Isolate::MayIndexedAccess(JSObject* receiver,
                               uint32_t index,
                               v8::AccessType type) {
  ASSERT(receiver->IsAccessCheckNeeded());
  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.
  ASSERT(context());

  MayAccessDecision decision = MayAccessPreCheck(this, receiver, type);
  if (decision != UNKNOWN) return decision == YES;

  // Get indexed access check callback
  JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
  if (!constructor->shared()->IsApiFunction()) return false;

  Object* data_obj =
      constructor->shared()->get_api_func_data()->access_check_info();
  if (data_obj == heap_.undefined_value()) return false;

  Object* fun_obj = AccessCheckInfo::cast(data_obj)->indexed_callback();
  v8::IndexedSecurityCallback callback =
      v8::ToCData<v8::IndexedSecurityCallback>(fun_obj);

  if (!callback) return false;

  HandleScope scope(this);
  Handle<JSObject> receiver_handle(receiver, this);
  Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
  LOG(this, ApiIndexedSecurityCheck(index));
  bool result = false;
  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    result = callback(v8::Utils::ToLocal(receiver_handle),
                      index,
                      type,
                      v8::Utils::ToLocal(data));
  }
  return result;
}


const char* const Isolate::kStackOverflowMessage =
  "Uncaught RangeError: Maximum call stack size exceeded";


// Throws a copy of the pre-constructed stack-overflow boilerplate (the
// Error constructor cannot be run at this point) and attaches a captured
// stack trace as a hidden property. Always returns Failure::Exception().
Failure* Isolate::StackOverflow() {
  HandleScope scope(this);
  // At this point we cannot create an Error object using its javascript
  // constructor. Instead, we copy the pre-constructed boilerplate and
  // attach the stack trace as a hidden property.
  Handle<String> key = factory()->stack_overflow_string();
  Handle<JSObject> boilerplate =
      Handle<JSObject>::cast(GetProperty(this, js_builtins_object(), key));
  Handle<JSObject> exception = Copy(boilerplate);
  DoThrow(*exception, NULL);

  // Get stack trace limit.
  Handle<Object> error = GetProperty(js_builtins_object(), "$Error");
  if (!error->IsJSObject()) return Failure::Exception();
  Handle<Object> stack_trace_limit =
      GetProperty(Handle<JSObject>::cast(error), "stackTraceLimit");
  if (!stack_trace_limit->IsNumber()) return Failure::Exception();
  double dlimit = stack_trace_limit->Number();
  // NaN limit means "no frames".
  int limit = std::isnan(dlimit) ? 0 : static_cast<int>(dlimit);

  Handle<JSArray> stack_trace = CaptureSimpleStackTrace(
      exception, factory()->undefined_value(), limit);
  JSObject::SetHiddenProperty(exception,
                              factory()->hidden_stack_trace_string(),
                              stack_trace);
  return Failure::Exception();
}


// Throws the special termination exception; always returns
// Failure::Exception().
Failure* Isolate::TerminateExecution() {
  DoThrow(heap_.termination_exception(), NULL);
  return Failure::Exception();
}


// Clears a pending or scheduled termination exception (and the TryCatch
// terminated flag) so the embedder can resume execution.
void Isolate::CancelTerminateExecution() {
  if (try_catch_handler()) {
    try_catch_handler()->has_terminated_ = false;
  }
  if (has_pending_exception() &&
      pending_exception() == heap_.termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
  if (has_scheduled_exception() &&
      scheduled_exception() == heap_.termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_scheduled_exception();
  }
}


Failure* Isolate::Throw(Object* exception, MessageLocation* location) {
  DoThrow(exception, location);
  return Failure::Exception();
}


// Re-throws |exception| without re-reporting it; updates catcher_ so an
// external v8::TryCatch can pick the exception up where appropriate.
Failure* Isolate::ReThrow(MaybeObject* exception) {
  bool can_be_caught_externally = false;
  bool catchable_by_javascript = is_catchable_by_javascript(exception);
  ShouldReportException(&can_be_caught_externally, catchable_by_javascript);

  thread_local_top()->catcher_ = can_be_caught_externally ?
      try_catch_handler() : NULL;

  // Set the exception being re-thrown.
  set_pending_exception(exception);
  if (exception->IsFailure()) return exception->ToFailureUnchecked();
  return Failure::Exception();
}


Failure* Isolate::ThrowIllegalOperation() {
  return Throw(heap_.illegal_access_string());
}


// Throws |exception| immediately (so uncaught errors get reported right
// away), then parks it in the scheduled-exception slot for later
// promotion via PromoteScheduledException().
void Isolate::ScheduleThrow(Object* exception) {
  // When scheduling a throw we first throw the exception to get the
  // error reporting if it is uncaught before rescheduling it.
  Throw(exception);
  PropagatePendingExceptionToExternalTryCatch();
  if (has_pending_exception()) {
    thread_local_top()->scheduled_exception_ = pending_exception();
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
}


// Restores the message state captured by a re-throwing v8::TryCatch so
// the message is reported at the outer handler instead of being lost.
void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
  ASSERT(handler == try_catch_handler());
  ASSERT(handler->HasCaught());
  ASSERT(handler->rethrow_);
  ASSERT(handler->capture_message_);
  Object* message = reinterpret_cast<Object*>(handler->message_obj_);
  Object* script = reinterpret_cast<Object*>(handler->message_script_);
  ASSERT(message->IsJSMessageObject() || message->IsTheHole());
  ASSERT(script->IsScript() || script->IsTheHole());
  thread_local_top()->pending_message_obj_ = message;
  thread_local_top()->pending_message_script_ = script;
  thread_local_top()->pending_message_start_pos_ = handler->message_start_pos_;
  thread_local_top()->pending_message_end_pos_ = handler->message_end_pos_;
}


// Converts the scheduled exception back into a pending one.
Failure* Isolate::PromoteScheduledException() {
  MaybeObject* thrown = scheduled_exception();
  clear_scheduled_exception();
  // Re-throw the exception to avoid getting repeated error reporting.
  return ReThrow(thrown);
}


// Prints a human-readable JavaScript stack trace for the current thread
// to |out|, one line per JS frame.
void Isolate::PrintCurrentStackTrace(FILE* out) {
  StackTraceFrameIterator it(this);
  while (!it.done()) {
    HandleScope scope(this);
    // Find code position if recorded in relocation info.
    JavaScriptFrame* frame = it.frame();
    int pos = frame->LookupCode()->SourcePosition(frame->pc());
    Handle<Object> pos_obj(Smi::FromInt(pos), this);
    // Fetch function and receiver.
    Handle<JSFunction> fun(frame->function());
    Handle<Object> recv(frame->receiver(), this);
    // Advance to the next JavaScript frame and determine if the
    // current frame is the top-level frame.
    it.Advance();
    Handle<Object> is_top_level = it.done()
        ? factory()->true_value()
        : factory()->false_value();
    // Generate and print stack trace line.
    Handle<String> line =
        Execution::GetStackTraceLine(recv, fun, pos_obj, is_top_level);
    if (line->length() > 0) {
      line->PrintOn(out);
      PrintF(out, "\n");
    }
  }
}


// Computes the source location of the top-most JavaScript frame into
// |target|; falls back to an empty-script location (-1, -1) when no
// suitable frame or script source exists.
void Isolate::ComputeLocation(MessageLocation* target) {
  *target = MessageLocation(Handle<Script>(heap_.empty_script()), -1, -1);
  StackTraceFrameIterator it(this);
  if (!it.done()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* fun = frame->function();
    Object* script = fun->shared()->script();
    if (script->IsScript() &&
        !(Script::cast(script)->source()->IsUndefined())) {
      int pos = frame->LookupCode()->SourcePosition(frame->pc());
      // Compute the location from the function and the reloc info.
      Handle<Script> casted_script(Script::cast(script));
      *target = MessageLocation(casted_script, pos, pos + 1);
    }
  }
}


// Decides whether a thrown exception should be reported, and (via the
// out-parameter) whether it can be caught by an external v8::TryCatch
// sitting above the top-most JS try-catch handler on the stack.
bool Isolate::ShouldReportException(bool* can_be_caught_externally,
                                   bool catchable_by_javascript) {
  // Find the top-most try-catch handler.
  StackHandler* handler =
      StackHandler::FromAddress(Isolate::handler(thread_local_top()));
  while (handler != NULL && !handler->is_catch()) {
    handler = handler->next();
  }

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler_address =
      thread_local_top()->try_catch_handler_address();

  // The exception has been externally caught if and only if there is
  // an external handler which is on top of the top-most try-catch
  // handler.
  *can_be_caught_externally = external_handler_address != NULL &&
      (handler == NULL || handler->address() > external_handler_address ||
       !catchable_by_javascript);

  if (*can_be_caught_externally) {
    // Only report the exception if the external handler is verbose.
    return try_catch_handler()->is_verbose_;
  } else {
    // Report the exception if it isn't caught by JavaScript code.
    return handler == NULL;
  }
}


// Returns true if |obj| has the builtin $Error constructor anywhere on
// its prototype chain, i.e. it is (a subclass of) a native Error.
bool Isolate::IsErrorObject(Handle<Object> obj) {
  if (!obj->IsJSObject()) return false;

  String* error_key =
      *(factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("$Error")));
  Object* error_constructor =
      js_builtins_object()->GetPropertyNoExceptionThrown(error_key);

  // Walk the prototype chain comparing each map's constructor.
  for (Object* prototype = *obj; !prototype->IsNull();
       prototype = prototype->GetPrototype(this)) {
    if (!prototype->IsJSObject()) return false;
    if (JSObject::cast(prototype)->map()->constructor() == error_constructor) {
      return true;
    }
  }
  return false;
}

// Guards against recursive aborts when --abort-on-uncaught-exception
// fires while already handling a fatal exception.
static int fatal_exception_depth = 0;

// Sets |exception| as the pending exception. When reporting is required,
// also generates a message object (with source location and, optionally,
// a stack trace) and stashes it in the thread-local pending-message
// slots for later reporting by ReportPendingMessages().
void Isolate::DoThrow(Object* exception, MessageLocation* location) {
  ASSERT(!has_pending_exception());

  HandleScope scope(this);
  Handle<Object> exception_handle(exception, this);

  // Determine reporting and whether the exception is caught externally.
  bool catchable_by_javascript = is_catchable_by_javascript(exception);
  bool can_be_caught_externally = false;
  bool should_report_exception =
      ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
  bool report_exception = catchable_by_javascript && should_report_exception;
  bool try_catch_needs_message =
      can_be_caught_externally && try_catch_handler()->capture_message_ &&
      !thread_local_top()->rethrowing_message_;
  bool bootstrapping = bootstrapper()->IsActive();

  thread_local_top()->rethrowing_message_ = false;

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Notify debugger of exception.
  if (catchable_by_javascript) {
    debugger_->OnException(exception_handle, report_exception);
  }
#endif

  // Generate the message if required.
  if (report_exception || try_catch_needs_message) {
    MessageLocation potential_computed_location;
    if (location == NULL) {
      // If no location was specified we use a computed one instead.
      ComputeLocation(&potential_computed_location);
      location = &potential_computed_location;
    }
    // It's not safe to try to make message objects or collect stack traces
    // while the bootstrapper is active since the infrastructure may not have
    // been properly initialized.
    if (!bootstrapping) {
      Handle<String> stack_trace;
      if (FLAG_trace_exception) stack_trace = StackTraceString();
      Handle<JSArray> stack_trace_object;
      if (capture_stack_trace_for_uncaught_exceptions_) {
        if (IsErrorObject(exception_handle)) {
          // We fetch the stack trace that corresponds to this error object.
          String* key = heap()->hidden_stack_trace_string();
          Object* stack_property =
              JSObject::cast(*exception_handle)->GetHiddenProperty(key);
          // Property lookup may have failed. In this case it's probably not
          // a valid Error object.
          if (stack_property->IsJSArray()) {
            stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
          }
        }
        if (stack_trace_object.is_null()) {
          // Not an error object, we capture at throw site.
          stack_trace_object = CaptureCurrentStackTrace(
              stack_trace_for_uncaught_exceptions_frame_limit_,
              stack_trace_for_uncaught_exceptions_options_);
        }
      }

      Handle<Object> exception_arg = exception_handle;
      // If the exception argument is a custom object, turn it into a string
      // before throwing as uncaught exception. Note that the pending
      // exception object to be set later must not be turned into a string.
      if (exception_arg->IsJSObject() && !IsErrorObject(exception_arg)) {
        bool failed = false;
        exception_arg = Execution::ToDetailString(exception_arg, &failed);
        if (failed) {
          // Stringification itself threw; fall back to a generic label.
          exception_arg = factory()->InternalizeOneByteString(
              STATIC_ASCII_VECTOR("exception"));
        }
      }
      Handle<Object> message_obj = MessageHandler::MakeMessageObject(
          this,
          "uncaught_exception",
          location,
          HandleVector<Object>(&exception_arg, 1),
          stack_trace,
          stack_trace_object);
      thread_local_top()->pending_message_obj_ = *message_obj;
      if (location != NULL) {
        thread_local_top()->pending_message_script_ = *location->script();
        thread_local_top()->pending_message_start_pos_ = location->start_pos();
        thread_local_top()->pending_message_end_pos_ = location->end_pos();
      }

      // If the abort-on-uncaught-exception flag is specified, abort on any
      // exception not caught by JavaScript, even when an external handler is
      // present. This flag is intended for use by JavaScript developers, so
      // print a user-friendly stack trace (not an internal one).
      if (fatal_exception_depth == 0 &&
          FLAG_abort_on_uncaught_exception &&
          (report_exception || can_be_caught_externally)) {
        fatal_exception_depth++;
        PrintF(stderr,
               "%s\n\nFROM\n",
               *MessageHandler::GetLocalizedMessage(this, message_obj));
        PrintCurrentStackTrace(stderr);
        OS::Abort();
      }
    } else if (location != NULL && !location->script().is_null()) {
      // We are bootstrapping and caught an error where the location is set
      // and we have a script for the location.
      // In this case we could have an extension (or an internal error
      // somewhere) and we print out the line number at which the error
      // occurred to the console for easier debugging.
      int line_number = GetScriptLineNumberSafe(location->script(),
                                                location->start_pos());
      if (exception->IsString()) {
        OS::PrintError(
            "Extension or internal compilation error: %s in %s at line %d.\n",
            *String::cast(exception)->ToCString(),
            *String::cast(location->script()->name())->ToCString(),
            line_number + 1);
      } else {
        OS::PrintError(
            "Extension or internal compilation error in %s at line %d.\n",
            *String::cast(location->script()->name())->ToCString(),
            line_number + 1);
      }
    }
  }

  // Save the message for reporting if the the exception remains uncaught.
  thread_local_top()->has_pending_message_ = report_exception;

  // Do not forget to clean catcher_ if currently thrown exception cannot
  // be caught. If necessary, ReThrow will update the catcher.
  thread_local_top()->catcher_ = can_be_caught_externally ?
      try_catch_handler() : NULL;

  set_pending_exception(*exception_handle);
}


// Returns true if the pending exception will be caught by an external
// v8::TryCatch rather than by JavaScript code. Requires a pending
// exception.
bool Isolate::IsExternallyCaught() {
  ASSERT(has_pending_exception());

  if ((thread_local_top()->catcher_ == NULL) ||
      (try_catch_handler() != thread_local_top()->catcher_)) {
    // When throwing the exception, we found no v8::TryCatch
    // which should care about this exception.
    return false;
  }

  if (!is_catchable_by_javascript(pending_exception())) {
    return true;
  }

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler_address =
      thread_local_top()->try_catch_handler_address();
  ASSERT(external_handler_address != NULL);

  // The exception has been externally caught if and only if there is
  // an external handler which is on top of the top-most try-finally
  // handler.
  // There should be no try-catch blocks as they would prohibit us from
  // finding external catcher in the first place (see catcher_ check above).
  //
  // Note, that finally clause would rethrow an exception unless it's
  // aborted by jumps in control flow like return, break, etc. and we'll
  // have another chances to set proper v8::TryCatch.
  StackHandler* handler =
      StackHandler::FromAddress(Isolate::handler(thread_local_top()));
  while (handler != NULL && handler->address() < external_handler_address) {
    ASSERT(!handler->is_catch());
    // A finally handler below the TryCatch will rethrow; not caught yet.
    if (handler->is_finally()) return false;

    handler = handler->next();
  }

  return true;
}


// Reports the pending message (if any) unless the pending exception is an
// out-of-memory failure or termination, then clears the message state.
void Isolate::ReportPendingMessages() {
  ASSERT(has_pending_exception());
  PropagatePendingExceptionToExternalTryCatch();

  // If the pending exception is OutOfMemoryException set out_of_memory in
  // the native context. Note: We have to mark the native context here
  // since the GenerateThrowOutOfMemory stub cannot make a RuntimeCall to
  // set it.
  HandleScope scope(this);
  if (thread_local_top_.pending_exception_->IsOutOfMemory()) {
    context()->mark_out_of_memory();
  } else if (thread_local_top_.pending_exception_ ==
             heap()->termination_exception()) {
    // Do nothing: if needed, the exception has been already propagated to
    // v8::TryCatch.
1487 } else { 1488 if (thread_local_top_.has_pending_message_) { 1489 thread_local_top_.has_pending_message_ = false; 1490 if (!thread_local_top_.pending_message_obj_->IsTheHole()) { 1491 HandleScope scope(this); 1492 Handle<Object> message_obj(thread_local_top_.pending_message_obj_, 1493 this); 1494 if (!thread_local_top_.pending_message_script_->IsTheHole()) { 1495 Handle<Script> script( 1496 Script::cast(thread_local_top_.pending_message_script_)); 1497 int start_pos = thread_local_top_.pending_message_start_pos_; 1498 int end_pos = thread_local_top_.pending_message_end_pos_; 1499 MessageLocation location(script, start_pos, end_pos); 1500 MessageHandler::ReportMessage(this, &location, message_obj); 1501 } else { 1502 MessageHandler::ReportMessage(this, NULL, message_obj); 1503 } 1504 } 1505 } 1506 } 1507 clear_pending_message(); 1508 } 1509 1510 1511 MessageLocation Isolate::GetMessageLocation() { 1512 ASSERT(has_pending_exception()); 1513 1514 if (!thread_local_top_.pending_exception_->IsOutOfMemory() && 1515 thread_local_top_.pending_exception_ != heap()->termination_exception() && 1516 thread_local_top_.has_pending_message_ && 1517 !thread_local_top_.pending_message_obj_->IsTheHole() && 1518 !thread_local_top_.pending_message_obj_->IsTheHole()) { 1519 Handle<Script> script( 1520 Script::cast(thread_local_top_.pending_message_script_)); 1521 int start_pos = thread_local_top_.pending_message_start_pos_; 1522 int end_pos = thread_local_top_.pending_message_end_pos_; 1523 return MessageLocation(script, start_pos, end_pos); 1524 } 1525 1526 return MessageLocation(); 1527 } 1528 1529 1530 void Isolate::TraceException(bool flag) { 1531 FLAG_trace_exception = flag; // TODO(isolates): This is an unfortunate use. 1532 } 1533 1534 1535 bool Isolate::OptionalRescheduleException(bool is_bottom_call) { 1536 ASSERT(has_pending_exception()); 1537 PropagatePendingExceptionToExternalTryCatch(); 1538 1539 // Always reschedule out of memory exceptions. 
  if (!is_out_of_memory()) {
    bool is_termination_exception =
        pending_exception() == heap_.termination_exception();

    // Do not reschedule the exception if this is the bottom call.
    bool clear_exception = is_bottom_call;

    if (is_termination_exception) {
      if (is_bottom_call) {
        thread_local_top()->external_caught_exception_ = false;
        clear_pending_exception();
        return false;
      }
    } else if (thread_local_top()->external_caught_exception_) {
      // If the exception is externally caught, clear it if there are no
      // JavaScript frames on the way to the C++ frame that has the
      // external handler.
      ASSERT(thread_local_top()->try_catch_handler_address() != NULL);
      Address external_handler_address =
          thread_local_top()->try_catch_handler_address();
      JavaScriptFrameIterator it(this);
      if (it.done() || (it.frame()->sp() > external_handler_address)) {
        clear_exception = true;
      }
    }

    // Clear the exception if needed.
    if (clear_exception) {
      thread_local_top()->external_caught_exception_ = false;
      clear_pending_exception();
      return false;
    }
  }

  // Reschedule the exception.
  thread_local_top()->scheduled_exception_ = pending_exception();
  clear_pending_exception();
  return true;
}


void Isolate::SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options) {
  capture_stack_trace_for_uncaught_exceptions_ = capture;
  stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
  stack_trace_for_uncaught_exceptions_options_ = options;
}


// Returns true if either the pending or the scheduled exception is an
// out-of-memory failure.
bool Isolate::is_out_of_memory() {
  if (has_pending_exception()) {
    MaybeObject* e = pending_exception();
    if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
      return true;
    }
  }
  if (has_scheduled_exception()) {
    MaybeObject* e = scheduled_exception();
    if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
      return true;
    }
  }
  return false;
}


Handle<Context> Isolate::native_context() {
  return Handle<Context>(context()->global_object()->native_context());
}


Handle<Context> Isolate::global_context() {
  return Handle<Context>(context()->global_object()->global_context());
}


// Returns the native context of the top-most JavaScript frame, skipping
// debugger-context frames when the debugger is active, or an empty handle
// when no JavaScript frame exists.
Handle<Context> Isolate::GetCallingNativeContext() {
  JavaScriptFrameIterator it(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (debug_->InDebugger()) {
    while (!it.done()) {
      JavaScriptFrame* frame = it.frame();
      Context* context = Context::cast(frame->context());
      if (context->native_context() == *debug_->debug_context()) {
        it.Advance();
      } else {
        break;
      }
    }
  }
#endif  // ENABLE_DEBUGGER_SUPPORT
  if (it.done()) return Handle<Context>::null();
  JavaScriptFrame* frame = it.frame();
  Context* context = Context::cast(frame->context());
  return Handle<Context>(context->native_context());
}


// Copies this thread's ThreadLocalTop into the archive buffer |to|, then
// resets the live thread-local state; returns the advanced pointer.
char* Isolate::ArchiveThread(char* to) {
  OS::MemCopy(to,
              reinterpret_cast<char*>(thread_local_top()),
              sizeof(ThreadLocalTop));
  InitializeThreadLocal();
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();
  return to + sizeof(ThreadLocalTop);
}


// Restores a previously archived ThreadLocalTop from buffer |from|;
// returns the advanced pointer.
char* Isolate::RestoreThread(char* from) {
  OS::MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
              sizeof(ThreadLocalTop));
  // This might be just paranoia, but it seems to be needed in case a
  // thread_local_top_ is restored on a separate OS thread.
#ifdef USE_SIMULATOR
#if V8_TARGET_ARCH_ARM
  thread_local_top()->simulator_ = Simulator::current(this);
#elif V8_TARGET_ARCH_MIPS
  thread_local_top()->simulator_ = Simulator::current(this);
#endif
#endif
  ASSERT(context() == NULL || context()->IsContext());
  return from + sizeof(ThreadLocalTop);
}


Isolate::ThreadDataTable::ThreadDataTable()
    : list_(NULL) {
}


Isolate::ThreadDataTable::~ThreadDataTable() {
  // TODO(svenpanne) The assertion below would fire if an embedder does not
  // cleanly dispose all Isolates before disposing v8, so we are conservative
  // and leave it out for now.
  // ASSERT_EQ(NULL, list_);
}


// Linear scan of the doubly-linked per-isolate thread-data list for the
// entry matching |isolate| and |thread_id|; NULL when absent.
Isolate::PerIsolateThreadData*
    Isolate::ThreadDataTable::Lookup(Isolate* isolate,
                                     ThreadId thread_id) {
  for (PerIsolateThreadData* data = list_; data != NULL; data = data->next_) {
    if (data->Matches(isolate, thread_id)) return data;
  }
  return NULL;
}


// Prepends |data| to the list.
void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
  if (list_ != NULL) list_->prev_ = data;
  data->next_ = list_;
  list_ = data;
}


// Unlinks |data| from the list and deletes it.
void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
  if (list_ == data) list_ = data->next_;
  if (data->next_ != NULL) data->next_->prev_ = data->prev_;
  if (data->prev_ != NULL) data->prev_->next_ = data->next_;
  delete data;
}


void Isolate::ThreadDataTable::Remove(Isolate* isolate,
                                      ThreadId thread_id) {
  PerIsolateThreadData* data = Lookup(isolate, thread_id);
  if (data != NULL) {
    Remove(data);
  }
}


// Removes every entry belonging to |isolate| (used during isolate
// teardown).
void Isolate::ThreadDataTable::RemoveAllThreads(Isolate* isolate) {
  PerIsolateThreadData* data = list_;
  while (data != NULL) {
    PerIsolateThreadData* next = data->next_;
    if (data->isolate() == isolate) Remove(data);
    data = next;
  }
}


#ifdef DEBUG
#define TRACE_ISOLATE(tag)                                              \
  do {                                                                  \
    if (FLAG_trace_isolates) {                                          \
      PrintF("Isolate %p (id %d)" #tag "\n",                            \
             reinterpret_cast<void*>(this), id());                      \
    }                                                                   \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif


// Constructs an isolate with all subsystem pointers NULLed out; the heavy
// initialization happens later in Isolate::Init().
Isolate::Isolate()
    : state_(UNINITIALIZED),
      embedder_data_(NULL),
      entry_stack_(NULL),
      stack_trace_nesting_level_(0),
      incomplete_message_(NULL),
      preallocated_memory_thread_(NULL),
      preallocated_message_space_(NULL),
      bootstrapper_(NULL),
      runtime_profiler_(NULL),
      compilation_cache_(NULL),
      counters_(NULL),
      code_range_(NULL),
      // Must be initialized early to allow v8::SetResourceConstraints calls.
      break_access_(OS::CreateMutex()),
      debugger_initialized_(false),
      // Must be initialized early to allow v8::Debug calls.
      debugger_access_(OS::CreateMutex()),
      logger_(NULL),
      stats_table_(NULL),
      stub_cache_(NULL),
      deoptimizer_data_(NULL),
      capture_stack_trace_for_uncaught_exceptions_(false),
      stack_trace_for_uncaught_exceptions_frame_limit_(0),
      stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
      transcendental_cache_(NULL),
      memory_allocator_(NULL),
      keyed_lookup_cache_(NULL),
      context_slot_cache_(NULL),
      descriptor_lookup_cache_(NULL),
      handle_scope_implementer_(NULL),
      unicode_cache_(NULL),
      runtime_zone_(this),
      in_use_list_(0),
      free_list_(0),
      preallocated_storage_preallocated_(false),
      inner_pointer_to_code_cache_(NULL),
      write_iterator_(NULL),
      global_handles_(NULL),
      eternal_handles_(NULL),
      context_switcher_(NULL),
      thread_manager_(NULL),
      fp_stubs_generated_(false),
      has_installed_extensions_(false),
      string_tracker_(NULL),
      regexp_stack_(NULL),
      date_cache_(NULL),
      code_stub_interface_descriptors_(NULL),
      initialized_from_snapshot_(false),
      cpu_profiler_(NULL),
      heap_profiler_(NULL),
      function_entry_hook_(NULL),
      deferred_handles_head_(NULL),
      optimizing_compiler_thread_(this),
      marking_thread_(NULL),
      sweeper_thread_(NULL),
      callback_table_(NULL),
      stress_deopt_count_(0) {
  id_ = NoBarrier_AtomicIncrement(&isolate_counter_, 1);
  TRACE_ISOLATE(constructor);

  memset(isolate_addresses_, 0,
      sizeof(isolate_addresses_[0]) * (kIsolateAddressCount + 1));

  heap_.isolate_ = this;
  stack_guard_.isolate_ = this;

  // ThreadManager is initialized early to support locking an isolate
  // before it is entered.
  thread_manager_ = new ThreadManager();
  thread_manager_->isolate_ = this;

#if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
    V8_TARGET_ARCH_MIPS && !defined(__mips__)
  simulator_initialized_ = false;
  simulator_i_cache_ = NULL;
  simulator_redirection_ = NULL;
#endif

#ifdef DEBUG
  // heap_histograms_ initializes itself.
  memset(&js_spill_information_, 0, sizeof(js_spill_information_));
  memset(code_kind_statistics_, 0,
         sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS);
#endif

#ifdef ENABLE_DEBUGGER_SUPPORT
  debug_ = NULL;
  debugger_ = NULL;
#endif

  handle_scope_data_.Initialize();

// Give every ISOLATE_INIT_LIST member its declared initial value.
#define ISOLATE_INIT_EXECUTE(type, name, initial_value)                        \
  name##_ = (initial_value);
  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
#undef ISOLATE_INIT_EXECUTE

// Zero-fill every ISOLATE_INIT_ARRAY_LIST member.
#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length)                         \
  memset(name##_, 0, sizeof(type) * length);
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
#undef ISOLATE_INIT_ARRAY_EXECUTE
}


// Deinitializes the isolate, removes it from the global thread-data
// table, and deletes it (unless it is the re-initializable default
// isolate).
void Isolate::TearDown() {
  TRACE_ISOLATE(tear_down);

  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
  Isolate* saved_isolate = UncheckedCurrent();
  SetIsolateThreadLocals(this, NULL);

  Deinit();

  { ScopedLock lock(process_wide_mutex_);
    thread_data_table_->RemoveAllThreads(this);
  }

  if (serialize_partial_snapshot_cache_ != NULL) {
    delete[] serialize_partial_snapshot_cache_;
    serialize_partial_snapshot_cache_ = NULL;
  }

  if (!IsDefaultIsolate()) {
    delete this;
  }

  // Restore the previous current isolate.
1870 SetIsolateThreadLocals(saved_isolate, saved_data); 1871 } 1872 1873 1874 void Isolate::GlobalTearDown() { 1875 delete thread_data_table_; 1876 } 1877 1878 1879 void Isolate::Deinit() { 1880 if (state_ == INITIALIZED) { 1881 TRACE_ISOLATE(deinit); 1882 1883 #ifdef ENABLE_DEBUGGER_SUPPORT 1884 debugger()->UnloadDebugger(); 1885 #endif 1886 1887 if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Stop(); 1888 1889 if (FLAG_sweeper_threads > 0) { 1890 for (int i = 0; i < FLAG_sweeper_threads; i++) { 1891 sweeper_thread_[i]->Stop(); 1892 delete sweeper_thread_[i]; 1893 } 1894 delete[] sweeper_thread_; 1895 } 1896 1897 if (FLAG_marking_threads > 0) { 1898 for (int i = 0; i < FLAG_marking_threads; i++) { 1899 marking_thread_[i]->Stop(); 1900 delete marking_thread_[i]; 1901 } 1902 delete[] marking_thread_; 1903 } 1904 1905 if (FLAG_hydrogen_stats) GetHStatistics()->Print(); 1906 1907 if (FLAG_print_deopt_stress) { 1908 PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_); 1909 } 1910 1911 // We must stop the logger before we tear down other components. 1912 Sampler* sampler = logger_->sampler(); 1913 if (sampler && sampler->IsActive()) sampler->Stop(); 1914 1915 delete deoptimizer_data_; 1916 deoptimizer_data_ = NULL; 1917 if (FLAG_preemption) { 1918 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this)); 1919 v8::Locker::StopPreemption(); 1920 } 1921 builtins_.TearDown(); 1922 bootstrapper_->TearDown(); 1923 1924 // Remove the external reference to the preallocated stack memory. 
    delete preallocated_message_space_;
    preallocated_message_space_ = NULL;
    PreallocatedMemoryThreadStop();

    if (runtime_profiler_ != NULL) {
      runtime_profiler_->TearDown();
      delete runtime_profiler_;
      runtime_profiler_ = NULL;
    }
    heap_.TearDown();
    logger_->TearDown();

    // Profilers observe the heap, so they go after heap_.TearDown().
    delete heap_profiler_;
    heap_profiler_ = NULL;
    delete cpu_profiler_;
    cpu_profiler_ = NULL;

    // The default isolate is re-initializable due to legacy API.
    state_ = UNINITIALIZED;
  }
}


// Appends |obj| to the partial snapshot cache, growing the backing
// store by roughly 20% (plus slack) when full.  The initial backing
// store is not heap-allocated, so it is only delete[]d once
// capacity != 0.
void Isolate::PushToPartialSnapshotCache(Object* obj) {
  int length = serialize_partial_snapshot_cache_length();
  int capacity = serialize_partial_snapshot_cache_capacity();

  if (length >= capacity) {
    int new_capacity = static_cast<int>((capacity + 10) * 1.2);
    Object** new_array = new Object*[new_capacity];
    for (int i = 0; i < length; i++) {
      new_array[i] = serialize_partial_snapshot_cache()[i];
    }
    if (capacity != 0) delete[] serialize_partial_snapshot_cache();
    set_serialize_partial_snapshot_cache(new_array);
    set_serialize_partial_snapshot_cache_capacity(new_capacity);
  }

  serialize_partial_snapshot_cache()[length] = obj;
  set_serialize_partial_snapshot_cache_length(length + 1);
}


// Installs |isolate| and its |data| into the calling thread's TLS
// slots; either argument may be NULL to clear the slot.
void Isolate::SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data) {
  Thread::SetThreadLocal(isolate_key_, isolate);
  Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
}


// Releases all heap-allocated members.  Deinit() must have run first;
// the deletion order below is deliberate (e.g. counters_ outlives the
// zone segment release).
Isolate::~Isolate() {
  TRACE_ISOLATE(destructor);

  // Has to be called while counters_ are still alive
  runtime_zone_.DeleteKeptSegment();

  // The entry stack must be empty when we get here,
  // except for the default isolate, where it can
  // still contain up to one entry stack item
  ASSERT(entry_stack_ == NULL || this == default_isolate_);
  ASSERT(entry_stack_ == NULL ||
         entry_stack_->previous_item == NULL);

  delete entry_stack_;
  entry_stack_ = NULL;

  delete[] assembler_spare_buffer_;
  assembler_spare_buffer_ = NULL;

  delete unicode_cache_;
  unicode_cache_ = NULL;

  delete date_cache_;
  date_cache_ = NULL;

  delete[] code_stub_interface_descriptors_;
  code_stub_interface_descriptors_ = NULL;

  delete regexp_stack_;
  regexp_stack_ = NULL;

  delete descriptor_lookup_cache_;
  descriptor_lookup_cache_ = NULL;
  delete context_slot_cache_;
  context_slot_cache_ = NULL;
  delete keyed_lookup_cache_;
  keyed_lookup_cache_ = NULL;

  delete transcendental_cache_;
  transcendental_cache_ = NULL;
  delete stub_cache_;
  stub_cache_ = NULL;
  delete stats_table_;
  stats_table_ = NULL;

  delete logger_;
  logger_ = NULL;

  delete counters_;
  counters_ = NULL;

  delete handle_scope_implementer_;
  handle_scope_implementer_ = NULL;
  delete break_access_;
  break_access_ = NULL;
  delete debugger_access_;
  debugger_access_ = NULL;

  delete compilation_cache_;
  compilation_cache_ = NULL;
  delete bootstrapper_;
  bootstrapper_ = NULL;
  delete inner_pointer_to_code_cache_;
  inner_pointer_to_code_cache_ = NULL;
  delete write_iterator_;
  write_iterator_ = NULL;

  delete context_switcher_;
  context_switcher_ = NULL;
  delete thread_manager_;
  thread_manager_ = NULL;

  delete string_tracker_;
  string_tracker_ = NULL;

  delete memory_allocator_;
  memory_allocator_ = NULL;
  delete code_range_;
  code_range_ = NULL;
  delete global_handles_;
  global_handles_ = NULL;
  delete eternal_handles_;
  eternal_handles_ = NULL;

  delete string_stream_debug_object_cache_;
  string_stream_debug_object_cache_ = NULL;

  delete external_reference_table_;
  external_reference_table_ = NULL;

  delete callback_table_;
  callback_table_ = NULL;

#ifdef ENABLE_DEBUGGER_SUPPORT
  delete debugger_;
  debugger_ = NULL;
  delete debug_;
  debug_ = NULL;
#endif
}


// Points the per-thread top-of-stack data back at this isolate and
// resets its fields.
void Isolate::InitializeThreadLocal() {
  thread_local_top_.isolate_ = this;
  thread_local_top_.Initialize();
}


// Copies the pending exception (and its message, if any) into the
// innermost external v8::TryCatch, when one is present and able to
// catch it.  OOM and termination exceptions get special treatment.
void Isolate::PropagatePendingExceptionToExternalTryCatch() {
  ASSERT(has_pending_exception());

  bool external_caught = IsExternallyCaught();
  thread_local_top_.external_caught_exception_ = external_caught;

  if (!external_caught) return;

  if (thread_local_top_.pending_exception_->IsOutOfMemory()) {
    // Do not propagate OOM exception: we should kill VM asap.
  } else if (thread_local_top_.pending_exception_ ==
             heap()->termination_exception()) {
    try_catch_handler()->can_continue_ = false;
    try_catch_handler()->has_terminated_ = true;
    try_catch_handler()->exception_ = heap()->null_value();
  } else {
    v8::TryCatch* handler = try_catch_handler();
    // At this point all non-object (failure) exceptions have
    // been dealt with so this shouldn't fail.
    ASSERT(!pending_exception()->IsFailure());
    ASSERT(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
           thread_local_top_.pending_message_obj_->IsTheHole());
    ASSERT(thread_local_top_.pending_message_script_->IsScript() ||
           thread_local_top_.pending_message_script_->IsTheHole());
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
    handler->exception_ = pending_exception();
    // Propagate to the external try-catch only if we got an actual message.
    if (thread_local_top_.pending_message_obj_->IsTheHole()) return;

    handler->message_obj_ = thread_local_top_.pending_message_obj_;
    handler->message_script_ = thread_local_top_.pending_message_script_;
    handler->message_start_pos_ = thread_local_top_.pending_message_start_pos_;
    handler->message_end_pos_ = thread_local_top_.pending_message_end_pos_;
  }
}


// Lazily creates the logger and counters.  Idempotent, so it is safe
// to call from several early-initialization paths.
void Isolate::InitializeLoggingAndCounters() {
  if (logger_ == NULL) {
    logger_ = new Logger(this);
  }
  if (counters_ == NULL) {
    counters_ = new Counters(this);
  }
}


// Creates the debugger once, guarded by debugger_access_; the atomic
// flag lets other threads observe completed initialization without
// taking the lock first.
void Isolate::InitializeDebugger() {
#ifdef ENABLE_DEBUGGER_SUPPORT
  ScopedLock lock(debugger_access_);
  if (NoBarrier_Load(&debugger_initialized_)) return;
  InitializeLoggingAndCounters();
  debug_ = new Debug(this);
  debugger_ = new Debugger(this);
  Release_Store(&debugger_initialized_, true);
#endif
}


// Fully initializes the isolate, either from scratch (des == NULL) or
// by deserializing a snapshot.  Returns false on unrecoverable OOM
// (after reporting it).  Must run with this isolate current.
bool Isolate::Init(Deserializer* des) {
  ASSERT(state_ != INITIALIZED);
  ASSERT(Isolate::Current() == this);
  TRACE_ISOLATE(init);

  stress_deopt_count_ = FLAG_deopt_every_n_times;

  if (function_entry_hook() != NULL) {
    // When function entry hooking is in effect, we have to create the code
    // stubs from scratch to get entry hooks, rather than loading the previously
    // generated stubs from disk.
    // If this assert fires, the initialization path has regressed.
    ASSERT(des == NULL);
  }

  // The initialization process does not handle memory exhaustion.
  DisallowAllocationFailure disallow_allocation_failure;

  InitializeLoggingAndCounters();

  InitializeDebugger();

  memory_allocator_ = new MemoryAllocator(this);
  code_range_ = new CodeRange(this);

  // Safe after setting Heap::isolate_, initializing StackGuard and
  // ensuring that Isolate::Current() == this.
  heap_.SetStackLimits();

  // Record the address of each named isolate field so it can be
  // referenced from generated code.
#define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
  isolate_addresses_[Isolate::k##CamelName##Address] =          \
      reinterpret_cast<Address>(hacker_name##_address());
  FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
#undef ASSIGN_ELEMENT

  // Construct the subsystems that do not need a live heap.
  string_tracker_ = new StringTracker();
  string_tracker_->isolate_ = this;
  compilation_cache_ = new CompilationCache(this);
  transcendental_cache_ = new TranscendentalCache();
  keyed_lookup_cache_ = new KeyedLookupCache();
  context_slot_cache_ = new ContextSlotCache();
  descriptor_lookup_cache_ = new DescriptorLookupCache();
  unicode_cache_ = new UnicodeCache();
  inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
  write_iterator_ = new ConsStringIteratorOp();
  global_handles_ = new GlobalHandles(this);
  eternal_handles_ = new EternalHandles();
  bootstrapper_ = new Bootstrapper(this);
  handle_scope_implementer_ = new HandleScopeImplementer(this);
  stub_cache_ = new StubCache(this);
  regexp_stack_ = new RegExpStack();
  regexp_stack_->isolate_ = this;
  date_cache_ = new DateCache();
  code_stub_interface_descriptors_ =
      new CodeStubInterfaceDescriptor[CodeStub::NUMBER_OF_IDS];
  cpu_profiler_ = new CpuProfiler(this);
  heap_profiler_ = new HeapProfiler(heap());

  // Enable logging before setting up the heap
  logger_->SetUp(this);

  // Initialize other runtime facilities
#if defined(USE_SIMULATOR)
#if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_MIPS
  Simulator::Initialize(this);
#endif
#endif

  { // NOLINT
    // Ensure that the thread has a valid stack guard. The v8::Locker object
    // will ensure this too, but we don't have to use lockers if we are only
    // using one thread.
    ExecutionAccess lock(this);
    stack_guard_.InitThread(lock);
  }

  // SetUp the object heap.
  ASSERT(!heap_.HasBeenSetUp());
  if (!heap_.SetUp()) {
    V8::FatalProcessOutOfMemory("heap setup");
    return false;
  }

  deoptimizer_data_ = new DeoptimizerData(memory_allocator_);

  // With no deserializer the heap's objects must be created from scratch.
  const bool create_heap_objects = (des == NULL);
  if (create_heap_objects && !heap_.CreateHeapObjects()) {
    V8::FatalProcessOutOfMemory("heap object creation");
    return false;
  }

  if (create_heap_objects) {
    // Terminate the cache array with the sentinel so we can iterate.
    PushToPartialSnapshotCache(heap_.undefined_value());
  }

  InitializeThreadLocal();

  bootstrapper_->Initialize(create_heap_objects);
  builtins_.SetUp(create_heap_objects);

  // Only preallocate on the first initialization.
  if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
    // Start the thread which will set aside some memory.
    PreallocatedMemoryThreadStart();
    preallocated_message_space_ =
        new NoAllocationStringAllocator(
            preallocated_memory_thread_->data(),
            preallocated_memory_thread_->length());
    PreallocatedStorageInit(preallocated_memory_thread_->length() / 4);
  }

  if (FLAG_preemption) {
    v8::Locker locker(reinterpret_cast<v8::Isolate*>(this));
    v8::Locker::StartPreemption(100);
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  debug_->SetUp(create_heap_objects);
#endif

  // If we are deserializing, read the state into the now-empty heap.
  if (!create_heap_objects) {
    des->Deserialize();
  }
  stub_cache_->Initialize();

  // Finish initialization of ThreadLocal after deserialization is done.
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();

  // Deserializing may put strange things in the root array's copy of the
  // stack guard.
  heap_.SetStackLimits();

  // Quiet the heap NaN if needed on target platform.
  if (!create_heap_objects) Assembler::QuietNaN(heap_.nan_value());

  runtime_profiler_ = new RuntimeProfiler(this);
  runtime_profiler_->SetUp();

  // If we are deserializing, log non-function code objects and compiled
  // functions found in the snapshot.
  if (!create_heap_objects &&
      (FLAG_log_code || FLAG_ll_prof || logger_->is_logging_code_events())) {
    HandleScope scope(this);
    LOG(this, LogCodeObjects());
    LOG(this, LogCompiledFunctions());
  }

  // The public API hard-codes these offsets; fail fast if the layout
  // ever drifts.
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
           Internals::kIsolateEmbedderDataOffset);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
           Internals::kIsolateRootsOffset);

  state_ = INITIALIZED;
  time_millis_at_init_ = OS::TimeCurrentMillis();

  if (!create_heap_objects) {
    // Now that the heap is consistent, it's OK to generate the code for the
    // deopt entry table that might have been referred to by optimized code in
    // the snapshot.
    HandleScope scope(this);
    Deoptimizer::EnsureCodeForDeoptimizationEntry(
        this,
        Deoptimizer::LAZY,
        kDeoptTableSerializeEntryCount - 1);
  }

  if (!Serializer::enabled()) {
    // Ensure that all stubs which need to be generated ahead of time, but
    // cannot be serialized into the snapshot have been generated.
    HandleScope scope(this);
    CodeStub::GenerateFPStubs(this);
    StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
    StubFailureTrampolineStub::GenerateAheadOfTime(this);
    // TODO(mstarzinger): The following is an ugly hack to make sure the
    // interface descriptor is initialized even when stubs have been
    // deserialized out of the snapshot without the graph builder.
    FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::CLONE_ELEMENTS,
                                   DONT_TRACK_ALLOCATION_SITE, 0);
    stub.InitializeInterfaceDescriptor(
        this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
    CompareNilICStub::InitializeForIsolate(this);
    ToBooleanStub::InitializeForIsolate(this);
    ArrayConstructorStubBase::InstallDescriptors(this);
    InternalArrayConstructorStubBase::InstallDescriptors(this);
  }

  // Helper threads are started last, once everything they use exists.
  if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();

  if (FLAG_marking_threads > 0) {
    marking_thread_ = new MarkingThread*[FLAG_marking_threads];
    for (int i = 0; i < FLAG_marking_threads; i++) {
      marking_thread_[i] = new MarkingThread(this);
      marking_thread_[i]->Start();
    }
  }

  if (FLAG_sweeper_threads > 0) {
    sweeper_thread_ = new SweeperThread*[FLAG_sweeper_threads];
    for (int i = 0; i < FLAG_sweeper_threads; i++) {
      sweeper_thread_[i] = new SweeperThread(this);
      sweeper_thread_[i]->Start();
    }
  }

  initialized_from_snapshot_ = (des != NULL);

  return true;
}


// Initialized lazily to allow early
// v8::V8::SetAddHistogramSampleFunction calls.
StatsTable* Isolate::stats_table() {
  if (stats_table_ == NULL) {
    stats_table_ = new StatsTable;
  }
  return stats_table_;
}


// Makes this isolate current for the calling thread, pushing the
// previously-current isolate (if different) onto the entry stack so
// Exit() can restore it.  Re-entry by the same thread only bumps a
// counter.
void Isolate::Enter() {
  Isolate* current_isolate = NULL;
  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
  if (current_data != NULL) {
    current_isolate = current_data->isolate_;
    ASSERT(current_isolate != NULL);
    if (current_isolate == this) {
      ASSERT(Current() == this);
      ASSERT(entry_stack_ != NULL);
      ASSERT(entry_stack_->previous_thread_data == NULL ||
             entry_stack_->previous_thread_data->thread_id().Equals(
                 ThreadId::Current()));
      // Same thread re-enters the isolate, no need to re-init anything.
      entry_stack_->entry_count++;
      return;
    }
  }

  // Threads can have default isolate set into TLS as Current but not yet have
  // PerIsolateThreadData for it, as it requires more advanced phase of the
  // initialization. For example, a thread might be the one that system used for
  // static initializers - in this case the default isolate is set in TLS but
  // the thread did not yet Enter the isolate. If PerisolateThreadData is not
  // there, use the isolate set in TLS.
  if (current_isolate == NULL) {
    current_isolate = Isolate::UncheckedCurrent();
  }

  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
  ASSERT(data != NULL);
  ASSERT(data->isolate_ == this);

  // Remember what was current so Exit() can restore it.
  EntryStackItem* item = new EntryStackItem(current_data,
                                            current_isolate,
                                            entry_stack_);
  entry_stack_ = item;

  SetIsolateThreadLocals(this, data);

  // In case it's the first time some thread enters the isolate.
  set_thread_id(data->thread_id());
}


// Undoes one Enter().  Only when the outermost Enter() on this thread
// is matched does the previously-current isolate come back into TLS.
void Isolate::Exit() {
  ASSERT(entry_stack_ != NULL);
  ASSERT(entry_stack_->previous_thread_data == NULL ||
         entry_stack_->previous_thread_data->thread_id().Equals(
             ThreadId::Current()));

  if (--entry_stack_->entry_count > 0) return;

  ASSERT(CurrentPerIsolateThreadData() != NULL);
  ASSERT(CurrentPerIsolateThreadData()->isolate_ == this);

  // Pop the stack.
  EntryStackItem* item = entry_stack_;
  entry_stack_ = item->previous_item;

  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
  Isolate* previous_isolate = item->previous_isolate;

  delete item;

  // Reinit the current thread for the isolate it was running before this one.
  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
}


// Prepends |deferred| to the doubly-linked list of deferred handle
// blocks.
void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
  deferred->next_ = deferred_handles_head_;
  if (deferred_handles_head_ != NULL) {
    deferred_handles_head_->previous_ = deferred;
  }
  deferred_handles_head_ = deferred;
}


// Removes |deferred| from the deferred-handles list, fixing up both
// neighbors and the head pointer.
void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
#ifdef DEBUG
  // In debug mode assert that the linked list is well-formed.
  DeferredHandles* deferred_iterator = deferred;
  while (deferred_iterator->previous_ != NULL) {
    deferred_iterator = deferred_iterator->previous_;
  }
  ASSERT(deferred_handles_head_ == deferred_iterator);
#endif
  if (deferred_handles_head_ == deferred) {
    deferred_handles_head_ = deferred_handles_head_->next_;
  }
  if (deferred->next_ != NULL) {
    deferred->next_->previous_ = deferred->previous_;
  }
  if (deferred->previous_ != NULL) {
    deferred->previous_->next_ = deferred->next_;
  }
}


// Lazily-created Hydrogen compilation statistics.
HStatistics* Isolate::GetHStatistics() {
  if (hstatistics() == NULL) set_hstatistics(new HStatistics());
  return hstatistics();
}


// Lazily-created Hydrogen graph tracer, tagged with this isolate's id.
HTracer* Isolate::GetHTracer() {
  if (htracer() == NULL) set_htracer(new HTracer(id()));
  return htracer();
}


// Returns the initial JSArray map for |kind| from the current native
// context, or NULL when the per-kind map array (or the entry) is not
// (yet) present.
Map* Isolate::get_initial_js_array_map(ElementsKind kind) {
  Context* native_context = context()->native_context();
  Object* maybe_map_array = native_context->js_array_maps();
  if (!maybe_map_array->IsUndefined()) {
    Object* maybe_transitioned_map =
        FixedArray::cast(maybe_map_array)->get(kind);
    if (!maybe_transitioned_map->IsUndefined()) {
      return Map::cast(maybe_transitioned_map);
    }
  }
  return NULL;
}


// True while Array.prototype -> Object.prototype -> null is unmodified
// and both prototypes still have empty elements, i.e. fast-path array
// construction remains valid.
bool Isolate::IsFastArrayConstructorPrototypeChainIntact() {
  Map* root_array_map =
      get_initial_js_array_map(GetInitialFastElementsKind());
  ASSERT(root_array_map != NULL);
  JSObject* initial_array_proto = JSObject::cast(*initial_array_prototype());

  // Check that the array prototype hasn't been altered WRT empty elements.
  if (root_array_map->prototype() != initial_array_proto) return false;
  if (initial_array_proto->elements() != heap()->empty_fixed_array()) {
    return false;
  }

  // Check that the object prototype hasn't been altered WRT empty elements.
  JSObject* initial_object_proto = JSObject::cast(*initial_object_prototype());
  Object* root_array_map_proto = initial_array_proto->GetPrototype();
  if (root_array_map_proto != initial_object_proto) return false;
  if (initial_object_proto->elements() != heap()->empty_fixed_array()) {
    return false;
  }

  return initial_object_proto->GetPrototype()->IsNull();
}


// Returns a pointer into the descriptor array allocated in Init();
// |index| is a CodeStub ID.
CodeStubInterfaceDescriptor*
    Isolate::code_stub_interface_descriptor(int index) {
  return code_stub_interface_descriptors_ + index;
}


// GC-safe lookup of the Code object containing address |a|.
Object* Isolate::FindCodeObject(Address a) {
  return inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(a);
}


#ifdef DEBUG
// Expose each isolate field's offset for debugging tools.
#define ISOLATE_FIELD_OFFSET(type, name, ignored)                       \
const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

} } // namespace v8::internal