// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 28 #ifndef V8_ISOLATE_H_ 29 #define V8_ISOLATE_H_ 30 31 #include "../include/v8-debug.h" 32 #include "allocation.h" 33 #include "apiutils.h" 34 #include "assert-scope.h" 35 #include "atomicops.h" 36 #include "builtins.h" 37 #include "contexts.h" 38 #include "execution.h" 39 #include "frames.h" 40 #include "date.h" 41 #include "global-handles.h" 42 #include "handles.h" 43 #include "hashmap.h" 44 #include "heap.h" 45 #include "optimizing-compiler-thread.h" 46 #include "regexp-stack.h" 47 #include "runtime-profiler.h" 48 #include "runtime.h" 49 #include "zone.h" 50 51 namespace v8 { 52 namespace internal { 53 54 class Bootstrapper; 55 class CallbackTable; 56 class CodeGenerator; 57 class CodeRange; 58 struct CodeStubInterfaceDescriptor; 59 class CompilationCache; 60 class ContextSlotCache; 61 class ContextSwitcher; 62 class Counters; 63 class CpuFeatures; 64 class CpuProfiler; 65 class DeoptimizerData; 66 class Deserializer; 67 class EmptyStatement; 68 class ExternalCallbackScope; 69 class ExternalReferenceTable; 70 class Factory; 71 class FunctionInfoListener; 72 class HandleScopeImplementer; 73 class HeapProfiler; 74 class HStatistics; 75 class HTracer; 76 class InlineRuntimeFunctionsTable; 77 class NoAllocationStringAllocator; 78 class InnerPointerToCodeCache; 79 class MarkingThread; 80 class PreallocatedMemoryThread; 81 class RegExpStack; 82 class SaveContext; 83 class UnicodeCache; 84 class ConsStringIteratorOp; 85 class StringTracker; 86 class StubCache; 87 class SweeperThread; 88 class ThreadManager; 89 class ThreadState; 90 class ThreadVisitor; // Defined in v8threads.h 91 template <StateTag Tag> class VMState; 92 93 // 'void function pointer', used to roundtrip the 94 // ExternalReference::ExternalReferenceRedirector since we can not include 95 // assembler.h, where it is defined, here. 
96 typedef void* ExternalReferenceRedirectorPointer(); 97 98 99 #ifdef ENABLE_DEBUGGER_SUPPORT 100 class Debug; 101 class Debugger; 102 class DebuggerAgent; 103 #endif 104 105 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 106 !defined(__mips__) && V8_TARGET_ARCH_MIPS 107 class Redirection; 108 class Simulator; 109 #endif 110 111 112 // Static indirection table for handles to constants. If a frame 113 // element represents a constant, the data contains an index into 114 // this table of handles to the actual constants. 115 // Static indirection table for handles to constants. If a Result 116 // represents a constant, the data contains an index into this table 117 // of handles to the actual constants. 118 typedef ZoneList<Handle<Object> > ZoneObjectList; 119 120 #define RETURN_IF_SCHEDULED_EXCEPTION(isolate) \ 121 do { \ 122 Isolate* __isolate__ = (isolate); \ 123 if (__isolate__->has_scheduled_exception()) { \ 124 return __isolate__->PromoteScheduledException(); \ 125 } \ 126 } while (false) 127 128 #define RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, T) \ 129 do { \ 130 Isolate* __isolate__ = (isolate); \ 131 if (__isolate__->has_scheduled_exception()) { \ 132 __isolate__->PromoteScheduledException(); \ 133 return Handle<T>::null(); \ 134 } \ 135 } while (false) 136 137 #define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \ 138 do { \ 139 if ((call).is_null()) { \ 140 ASSERT((isolate)->has_pending_exception()); \ 141 return (value); \ 142 } \ 143 } while (false) 144 145 #define CHECK_NOT_EMPTY_HANDLE(isolate, call) \ 146 do { \ 147 ASSERT(!(isolate)->has_pending_exception()); \ 148 CHECK(!(call).is_null()); \ 149 CHECK(!(isolate)->has_pending_exception()); \ 150 } while (false) 151 152 #define RETURN_IF_EMPTY_HANDLE(isolate, call) \ 153 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception()) 154 155 #define FOR_EACH_ISOLATE_ADDRESS_NAME(C) \ 156 C(Handler, handler) \ 157 C(CEntryFP, c_entry_fp) \ 158 C(Context, context) \ 159 
C(PendingException, pending_exception) \ 160 C(ExternalCaughtException, external_caught_exception) \ 161 C(JSEntrySP, js_entry_sp) 162 163 164 // Platform-independent, reliable thread identifier. 165 class ThreadId { 166 public: 167 // Creates an invalid ThreadId. 168 ThreadId() : id_(kInvalidId) {} 169 170 // Returns ThreadId for current thread. 171 static ThreadId Current() { return ThreadId(GetCurrentThreadId()); } 172 173 // Returns invalid ThreadId (guaranteed not to be equal to any thread). 174 static ThreadId Invalid() { return ThreadId(kInvalidId); } 175 176 // Compares ThreadIds for equality. 177 INLINE(bool Equals(const ThreadId& other) const) { 178 return id_ == other.id_; 179 } 180 181 // Checks whether this ThreadId refers to any thread. 182 INLINE(bool IsValid() const) { 183 return id_ != kInvalidId; 184 } 185 186 // Converts ThreadId to an integer representation 187 // (required for public API: V8::V8::GetCurrentThreadId). 188 int ToInteger() const { return id_; } 189 190 // Converts ThreadId to an integer representation 191 // (required for public API: V8::V8::TerminateExecution). 192 static ThreadId FromInteger(int id) { return ThreadId(id); } 193 194 private: 195 static const int kInvalidId = -1; 196 197 explicit ThreadId(int id) : id_(id) {} 198 199 static int AllocateThreadId(); 200 201 static int GetCurrentThreadId(); 202 203 int id_; 204 205 static Atomic32 highest_thread_id_; 206 207 friend class Isolate; 208 }; 209 210 211 class ThreadLocalTop BASE_EMBEDDED { 212 public: 213 // Does early low-level initialization that does not depend on the 214 // isolate being present. 215 ThreadLocalTop(); 216 217 // Initialize the thread data. 218 void Initialize(); 219 220 // Get the top C++ try catch handler or NULL if none are registered. 221 // 222 // This method is not guarenteed to return an address that can be 223 // used for comparison with addresses into the JS stack. If such an 224 // address is needed, use try_catch_handler_address. 
225 v8::TryCatch* TryCatchHandler(); 226 227 // Get the address of the top C++ try catch handler or NULL if 228 // none are registered. 229 // 230 // This method always returns an address that can be compared to 231 // pointers into the JavaScript stack. When running on actual 232 // hardware, try_catch_handler_address and TryCatchHandler return 233 // the same pointer. When running on a simulator with a separate JS 234 // stack, try_catch_handler_address returns a JS stack address that 235 // corresponds to the place on the JS stack where the C++ handler 236 // would have been if the stack were not separate. 237 inline Address try_catch_handler_address() { 238 return try_catch_handler_address_; 239 } 240 241 // Set the address of the top C++ try catch handler. 242 inline void set_try_catch_handler_address(Address address) { 243 try_catch_handler_address_ = address; 244 } 245 246 void Free() { 247 ASSERT(!has_pending_message_); 248 ASSERT(!external_caught_exception_); 249 ASSERT(try_catch_handler_address_ == NULL); 250 } 251 252 Isolate* isolate_; 253 // The context where the current execution method is created and for variable 254 // lookups. 255 Context* context_; 256 ThreadId thread_id_; 257 MaybeObject* pending_exception_; 258 bool has_pending_message_; 259 bool rethrowing_message_; 260 Object* pending_message_obj_; 261 Object* pending_message_script_; 262 int pending_message_start_pos_; 263 int pending_message_end_pos_; 264 // Use a separate value for scheduled exceptions to preserve the 265 // invariants that hold about pending_exception. We may want to 266 // unify them later. 267 MaybeObject* scheduled_exception_; 268 bool external_caught_exception_; 269 SaveContext* save_context_; 270 v8::TryCatch* catcher_; 271 272 // Stack. 
273 Address c_entry_fp_; // the frame pointer of the top c entry frame 274 Address handler_; // try-blocks are chained through the stack 275 276 #ifdef USE_SIMULATOR 277 #if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_MIPS 278 Simulator* simulator_; 279 #endif 280 #endif // USE_SIMULATOR 281 282 Address js_entry_sp_; // the stack pointer of the bottom JS entry frame 283 // the external callback we're currently in 284 ExternalCallbackScope* external_callback_scope_; 285 StateTag current_vm_state_; 286 287 // Generated code scratch locations. 288 int32_t formal_count_; 289 290 // Call back function to report unsafe JS accesses. 291 v8::FailedAccessCheckCallback failed_access_check_callback_; 292 293 // Head of the list of live LookupResults. 294 LookupResult* top_lookup_result_; 295 296 // Whether out of memory exceptions should be ignored. 297 bool ignore_out_of_memory_; 298 299 private: 300 void InitializeInternal(); 301 302 Address try_catch_handler_address_; 303 }; 304 305 306 class SystemThreadManager { 307 public: 308 enum ParallelSystemComponent { 309 PARALLEL_SWEEPING, 310 CONCURRENT_SWEEPING, 311 PARALLEL_MARKING, 312 PARALLEL_RECOMPILATION 313 }; 314 315 static int NumberOfParallelSystemThreads(ParallelSystemComponent type); 316 317 static const int kMaxThreads = 4; 318 }; 319 320 321 #ifdef ENABLE_DEBUGGER_SUPPORT 322 323 #define ISOLATE_DEBUGGER_INIT_LIST(V) \ 324 V(v8::Debug::EventCallback, debug_event_callback, NULL) \ 325 V(DebuggerAgent*, debugger_agent_instance, NULL) 326 #else 327 328 #define ISOLATE_DEBUGGER_INIT_LIST(V) 329 330 #endif 331 332 #ifdef DEBUG 333 334 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \ 335 V(CommentStatistic, paged_space_comments_statistics, \ 336 CommentStatistic::kMaxComments + 1) 337 #else 338 339 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) 340 341 #endif 342 343 #define ISOLATE_INIT_ARRAY_LIST(V) \ 344 /* SerializerDeserializer state. 
*/ \ 345 V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \ 346 V(int, bad_char_shift_table, kUC16AlphabetSize) \ 347 V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \ 348 V(int, suffix_table, (kBMMaxShift + 1)) \ 349 V(uint32_t, private_random_seed, 2) \ 350 ISOLATE_INIT_DEBUG_ARRAY_LIST(V) 351 352 typedef List<HeapObject*, PreallocatedStorageAllocationPolicy> DebugObjectCache; 353 354 #define ISOLATE_INIT_LIST(V) \ 355 /* SerializerDeserializer state. */ \ 356 V(int, serialize_partial_snapshot_cache_length, 0) \ 357 V(int, serialize_partial_snapshot_cache_capacity, 0) \ 358 V(Object**, serialize_partial_snapshot_cache, NULL) \ 359 /* Assembler state. */ \ 360 /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */ \ 361 V(byte*, assembler_spare_buffer, NULL) \ 362 V(FatalErrorCallback, exception_behavior, NULL) \ 363 V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL) \ 364 V(v8::Debug::MessageHandler, message_handler, NULL) \ 365 /* To distinguish the function templates, so that we can find them in the */ \ 366 /* function cache of the native context. */ \ 367 V(int, next_serial_number, 0) \ 368 V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL) \ 369 V(bool, always_allow_natives_syntax, false) \ 370 /* Part of the state of liveedit. */ \ 371 V(FunctionInfoListener*, active_function_info_listener, NULL) \ 372 /* State for Relocatable. */ \ 373 V(Relocatable*, relocatable_top, NULL) \ 374 V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \ 375 V(Object*, string_stream_current_security_token, NULL) \ 376 /* TODO(isolates): Release this on destruction? */ \ 377 V(int*, irregexp_interpreter_backtrack_stack_cache, NULL) \ 378 /* Serializer state. */ \ 379 V(ExternalReferenceTable*, external_reference_table, NULL) \ 380 /* AstNode state. 
*/ \ 381 V(int, ast_node_id, 0) \ 382 V(unsigned, ast_node_count, 0) \ 383 V(bool, observer_delivery_pending, false) \ 384 V(HStatistics*, hstatistics, NULL) \ 385 V(HTracer*, htracer, NULL) \ 386 ISOLATE_DEBUGGER_INIT_LIST(V) 387 388 class Isolate { 389 // These forward declarations are required to make the friend declarations in 390 // PerIsolateThreadData work on some older versions of gcc. 391 class ThreadDataTable; 392 class EntryStackItem; 393 public: 394 ~Isolate(); 395 396 // A thread has a PerIsolateThreadData instance for each isolate that it has 397 // entered. That instance is allocated when the isolate is initially entered 398 // and reused on subsequent entries. 399 class PerIsolateThreadData { 400 public: 401 PerIsolateThreadData(Isolate* isolate, ThreadId thread_id) 402 : isolate_(isolate), 403 thread_id_(thread_id), 404 stack_limit_(0), 405 thread_state_(NULL), 406 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 407 !defined(__mips__) && V8_TARGET_ARCH_MIPS 408 simulator_(NULL), 409 #endif 410 next_(NULL), 411 prev_(NULL) { } 412 Isolate* isolate() const { return isolate_; } 413 ThreadId thread_id() const { return thread_id_; } 414 void set_stack_limit(uintptr_t value) { stack_limit_ = value; } 415 uintptr_t stack_limit() const { return stack_limit_; } 416 ThreadState* thread_state() const { return thread_state_; } 417 void set_thread_state(ThreadState* value) { thread_state_ = value; } 418 419 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 420 !defined(__mips__) && V8_TARGET_ARCH_MIPS 421 Simulator* simulator() const { return simulator_; } 422 void set_simulator(Simulator* simulator) { 423 simulator_ = simulator; 424 } 425 #endif 426 427 bool Matches(Isolate* isolate, ThreadId thread_id) const { 428 return isolate_ == isolate && thread_id_.Equals(thread_id); 429 } 430 431 private: 432 Isolate* isolate_; 433 ThreadId thread_id_; 434 uintptr_t stack_limit_; 435 ThreadState* thread_state_; 436 437 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 
438 !defined(__mips__) && V8_TARGET_ARCH_MIPS 439 Simulator* simulator_; 440 #endif 441 442 PerIsolateThreadData* next_; 443 PerIsolateThreadData* prev_; 444 445 friend class Isolate; 446 friend class ThreadDataTable; 447 friend class EntryStackItem; 448 449 DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData); 450 }; 451 452 453 enum AddressId { 454 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address, 455 FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM) 456 #undef DECLARE_ENUM 457 kIsolateAddressCount 458 }; 459 460 // Returns the PerIsolateThreadData for the current thread (or NULL if one is 461 // not currently set). 462 static PerIsolateThreadData* CurrentPerIsolateThreadData() { 463 return reinterpret_cast<PerIsolateThreadData*>( 464 Thread::GetThreadLocal(per_isolate_thread_data_key_)); 465 } 466 467 // Returns the isolate inside which the current thread is running. 468 INLINE(static Isolate* Current()) { 469 Isolate* isolate = reinterpret_cast<Isolate*>( 470 Thread::GetExistingThreadLocal(isolate_key_)); 471 ASSERT(isolate != NULL); 472 return isolate; 473 } 474 475 INLINE(static Isolate* UncheckedCurrent()) { 476 return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_)); 477 } 478 479 // Usually called by Init(), but can be called early e.g. to allow 480 // testing components that require logging but not the whole 481 // isolate. 482 // 483 // Safe to call more than once. 484 void InitializeLoggingAndCounters(); 485 486 bool Init(Deserializer* des); 487 488 bool IsInitialized() { return state_ == INITIALIZED; } 489 490 // True if at least one thread Enter'ed this isolate. 491 bool IsInUse() { return entry_stack_ != NULL; } 492 493 // Destroys the non-default isolates. 494 // Sets default isolate into "has_been_disposed" state rather then destroying, 495 // for legacy API reasons. 
496 void TearDown(); 497 498 static void GlobalTearDown(); 499 500 bool IsDefaultIsolate() const { return this == default_isolate_; } 501 502 // Ensures that process-wide resources and the default isolate have been 503 // allocated. It is only necessary to call this method in rare cases, for 504 // example if you are using V8 from within the body of a static initializer. 505 // Safe to call multiple times. 506 static void EnsureDefaultIsolate(); 507 508 // Find the PerThread for this particular (isolate, thread) combination 509 // If one does not yet exist, return null. 510 PerIsolateThreadData* FindPerThreadDataForThisThread(); 511 512 // Find the PerThread for given (isolate, thread) combination 513 // If one does not yet exist, return null. 514 PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id); 515 516 #ifdef ENABLE_DEBUGGER_SUPPORT 517 // Get the debugger from the default isolate. Preinitializes the 518 // default isolate if needed. 519 static Debugger* GetDefaultIsolateDebugger(); 520 #endif 521 522 // Get the stack guard from the default isolate. Preinitializes the 523 // default isolate if needed. 524 static StackGuard* GetDefaultIsolateStackGuard(); 525 526 // Returns the key used to store the pointer to the current isolate. 527 // Used internally for V8 threads that do not execute JavaScript but still 528 // are part of the domain of an isolate (like the context switcher). 529 static Thread::LocalStorageKey isolate_key() { 530 return isolate_key_; 531 } 532 533 // Returns the key used to store process-wide thread IDs. 534 static Thread::LocalStorageKey thread_id_key() { 535 return thread_id_key_; 536 } 537 538 static Thread::LocalStorageKey per_isolate_thread_data_key(); 539 540 // If a client attempts to create a Locker without specifying an isolate, 541 // we assume that the client is using legacy behavior. 
Set up the current 542 // thread to be inside the implicit isolate (or fail a check if we have 543 // switched to non-legacy behavior). 544 static void EnterDefaultIsolate(); 545 546 // Mutex for serializing access to break control structures. 547 Mutex* break_access() { return break_access_; } 548 549 // Mutex for serializing access to debugger. 550 Mutex* debugger_access() { return debugger_access_; } 551 552 Address get_address_from_id(AddressId id); 553 554 // Access to top context (where the current function object was created). 555 Context* context() { return thread_local_top_.context_; } 556 void set_context(Context* context) { 557 ASSERT(context == NULL || context->IsContext()); 558 thread_local_top_.context_ = context; 559 } 560 Context** context_address() { return &thread_local_top_.context_; } 561 562 SaveContext* save_context() { return thread_local_top_.save_context_; } 563 void set_save_context(SaveContext* save) { 564 thread_local_top_.save_context_ = save; 565 } 566 567 // Access to current thread id. 568 ThreadId thread_id() { return thread_local_top_.thread_id_; } 569 void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; } 570 571 // Interface to pending exception. 
572 MaybeObject* pending_exception() { 573 ASSERT(has_pending_exception()); 574 return thread_local_top_.pending_exception_; 575 } 576 bool external_caught_exception() { 577 return thread_local_top_.external_caught_exception_; 578 } 579 void set_external_caught_exception(bool value) { 580 thread_local_top_.external_caught_exception_ = value; 581 } 582 void set_pending_exception(MaybeObject* exception) { 583 thread_local_top_.pending_exception_ = exception; 584 } 585 void clear_pending_exception() { 586 thread_local_top_.pending_exception_ = heap_.the_hole_value(); 587 } 588 MaybeObject** pending_exception_address() { 589 return &thread_local_top_.pending_exception_; 590 } 591 bool has_pending_exception() { 592 return !thread_local_top_.pending_exception_->IsTheHole(); 593 } 594 void clear_pending_message() { 595 thread_local_top_.has_pending_message_ = false; 596 thread_local_top_.pending_message_obj_ = heap_.the_hole_value(); 597 thread_local_top_.pending_message_script_ = heap_.the_hole_value(); 598 } 599 v8::TryCatch* try_catch_handler() { 600 return thread_local_top_.TryCatchHandler(); 601 } 602 Address try_catch_handler_address() { 603 return thread_local_top_.try_catch_handler_address(); 604 } 605 bool* external_caught_exception_address() { 606 return &thread_local_top_.external_caught_exception_; 607 } 608 v8::TryCatch* catcher() { 609 return thread_local_top_.catcher_; 610 } 611 void set_catcher(v8::TryCatch* catcher) { 612 thread_local_top_.catcher_ = catcher; 613 } 614 615 MaybeObject** scheduled_exception_address() { 616 return &thread_local_top_.scheduled_exception_; 617 } 618 619 Address pending_message_obj_address() { 620 return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_); 621 } 622 623 Address has_pending_message_address() { 624 return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_); 625 } 626 627 Address pending_message_script_address() { 628 return reinterpret_cast<Address>( 629 
&thread_local_top_.pending_message_script_); 630 } 631 632 MaybeObject* scheduled_exception() { 633 ASSERT(has_scheduled_exception()); 634 return thread_local_top_.scheduled_exception_; 635 } 636 bool has_scheduled_exception() { 637 return thread_local_top_.scheduled_exception_ != heap_.the_hole_value(); 638 } 639 void clear_scheduled_exception() { 640 thread_local_top_.scheduled_exception_ = heap_.the_hole_value(); 641 } 642 643 bool IsExternallyCaught(); 644 645 bool is_catchable_by_javascript(MaybeObject* exception) { 646 return (!exception->IsOutOfMemory()) && 647 (exception != heap()->termination_exception()); 648 } 649 650 // Serializer. 651 void PushToPartialSnapshotCache(Object* obj); 652 653 // JS execution stack (see frames.h). 654 static Address c_entry_fp(ThreadLocalTop* thread) { 655 return thread->c_entry_fp_; 656 } 657 static Address handler(ThreadLocalTop* thread) { return thread->handler_; } 658 659 inline Address* c_entry_fp_address() { 660 return &thread_local_top_.c_entry_fp_; 661 } 662 inline Address* handler_address() { return &thread_local_top_.handler_; } 663 664 // Bottom JS entry. 665 Address js_entry_sp() { 666 return thread_local_top_.js_entry_sp_; 667 } 668 inline Address* js_entry_sp_address() { 669 return &thread_local_top_.js_entry_sp_; 670 } 671 672 // Generated code scratch locations. 673 void* formal_count_address() { return &thread_local_top_.formal_count_; } 674 675 // Returns the global object of the current context. It could be 676 // a builtin object, or a JS global object. 677 Handle<GlobalObject> global_object() { 678 return Handle<GlobalObject>(context()->global_object()); 679 } 680 681 // Returns the global proxy object of the current context. 
682 Object* global_proxy() { 683 return context()->global_proxy(); 684 } 685 686 Handle<JSBuiltinsObject> js_builtins_object() { 687 return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins()); 688 } 689 690 static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); } 691 void FreeThreadResources() { thread_local_top_.Free(); } 692 693 // This method is called by the api after operations that may throw 694 // exceptions. If an exception was thrown and not handled by an external 695 // handler the exception is scheduled to be rethrown when we return to running 696 // JavaScript code. If an exception is scheduled true is returned. 697 bool OptionalRescheduleException(bool is_bottom_call); 698 699 class ExceptionScope { 700 public: 701 explicit ExceptionScope(Isolate* isolate) : 702 // Scope currently can only be used for regular exceptions, not 703 // failures like OOM or termination exception. 704 isolate_(isolate), 705 pending_exception_(isolate_->pending_exception()->ToObjectUnchecked(), 706 isolate_), 707 catcher_(isolate_->catcher()) 708 { } 709 710 ~ExceptionScope() { 711 isolate_->set_catcher(catcher_); 712 isolate_->set_pending_exception(*pending_exception_); 713 } 714 715 private: 716 Isolate* isolate_; 717 Handle<Object> pending_exception_; 718 v8::TryCatch* catcher_; 719 }; 720 721 void SetCaptureStackTraceForUncaughtExceptions( 722 bool capture, 723 int frame_limit, 724 StackTrace::StackTraceOptions options); 725 726 // Tells whether the current context has experienced an out of memory 727 // exception. 
728 bool is_out_of_memory(); 729 bool ignore_out_of_memory() { 730 return thread_local_top_.ignore_out_of_memory_; 731 } 732 void set_ignore_out_of_memory(bool value) { 733 thread_local_top_.ignore_out_of_memory_ = value; 734 } 735 736 void PrintCurrentStackTrace(FILE* out); 737 void PrintStackTrace(FILE* out, char* thread_data); 738 void PrintStack(StringStream* accumulator); 739 void PrintStack(FILE* out); 740 void PrintStack(); 741 Handle<String> StackTraceString(); 742 NO_INLINE(void PushStackTraceAndDie(unsigned int magic, 743 Object* object, 744 Map* map, 745 unsigned int magic2)); 746 Handle<JSArray> CaptureCurrentStackTrace( 747 int frame_limit, 748 StackTrace::StackTraceOptions options); 749 750 Handle<JSArray> CaptureSimpleStackTrace(Handle<JSObject> error_object, 751 Handle<Object> caller, 752 int limit); 753 void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object); 754 755 // Returns if the top context may access the given global object. If 756 // the result is false, the pending exception is guaranteed to be 757 // set. 758 bool MayNamedAccess(JSObject* receiver, 759 Object* key, 760 v8::AccessType type); 761 bool MayIndexedAccess(JSObject* receiver, 762 uint32_t index, 763 v8::AccessType type); 764 765 void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback); 766 void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type); 767 768 // Exception throwing support. The caller should use the result 769 // of Throw() as its return value. 770 Failure* Throw(Object* exception, MessageLocation* location = NULL); 771 // Re-throw an exception. This involves no error reporting since 772 // error reporting was handled when the exception was thrown 773 // originally. 774 Failure* ReThrow(MaybeObject* exception); 775 void ScheduleThrow(Object* exception); 776 // Re-set pending message, script and positions reported to the TryCatch 777 // back to the TLS for re-use when rethrowing. 
778 void RestorePendingMessageFromTryCatch(v8::TryCatch* handler); 779 void ReportPendingMessages(); 780 // Return pending location if any or unfilled structure. 781 MessageLocation GetMessageLocation(); 782 Failure* ThrowIllegalOperation(); 783 784 // Promote a scheduled exception to pending. Asserts has_scheduled_exception. 785 Failure* PromoteScheduledException(); 786 void DoThrow(Object* exception, MessageLocation* location); 787 // Checks if exception should be reported and finds out if it's 788 // caught externally. 789 bool ShouldReportException(bool* can_be_caught_externally, 790 bool catchable_by_javascript); 791 792 // Attempts to compute the current source location, storing the 793 // result in the target out parameter. 794 void ComputeLocation(MessageLocation* target); 795 796 // Override command line flag. 797 void TraceException(bool flag); 798 799 // Out of resource exception helpers. 800 Failure* StackOverflow(); 801 Failure* TerminateExecution(); 802 void CancelTerminateExecution(); 803 804 // Administration 805 void Iterate(ObjectVisitor* v); 806 void Iterate(ObjectVisitor* v, ThreadLocalTop* t); 807 char* Iterate(ObjectVisitor* v, char* t); 808 void IterateThread(ThreadVisitor* v, char* t); 809 810 811 // Returns the current native and global context. 812 Handle<Context> native_context(); 813 Handle<Context> global_context(); 814 815 // Returns the native context of the calling JavaScript code. That 816 // is, the native context of the top-most JavaScript frame. 817 Handle<Context> GetCallingNativeContext(); 818 819 void RegisterTryCatchHandler(v8::TryCatch* that); 820 void UnregisterTryCatchHandler(v8::TryCatch* that); 821 822 char* ArchiveThread(char* to); 823 char* RestoreThread(char* from); 824 825 static const char* const kStackOverflowMessage; 826 827 static const int kUC16AlphabetSize = 256; // See StringSearchBase. 828 static const int kBMMaxShift = 250; // See StringSearchBase. 829 830 // Accessors. 
  // Accessors for the per-isolate "global" values declared via
  // ISOLATE_INIT_LIST.  In debug builds the OFFSET_OF check asserts that this
  // compilation unit's view of the field offset matches the recorded
  // name##_debug_offset_ (see ISOLATE_FIELD_OFFSET below), catching layout
  // divergence between translation units.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
  inline type name() const {                                            \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return name##_;                                                     \
  }                                                                     \
  inline void set_##name(type value) {                                  \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    name##_ = value;                                                    \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

  // Accessors for the per-isolate arrays declared via ISOLATE_INIT_ARRAY_LIST;
  // returns a pointer to the first element of the backing array.
#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
  inline type* name() {                                                 \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return &(name##_)[0];                                               \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

  // Handle-returning accessors for the fields of the current native context
  // (NATIVE_CONTEXT_FIELDS), plus the matching is_<name> predicates.
#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)                \
  Handle<type> name() {                                                 \
    return Handle<type>(context()->native_context()->name(), this);     \
  }                                                                     \
  bool is_##name(type* value) {                                         \
    return context()->native_context()->is_##name(value);               \
  }
  NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
#undef NATIVE_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(counters_ != NULL);
    return counters_;
  }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(logger_ != NULL);
    return logger_;
  }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table();
  StubCache* stub_cache() { return stub_cache_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }

  TranscendentalCache* transcendental_cache() const {
    return transcendental_cache_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
    return &handle_scope_data_;
  }
  HandleScopeImplementer* handle_scope_implementer() {
    ASSERT(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* runtime_zone() { return &runtime_zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    return inner_pointer_to_code_cache_;
  }

  ConsStringIteratorOp* write_iterator() { return write_iterator_; }

  GlobalHandles* global_handles() { return global_handles_; }

  EternalHandles* eternal_handles() { return eternal_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  ContextSwitcher* context_switcher() { return context_switcher_; }

  void set_context_switcher(ContextSwitcher* switcher) {
    context_switcher_ = switcher;
  }

  StringTracker* string_tracker() { return string_tracker_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  ConsStringIteratorOp* objects_string_compare_iterator_a() {
    return &objects_string_compare_iterator_a_;
  }

  ConsStringIteratorOp* objects_string_compare_iterator_b() {
    return &objects_string_compare_iterator_b_;
  }

  StaticResource<ConsStringIteratorOp>* objects_string_iterator() {
    return &objects_string_iterator_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  void set_fp_stubs_generated(bool value) {
    fp_stubs_generated_ = value;
  }

  bool fp_stubs_generated() { return fp_stubs_generated_; }

  Builtins* builtins() { return &builtins_; }

  void NotifyExtensionInstalled() {
    has_installed_extensions_ = true;
  }

  bool has_installed_extensions() { return has_installed_extensions_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  void* PreallocatedStorageNew(size_t size);
  void PreallocatedStorageDelete(void* p);
  void PreallocatedStorageInit(size_t size);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Lazily initialize the debugger on first access (guarded by an atomic
  // flag so concurrent readers observe a fully initialized debugger).
  Debugger* debugger() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debugger_;
  }
  Debug* debug() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debug_;
  }
#endif

  inline bool IsDebuggerActive();
  inline bool DebuggerHasBreakPoints();

  CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
  HeapProfiler* heap_profiler() const { return heap_profiler_; }

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }

  int* code_kind_statistics() { return code_kind_statistics_; }
#endif

#if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
    V8_TARGET_ARCH_MIPS && !defined(__mips__)
  // Simulator state, only present when targeting ARM/MIPS from a host that
  // is not that architecture (i.e. when the instruction simulator is used).
  bool simulator_initialized() { return simulator_initialized_; }
  void set_simulator_initialized(bool initialized) {
    simulator_initialized_ = initialized;
  }

  HashMap* simulator_i_cache() { return simulator_i_cache_; }
  void set_simulator_i_cache(HashMap* hash_map) {
    simulator_i_cache_ = hash_map;
  }

  Redirection* simulator_redirection() {
    return simulator_redirection_;
  }
  void set_simulator_redirection(Redirection* redirection) {
    simulator_redirection_ = redirection;
  }
#endif

  // NOTE(review): the Factory is obtained by casting the Isolate itself, so
  // this relies on Factory carrying no data members of its own — verify
  // against factory.h before adding state to Factory.
  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  static const int kJSRegexpStaticOffsetsVectorSize = 128;

  ExternalCallbackScope* external_callback_scope() {
    return thread_local_top_.external_callback_scope_;
  }
  void set_external_callback_scope(ExternalCallbackScope* scope) {
    thread_local_top_.external_callback_scope_ = scope;
  }

  StateTag current_vm_state() {
    return thread_local_top_.current_vm_state_;
  }

  void set_current_vm_state(StateTag state) {
    thread_local_top_.current_vm_state_ = state;
  }

  // Embedder-provided opaque data slot (a single void* per isolate).
  void SetData(void* data) { embedder_data_ = data; }
  void* GetData() { return embedder_data_; }

  LookupResult* top_lookup_result() {
    return thread_local_top_.top_lookup_result_;
  }
  void SetTopLookupResult(LookupResult* top) {
    thread_local_top_.top_lookup_result_ = top;
  }

  bool initialized_from_snapshot() { return initialized_from_snapshot_; }

  // Milliseconds elapsed since this isolate was initialized.
  double time_millis_since_init() {
    return OS::TimeCurrentMillis() - time_millis_at_init_;
  }

  DateCache* date_cache() {
    return date_cache_;
  }

  // Installs a new DateCache; the previous cache is deleted unless the same
  // pointer is passed again.
  void set_date_cache(DateCache* date_cache) {
    if (date_cache != date_cache_) {
      delete date_cache_;
    }
    date_cache_ = date_cache;
  }

  Map* get_initial_js_array_map(ElementsKind kind);

  bool IsFastArrayConstructorPrototypeChainIntact();

  CodeStubInterfaceDescriptor*
      code_stub_interface_descriptor(int index);

  void IterateDeferredHandles(ObjectVisitor* visitor);
  void LinkDeferredHandles(DeferredHandles* deferred_handles);
  void UnlinkDeferredHandles(DeferredHandles* deferred_handles);

#ifdef DEBUG
  bool IsDeferredHandle(Object** location);
#endif  // DEBUG

  OptimizingCompilerThread* optimizing_compiler_thread() {
    return &optimizing_compiler_thread_;
  }

  // PreInits and returns a default isolate. Needed when a new thread tries
  // to create a Locker for the first time (the lock itself is in the isolate).
  // TODO(svenpanne) This method is on death row...
  static v8::Isolate* GetDefaultIsolateForLocking();

  MarkingThread** marking_threads() {
    return marking_thread_;
  }

  SweeperThread** sweeper_threads() {
    return sweeper_thread_;
  }

  CallbackTable* callback_table() {
    return callback_table_;
  }
  void set_callback_table(CallbackTable* callback_table) {
    callback_table_ = callback_table;
  }

  // Numeric id of this isolate; see isolate_counter_ below.
  int id() const { return static_cast<int>(id_); }

  HStatistics* GetHStatistics();
  HTracer* GetHTracer();

  FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
  void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
    function_entry_hook_ = function_entry_hook;
  }

  // Address of the deopt-point counter, for code that patches it directly.
  void* stress_deopt_count_address() { return &stress_deopt_count_; }

  // Given an address occupied by a live code object, return that object.
  Object* FindCodeObject(Address a);

 private:
  Isolate();

  friend struct GlobalState;
  friend struct InitializeGlobalState;

  enum State {
    UNINITIALIZED,    // Some components may not have been allocated.
    INITIALIZED       // All components are fully initialized.
  };

  // These fields are accessed through the API, offsets must be kept in sync
  // with v8::internal::Internals (in include/v8.h) constants. This is also
  // verified in Isolate::Init() using runtime checks.
  State state_;  // Will be padded to kApiPointerSize.
  void* embedder_data_;
  Heap heap_;

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  // Singly-linked table (via list_) mapping (isolate, thread) pairs to their
  // PerIsolateThreadData.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(Isolate* isolate, ThreadId thread_id);
    void Remove(PerIsolateThreadData* data);
    void RemoveAllThreads(Isolate* isolate);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack synchronously with threads Enter'ing and Exit'ing
  // the Isolate. The top of the stack points to a thread which is currently
  // running the Isolate. When the stack is empty, the Isolate is considered
  // not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

   private:
    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  // This mutex protects highest_thread_id_, thread_data_table_ and
  // default_isolate_.
  static Mutex* process_wide_mutex_;

  static Thread::LocalStorageKey per_isolate_thread_data_key_;
  static Thread::LocalStorageKey isolate_key_;
  static Thread::LocalStorageKey thread_id_key_;
  static Isolate* default_isolate_;
  static ThreadDataTable* thread_data_table_;

  // A global counter for all generated Isolates, might overflow.
  static Atomic32 isolate_counter_;

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  // Allocate and insert PerIsolateThreadData into the ThreadDataTable
  // (regardless of whether such data already exists).
  PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Exit();

  void PreallocatedMemoryThreadStart();
  void PreallocatedMemoryThreadStop();
  void InitializeThreadLocal();

  void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

  void PropagatePendingExceptionToExternalTryCatch();

  void InitializeDebugger();

  // Traverse prototype chain to find out whether the object is derived from
  // the Error object.
  bool IsErrorObject(Handle<Object> obj);

  // NOTE: field order below is significant — offsets are asserted against
  // the debug offsets (and, for the fields above, against include/v8.h).
  // Do not reorder without updating those checks.
  Atomic32 id_;
  EntryStackItem* entry_stack_;
  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
  // The preallocated memory thread singleton.
  PreallocatedMemoryThread* preallocated_memory_thread_;
  Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
  NoAllocationStringAllocator* preallocated_message_space_;
  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
  CodeRange* code_range_;
  Mutex* break_access_;
  Atomic32 debugger_initialized_;
  Mutex* debugger_access_;
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
  StubCache* stub_cache_;
  DeoptimizerData* deoptimizer_data_;
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
  TranscendentalCache* transcendental_cache_;
  MemoryAllocator* memory_allocator_;
  KeyedLookupCache* keyed_lookup_cache_;
  ContextSlotCache* context_slot_cache_;
  DescriptorLookupCache* descriptor_lookup_cache_;
  v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
  HandleScopeImplementer* handle_scope_implementer_;
  UnicodeCache* unicode_cache_;
  Zone runtime_zone_;
  PreallocatedStorage in_use_list_;
  PreallocatedStorage free_list_;
  bool preallocated_storage_preallocated_;
  InnerPointerToCodeCache* inner_pointer_to_code_cache_;
  ConsStringIteratorOp* write_iterator_;
  GlobalHandles* global_handles_;
  EternalHandles* eternal_handles_;
  ContextSwitcher* context_switcher_;
  ThreadManager* thread_manager_;
  RuntimeState runtime_state_;
  bool fp_stubs_generated_;
  Builtins builtins_;
  bool has_installed_extensions_;
  StringTracker* string_tracker_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  ConsStringIteratorOp objects_string_compare_iterator_a_;
  ConsStringIteratorOp objects_string_compare_iterator_b_;
  StaticResource<ConsStringIteratorOp> objects_string_iterator_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
  DateCache* date_cache_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
  CodeStubInterfaceDescriptor* code_stub_interface_descriptors_;

  // True if this isolate was initialized from a snapshot.
  bool initialized_from_snapshot_;

  // Time stamp at initialization.
  double time_millis_at_init_;

#if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
    V8_TARGET_ARCH_MIPS && !defined(__mips__)
  // Simulator-only state; see the matching accessors in the public section.
  bool simulator_initialized_;
  HashMap* simulator_i_cache_;
  Redirection* simulator_redirection_;
#endif

#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
  int code_kind_statistics_[Code::NUMBER_OF_KINDS];
#endif

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger_;
  Debug* debug_;
#endif
  CpuProfiler* cpu_profiler_;
  HeapProfiler* heap_profiler_;
  FunctionEntryHook function_entry_hook_;

  // Backing stores for the macro-generated accessors in the public section.
#define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines. Make sure the offsets of these fields agree
  // between compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

  DeferredHandles* deferred_handles_head_;
  OptimizingCompilerThread optimizing_compiler_thread_;
  MarkingThread** marking_thread_;
  SweeperThread** sweeper_thread_;
  CallbackTable* callback_table_;

  // Counts deopt points if deopt_every_n_times is enabled.
  unsigned int stress_deopt_count_;

  friend class ExecutionAccess;
  friend class HandleScopeImplementer;
  friend class IsolateInitializer;
  friend class MarkingThread;
  friend class OptimizingCompilerThread;
  friend class SweeperThread;
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
  friend class ThreadId;
  friend class TestMemoryAllocatorScope;
  friend class TestCodeRangeScope;
  friend class v8::Isolate;
  friend class v8::Locker;
  friend class v8::Unlocker;

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};


// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
// class as a work around for a bug in the generated code found with these
// versions of GCC. See V8 issue 122 for details.
//
// RAII scope that records the isolate's current context and restores it (and
// the previous SaveContext link) when the scope is destroyed.
class SaveContext BASE_EMBEDDED {
 public:
  inline explicit SaveContext(Isolate* isolate);

  // Restore the saved context on scope exit: clear it if none was saved,
  // otherwise reinstate it, and pop this SaveContext off the isolate's
  // save-context chain.
  ~SaveContext() {
    if (context_.is_null()) {
      Isolate* isolate = Isolate::Current();
      isolate->set_context(NULL);
      isolate->set_save_context(prev_);
    } else {
      Isolate* isolate = context_->GetIsolate();
      isolate->set_context(*context_);
      isolate->set_save_context(prev_);
    }
  }

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
  bool IsBelowFrame(JavaScriptFrame* frame) {
    return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
  }

 private:
  Handle<Context> context_;
  // NOTE(review): __GNUC_VERSION__ is not a standard GCC predefined macro
  // (GCC predefines __GNUC__ / __GNUC_MINOR__). If nothing in the build
  // defines it, an undefined identifier evaluates to 0 in #if and this
  // workaround never applies — verify the build configuration defines it.
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
  Handle<Context> dummy_;
#endif
  SaveContext* prev_;
  Address c_entry_fp_;
};


// Debug-only scope that asserts the isolate's current context is unchanged
// when the scope is destroyed; compiles to a no-op in release builds.
class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  AssertNoContextChange() :
      scope_(Isolate::Current()),
      context_(Isolate::Current()->context(), Isolate::Current()) {
  }

  ~AssertNoContextChange() {
    ASSERT(Isolate::Current()->context() == *context_);
  }

 private:
  HandleScope scope_;
  Handle<Context> context_;
#else
 public:
  AssertNoContextChange() { }
#endif
};


// RAII scope that holds the isolate's break_access_ mutex for its lifetime
// (hence the friend declaration inside Isolate above).
class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

  static void Lock(Isolate* isolate) { isolate->break_access_->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access_->Unlock(); }

  static bool TryLock(Isolate* isolate) {
    return isolate->break_access_->TryLock();
  }

 private:
  Isolate* isolate_;
};


// Support for checking for stack-overflows in C++ code.
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

  // Uses the address of this stack-allocated object as an approximation of
  // the current stack position and compares it against the real C++ stack
  // limit. NOTE(review): assumes a downward-growing stack.
  bool HasOverflowed() const {
    StackGuard* stack_guard = isolate_->stack_guard();
    return (reinterpret_cast<uintptr_t>(this) < stack_guard->real_climit());
  }
 private:
  Isolate* isolate_;
};


// Support for temporarily postponing interrupts.
// When the outermost
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
  // Bumps the per-thread postpone nesting counter and disables interrupts
  // on the isolate's stack guard.
  explicit PostponeInterruptsScope(Isolate* isolate)
      : stack_guard_(isolate->stack_guard()) {
    stack_guard_->thread_local_.postpone_interrupts_nesting_++;
    stack_guard_->DisableInterrupts();
  }

  // Re-enables interrupts only when the outermost scope is left
  // (nesting counter drops back to zero).
  ~PostponeInterruptsScope() {
    if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
      stack_guard_->EnableInterrupts();
    }
  }
 private:
  StackGuard* stack_guard_;
};


// Temporary macros for accessing current isolate and its subobjects.
// They provide better readability, especially when used a lot in the code.
#define HEAP (v8::internal::Isolate::Current()->heap())
#define ISOLATE (v8::internal::Isolate::Current())


// Tells whether the native context is marked with out of memory.
inline bool Context::has_out_of_memory() {
  return native_context()->out_of_memory()->IsTrue();
}


// Mark the native context with out of memory.
inline void Context::mark_out_of_memory() {
  native_context()->set_out_of_memory(HEAP->true_value());
}


} }  // namespace v8::internal

#endif  // V8_ISOLATE_H_