      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #ifndef V8_ISOLATE_H_
      6 #define V8_ISOLATE_H_
      7 
      8 #include "include/v8-debug.h"
      9 #include "src/allocation.h"
     10 #include "src/assert-scope.h"
     11 #include "src/base/atomicops.h"
     12 #include "src/builtins.h"
     13 #include "src/contexts.h"
     14 #include "src/execution.h"
     15 #include "src/frames.h"
     16 #include "src/date.h"
     17 #include "src/global-handles.h"
     18 #include "src/handles.h"
     19 #include "src/hashmap.h"
     20 #include "src/heap.h"
     21 #include "src/optimizing-compiler-thread.h"
     22 #include "src/regexp-stack.h"
     23 #include "src/runtime-profiler.h"
     24 #include "src/runtime.h"
     25 #include "src/zone.h"
     26 
     27 namespace v8 {
     28 namespace internal {
     29 
     30 class Bootstrapper;
     31 struct CallInterfaceDescriptor;
     32 class CodeGenerator;
     33 class CodeRange;
     34 struct CodeStubInterfaceDescriptor;
     35 class CodeTracer;
     36 class CompilationCache;
     37 class ConsStringIteratorOp;
     38 class ContextSlotCache;
     39 class Counters;
     40 class CpuFeatures;
     41 class CpuProfiler;
     42 class DeoptimizerData;
     43 class Deserializer;
     44 class EmptyStatement;
     45 class ExternalCallbackScope;
     46 class ExternalReferenceTable;
     47 class Factory;
     48 class FunctionInfoListener;
     49 class HandleScopeImplementer;
     50 class HeapProfiler;
     51 class HStatistics;
     52 class HTracer;
     53 class InlineRuntimeFunctionsTable;
     54 class InnerPointerToCodeCache;
     55 class MaterializedObjectStore;
     56 class NoAllocationStringAllocator;
     57 class CodeAgingHelper;
     58 class RandomNumberGenerator;
     59 class RegExpStack;
     60 class SaveContext;
     61 class StringTracker;
     62 class StubCache;
     63 class SweeperThread;
     64 class ThreadManager;
     65 class ThreadState;
     66 class ThreadVisitor;  // Defined in v8threads.h
     67 class UnicodeCache;
     68 template <StateTag Tag> class VMState;
     69 
      70 // 'void function pointer', used to round-trip the
      71 // ExternalReference::ExternalReferenceRedirector, since we cannot include
      72 // assembler.h (where it is defined) here.
     73 typedef void* ExternalReferenceRedirectorPointer();
     74 
     75 
     76 class Debug;
     77 class Debugger;
     78 
     79 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
     80     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
     81     !defined(__mips__) && V8_TARGET_ARCH_MIPS
     82 class Redirection;
     83 class Simulator;
     84 #endif
     85 
     86 
     87 // Static indirection table for handles to constants.  If a frame
     88 // element represents a constant, the data contains an index into
     89 // this table of handles to the actual constants.
     93 typedef ZoneList<Handle<Object> > ZoneObjectList;
     94 
     95 #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate)    \
     96   do {                                                    \
     97     Isolate* __isolate__ = (isolate);                     \
     98     if (__isolate__->has_scheduled_exception()) {         \
     99       return __isolate__->PromoteScheduledException();    \
    100     }                                                     \
    101   } while (false)
    102 
    103 // Macros for MaybeHandle.
    104 
    105 #define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T)  \
    106   do {                                                       \
    107     Isolate* __isolate__ = (isolate);                        \
    108     if (__isolate__->has_scheduled_exception()) {            \
    109       __isolate__->PromoteScheduledException();              \
    110       return MaybeHandle<T>();                               \
    111     }                                                        \
    112   } while (false)
    113 
    114 #define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
    115   do {                                                               \
    116     if (!(call).ToHandle(&dst)) {                                    \
    117       ASSERT((isolate)->has_pending_exception());                    \
    118       return value;                                                  \
    119     }                                                                \
    120   } while (false)
    121 
    122 #define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
    123   ASSIGN_RETURN_ON_EXCEPTION_VALUE(                             \
    124       isolate, dst, call, isolate->heap()->exception())
    125 
    126 #define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
    127   ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())
    128 
    129 #define RETURN_ON_EXCEPTION_VALUE(isolate, call, value)            \
    130   do {                                                             \
    131     if ((call).is_null()) {                                        \
    132       ASSERT((isolate)->has_pending_exception());                  \
    133       return value;                                                \
    134     }                                                              \
    135   } while (false)
    136 
    137 #define RETURN_FAILURE_ON_EXCEPTION(isolate, call)  \
    138   RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())
    139 
    140 #define RETURN_ON_EXCEPTION(isolate, call, T)  \
    141   RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
    142 
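// Illustrative usage sketch (editor's addition, not part of the original
// header): how the MaybeHandle macros above are typically combined in a
// function returning MaybeHandle<T>.  DoStep and Finish are hypothetical
// helpers that return MaybeHandle<Object>.
//
//   MaybeHandle<Object> Example(Isolate* isolate, Handle<Object> input) {
//     Handle<Object> intermediate;
//     // Propagate failure (pending exception) from DoStep.
//     ASSIGN_RETURN_ON_EXCEPTION(isolate, intermediate,
//                                DoStep(isolate, input), Object);
//     // Propagate failure from a call whose result is not needed.
//     RETURN_ON_EXCEPTION(isolate, Finish(isolate, intermediate), Object);
//     return intermediate;
//   }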
    143 
    144 #define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
    145   C(Handler, handler)                                   \
    146   C(CEntryFP, c_entry_fp)                               \
    147   C(Context, context)                                   \
    148   C(PendingException, pending_exception)                \
    149   C(ExternalCaughtException, external_caught_exception) \
    150   C(JSEntrySP, js_entry_sp)
    151 
    152 
    153 // Platform-independent, reliable thread identifier.
    154 class ThreadId {
    155  public:
    156   // Creates an invalid ThreadId.
    157   ThreadId() : id_(kInvalidId) {}
    158 
    159   // Returns ThreadId for current thread.
    160   static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
    161 
    162   // Returns invalid ThreadId (guaranteed not to be equal to any thread).
    163   static ThreadId Invalid() { return ThreadId(kInvalidId); }
    164 
    165   // Compares ThreadIds for equality.
    166   INLINE(bool Equals(const ThreadId& other) const) {
    167     return id_ == other.id_;
    168   }
    169 
    170   // Checks whether this ThreadId refers to any thread.
    171   INLINE(bool IsValid() const) {
    172     return id_ != kInvalidId;
    173   }
    174 
    175   // Converts ThreadId to an integer representation
     176   // (required for public API: v8::V8::GetCurrentThreadId).
    177   int ToInteger() const { return id_; }
    178 
    179   // Converts ThreadId to an integer representation
     180   // (required for public API: v8::V8::TerminateExecution).
    181   static ThreadId FromInteger(int id) { return ThreadId(id); }
    182 
    183  private:
    184   static const int kInvalidId = -1;
    185 
    186   explicit ThreadId(int id) : id_(id) {}
    187 
    188   static int AllocateThreadId();
    189 
    190   static int GetCurrentThreadId();
    191 
    192   int id_;
    193 
    194   static base::Atomic32 highest_thread_id_;
    195 
    196   friend class Isolate;
    197 };
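// Illustrative usage sketch (editor's addition): remembering which thread
// owns a resource and checking it later.  owner_id is a hypothetical local.
//
//   ThreadId owner_id = ThreadId::Current();
//   ...
//   if (ThreadId::Current().Equals(owner_id)) {
//     // Still running on the owning thread.
//   }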
    198 
    199 
    200 #define FIELD_ACCESSOR(type, name)                 \
    201   inline void set_##name(type v) { name##_ = v; }  \
    202   inline type name() const { return name##_; }
    203 
    204 
    205 class ThreadLocalTop BASE_EMBEDDED {
    206  public:
    207   // Does early low-level initialization that does not depend on the
    208   // isolate being present.
    209   ThreadLocalTop();
    210 
    211   // Initialize the thread data.
    212   void Initialize();
    213 
    214   // Get the top C++ try catch handler or NULL if none are registered.
    215   //
    216   // This method is not guaranteed to return an address that can be
    217   // used for comparison with addresses into the JS stack.  If such an
    218   // address is needed, use try_catch_handler_address.
    219   FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)
    220 
    221   // Get the address of the top C++ try catch handler or NULL if
    222   // none are registered.
    223   //
    224   // This method always returns an address that can be compared to
    225   // pointers into the JavaScript stack.  When running on actual
    226   // hardware, try_catch_handler_address and TryCatchHandler return
    227   // the same pointer.  When running on a simulator with a separate JS
    228   // stack, try_catch_handler_address returns a JS stack address that
    229   // corresponds to the place on the JS stack where the C++ handler
    230   // would have been if the stack were not separate.
    231   Address try_catch_handler_address() {
    232     return reinterpret_cast<Address>(
    233         v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
    234   }
    235 
    236   void Free() {
    237     ASSERT(!has_pending_message_);
    238     ASSERT(!external_caught_exception_);
    239     ASSERT(try_catch_handler_ == NULL);
    240   }
    241 
    242   Isolate* isolate_;
     243   // The context in which the currently executing function was created; it is
     244   // also the context used for variable lookups.
    245   Context* context_;
    246   ThreadId thread_id_;
    247   Object* pending_exception_;
    248   bool has_pending_message_;
    249   bool rethrowing_message_;
    250   Object* pending_message_obj_;
    251   Object* pending_message_script_;
    252   int pending_message_start_pos_;
    253   int pending_message_end_pos_;
    254   // Use a separate value for scheduled exceptions to preserve the
    255   // invariants that hold about pending_exception.  We may want to
    256   // unify them later.
    257   Object* scheduled_exception_;
    258   bool external_caught_exception_;
    259   SaveContext* save_context_;
    260   v8::TryCatch* catcher_;
    261 
    262   // Stack.
    263   Address c_entry_fp_;  // the frame pointer of the top c entry frame
    264   Address handler_;   // try-blocks are chained through the stack
    265 
    266 #ifdef USE_SIMULATOR
    267   Simulator* simulator_;
    268 #endif
    269 
    270   Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
    271   // the external callback we're currently in
    272   ExternalCallbackScope* external_callback_scope_;
    273   StateTag current_vm_state_;
    274 
    275   // Generated code scratch locations.
    276   int32_t formal_count_;
    277 
    278   // Call back function to report unsafe JS accesses.
     279   // Callback function to report unsafe JS accesses.
    280 
    281   // Head of the list of live LookupResults.
    282   LookupResult* top_lookup_result_;
    283 
    284  private:
    285   void InitializeInternal();
    286 
    287   v8::TryCatch* try_catch_handler_;
    288 };
    289 
    290 
    291 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
    292     V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
    293     V8_TARGET_ARCH_MIPS && !defined(__mips__)
    294 
    295 #define ISOLATE_INIT_SIMULATOR_LIST(V)                                         \
    296   V(bool, simulator_initialized, false)                                        \
    297   V(HashMap*, simulator_i_cache, NULL)                                         \
    298   V(Redirection*, simulator_redirection, NULL)
    299 #else
    300 
    301 #define ISOLATE_INIT_SIMULATOR_LIST(V)
    302 
    303 #endif
    304 
    305 
    306 #ifdef DEBUG
    307 
    308 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
    309   V(CommentStatistic, paged_space_comments_statistics,                         \
    310       CommentStatistic::kMaxComments + 1)                                      \
    311   V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
    312 #else
    313 
    314 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
    315 
    316 #endif
    317 
    318 #define ISOLATE_INIT_ARRAY_LIST(V)                                             \
     319   /* Scratch buffers (regexp offsets, string search tables, random seed). */   \
    320   V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
    321   V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
    322   V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
    323   V(int, suffix_table, (kBMMaxShift + 1))                                      \
    324   V(uint32_t, private_random_seed, 2)                                          \
    325   ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
    326 
    327 typedef List<HeapObject*> DebugObjectCache;
    328 
    329 #define ISOLATE_INIT_LIST(V)                                                   \
    330   /* SerializerDeserializer state. */                                          \
    331   V(int, serialize_partial_snapshot_cache_length, 0)                           \
    332   V(int, serialize_partial_snapshot_cache_capacity, 0)                         \
    333   V(Object**, serialize_partial_snapshot_cache, NULL)                          \
    334   /* Assembler state. */                                                       \
    335   /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */    \
    336   V(byte*, assembler_spare_buffer, NULL)                                       \
    337   V(FatalErrorCallback, exception_behavior, NULL)                              \
    338   V(LogEventCallback, event_logger, NULL)                                      \
    339   V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
    340   /* To distinguish the function templates, so that we can find them in the */ \
    341   /* function cache of the native context. */                                  \
    342   V(int, next_serial_number, 0)                                                \
    343   V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
    344   /* Part of the state of liveedit. */                                         \
    345   V(FunctionInfoListener*, active_function_info_listener, NULL)                \
    346   /* State for Relocatable. */                                                 \
    347   V(Relocatable*, relocatable_top, NULL)                                       \
    348   V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
    349   V(Object*, string_stream_current_security_token, NULL)                       \
    350   /* Serializer state. */                                                      \
    351   V(ExternalReferenceTable*, external_reference_table, NULL)                   \
    352   /* AstNode state. */                                                         \
    353   V(int, ast_node_id, 0)                                                       \
    354   V(unsigned, ast_node_count, 0)                                               \
    355   V(int, pending_microtask_count, 0)                                           \
    356   V(bool, autorun_microtasks, true)                                            \
    357   V(HStatistics*, hstatistics, NULL)                                           \
    358   V(HTracer*, htracer, NULL)                                                   \
    359   V(CodeTracer*, code_tracer, NULL)                                            \
    360   V(bool, fp_stubs_generated, false)                                           \
    361   V(int, max_available_threads, 0)                                             \
    362   V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
    363   V(InterruptCallback, api_interrupt_callback, NULL)                           \
    364   V(void*, api_interrupt_callback_data, NULL)                                  \
    365   ISOLATE_INIT_SIMULATOR_LIST(V)
    366 
    367 #define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
    368   inline void set_##name(type v) { thread_local_top_.name##_ = v; }  \
    369   inline type name() const { return thread_local_top_.name##_; }
    370 
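// For reference: THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id), as used in
// Isolate below, expands to accessors that forward to thread_local_top_:
//
//   inline void set_thread_id(ThreadId v) { thread_local_top_.thread_id_ = v; }
//   inline ThreadId thread_id() const { return thread_local_top_.thread_id_; }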
    371 
    372 class Isolate {
    373   // These forward declarations are required to make the friend declarations in
    374   // PerIsolateThreadData work on some older versions of gcc.
    375   class ThreadDataTable;
    376   class EntryStackItem;
    377  public:
    378   ~Isolate();
    379 
    380   // A thread has a PerIsolateThreadData instance for each isolate that it has
    381   // entered. That instance is allocated when the isolate is initially entered
    382   // and reused on subsequent entries.
    383   class PerIsolateThreadData {
    384    public:
    385     PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
    386         : isolate_(isolate),
    387           thread_id_(thread_id),
    388           stack_limit_(0),
    389           thread_state_(NULL),
    390 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    391     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    392     !defined(__mips__) && V8_TARGET_ARCH_MIPS
    393           simulator_(NULL),
    394 #endif
    395           next_(NULL),
    396           prev_(NULL) { }
    397     ~PerIsolateThreadData();
    398     Isolate* isolate() const { return isolate_; }
    399     ThreadId thread_id() const { return thread_id_; }
    400 
    401     FIELD_ACCESSOR(uintptr_t, stack_limit)
    402     FIELD_ACCESSOR(ThreadState*, thread_state)
    403 
    404 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    405     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    406     !defined(__mips__) && V8_TARGET_ARCH_MIPS
    407     FIELD_ACCESSOR(Simulator*, simulator)
    408 #endif
    409 
    410     bool Matches(Isolate* isolate, ThreadId thread_id) const {
    411       return isolate_ == isolate && thread_id_.Equals(thread_id);
    412     }
    413 
    414    private:
    415     Isolate* isolate_;
    416     ThreadId thread_id_;
    417     uintptr_t stack_limit_;
    418     ThreadState* thread_state_;
    419 
    420 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    421     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    422     !defined(__mips__) && V8_TARGET_ARCH_MIPS
    423     Simulator* simulator_;
    424 #endif
    425 
    426     PerIsolateThreadData* next_;
    427     PerIsolateThreadData* prev_;
    428 
    429     friend class Isolate;
    430     friend class ThreadDataTable;
    431     friend class EntryStackItem;
    432 
    433     DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
    434   };
    435 
    436 
    437   enum AddressId {
    438 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
    439     FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
    440 #undef DECLARE_ENUM
    441     kIsolateAddressCount
    442   };
    443 
    444   // Returns the PerIsolateThreadData for the current thread (or NULL if one is
    445   // not currently set).
    446   static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    447     return reinterpret_cast<PerIsolateThreadData*>(
    448         Thread::GetThreadLocal(per_isolate_thread_data_key_));
    449   }
    450 
    451   // Returns the isolate inside which the current thread is running.
    452   INLINE(static Isolate* Current()) {
    453     Isolate* isolate = reinterpret_cast<Isolate*>(
    454         Thread::GetExistingThreadLocal(isolate_key_));
    455     ASSERT(isolate != NULL);
    456     return isolate;
    457   }
    458 
    459   INLINE(static Isolate* UncheckedCurrent()) {
    460     return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
    461   }
    462 
    463   // Usually called by Init(), but can be called early e.g. to allow
    464   // testing components that require logging but not the whole
    465   // isolate.
    466   //
    467   // Safe to call more than once.
    468   void InitializeLoggingAndCounters();
    469 
    470   bool Init(Deserializer* des);
    471 
    472   bool IsInitialized() { return state_ == INITIALIZED; }
    473 
    474   // True if at least one thread Enter'ed this isolate.
    475   bool IsInUse() { return entry_stack_ != NULL; }
    476 
    477   // Destroys the non-default isolates.
     478   // Sets the default isolate into a "has_been_disposed" state rather than
     479   // destroying it, for legacy API reasons.
    480   void TearDown();
    481 
    482   static void GlobalTearDown();
    483 
    484   static void SetCrashIfDefaultIsolateInitialized();
    485   // Ensures that process-wide resources and the default isolate have been
    486   // allocated. It is only necessary to call this method in rare cases, for
    487   // example if you are using V8 from within the body of a static initializer.
    488   // Safe to call multiple times.
    489   static void EnsureDefaultIsolate();
    490 
     491   // Find the PerThread for this particular (isolate, thread) combination.
     492   // If one does not yet exist, return NULL.
    493   PerIsolateThreadData* FindPerThreadDataForThisThread();
    494 
     495   // Find the PerThread for the given (isolate, thread) combination.
     496   // If one does not yet exist, return NULL.
    497   PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
    498 
    499   // Returns the key used to store the pointer to the current isolate.
    500   // Used internally for V8 threads that do not execute JavaScript but still
    501   // are part of the domain of an isolate (like the context switcher).
    502   static Thread::LocalStorageKey isolate_key() {
    503     return isolate_key_;
    504   }
    505 
    506   // Returns the key used to store process-wide thread IDs.
    507   static Thread::LocalStorageKey thread_id_key() {
    508     return thread_id_key_;
    509   }
    510 
    511   static Thread::LocalStorageKey per_isolate_thread_data_key();
    512 
    513   // Mutex for serializing access to break control structures.
    514   RecursiveMutex* break_access() { return &break_access_; }
    515 
    516   Address get_address_from_id(AddressId id);
    517 
    518   // Access to top context (where the current function object was created).
    519   Context* context() { return thread_local_top_.context_; }
    520   void set_context(Context* context) {
    521     ASSERT(context == NULL || context->IsContext());
    522     thread_local_top_.context_ = context;
    523   }
    524   Context** context_address() { return &thread_local_top_.context_; }
    525 
    526   THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
    527 
    528   // Access to current thread id.
    529   THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
    530 
    531   // Interface to pending exception.
    532   Object* pending_exception() {
    533     ASSERT(has_pending_exception());
    534     ASSERT(!thread_local_top_.pending_exception_->IsException());
    535     return thread_local_top_.pending_exception_;
    536   }
    537 
    538   void set_pending_exception(Object* exception_obj) {
    539     ASSERT(!exception_obj->IsException());
    540     thread_local_top_.pending_exception_ = exception_obj;
    541   }
    542 
    543   void clear_pending_exception() {
    544     ASSERT(!thread_local_top_.pending_exception_->IsException());
    545     thread_local_top_.pending_exception_ = heap_.the_hole_value();
    546   }
    547 
    548   Object** pending_exception_address() {
    549     return &thread_local_top_.pending_exception_;
    550   }
    551 
    552   bool has_pending_exception() {
    553     ASSERT(!thread_local_top_.pending_exception_->IsException());
    554     return !thread_local_top_.pending_exception_->IsTheHole();
    555   }
    556 
    557   THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
    558 
    559   void clear_pending_message() {
    560     thread_local_top_.has_pending_message_ = false;
    561     thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
    562     thread_local_top_.pending_message_script_ = heap_.the_hole_value();
    563   }
    564   v8::TryCatch* try_catch_handler() {
    565     return thread_local_top_.try_catch_handler();
    566   }
    567   Address try_catch_handler_address() {
    568     return thread_local_top_.try_catch_handler_address();
    569   }
    570   bool* external_caught_exception_address() {
    571     return &thread_local_top_.external_caught_exception_;
    572   }
    573 
    574   THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher)
    575 
    576   Object** scheduled_exception_address() {
    577     return &thread_local_top_.scheduled_exception_;
    578   }
    579 
    580   Address pending_message_obj_address() {
    581     return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
    582   }
    583 
    584   Address has_pending_message_address() {
    585     return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
    586   }
    587 
    588   Address pending_message_script_address() {
    589     return reinterpret_cast<Address>(
    590         &thread_local_top_.pending_message_script_);
    591   }
    592 
    593   Object* scheduled_exception() {
    594     ASSERT(has_scheduled_exception());
    595     ASSERT(!thread_local_top_.scheduled_exception_->IsException());
    596     return thread_local_top_.scheduled_exception_;
    597   }
    598   bool has_scheduled_exception() {
    599     ASSERT(!thread_local_top_.scheduled_exception_->IsException());
    600     return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
    601   }
    602   void clear_scheduled_exception() {
    603     ASSERT(!thread_local_top_.scheduled_exception_->IsException());
    604     thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
    605   }
    606 
    607   bool HasExternalTryCatch();
    608   bool IsFinallyOnTop();
    609 
    610   bool is_catchable_by_javascript(Object* exception) {
    611     return exception != heap()->termination_exception();
    612   }
    613 
    614   // Serializer.
    615   void PushToPartialSnapshotCache(Object* obj);
    616 
    617   // JS execution stack (see frames.h).
    618   static Address c_entry_fp(ThreadLocalTop* thread) {
    619     return thread->c_entry_fp_;
    620   }
    621   static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
    622 
    623   inline Address* c_entry_fp_address() {
    624     return &thread_local_top_.c_entry_fp_;
    625   }
    626   inline Address* handler_address() { return &thread_local_top_.handler_; }
    627 
    628   // Bottom JS entry.
    629   Address js_entry_sp() {
    630     return thread_local_top_.js_entry_sp_;
    631   }
    632   inline Address* js_entry_sp_address() {
    633     return &thread_local_top_.js_entry_sp_;
    634   }
    635 
    636   // Generated code scratch locations.
    637   void* formal_count_address() { return &thread_local_top_.formal_count_; }
    638 
    639   // Returns the global object of the current context. It could be
    640   // a builtin object, or a JS global object.
    641   Handle<GlobalObject> global_object() {
    642     return Handle<GlobalObject>(context()->global_object());
    643   }
    644 
    645   // Returns the global proxy object of the current context.
    646   Object* global_proxy() {
    647     return context()->global_proxy();
    648   }
    649 
    650   Handle<JSBuiltinsObject> js_builtins_object() {
    651     return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
    652   }
    653 
    654   static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
    655   void FreeThreadResources() { thread_local_top_.Free(); }
    656 
     657   // This method is called by the API after operations that may throw
     658   // exceptions.  If an exception was thrown and not handled by an external
     659   // handler, the exception is scheduled to be rethrown when we return to
     660   // running JavaScript code.  Returns true if an exception was scheduled.
    661   bool OptionalRescheduleException(bool is_bottom_call);
    662 
    663   class ExceptionScope {
    664    public:
    665     explicit ExceptionScope(Isolate* isolate) :
    666       // Scope currently can only be used for regular exceptions,
    667       // not termination exception.
    668       isolate_(isolate),
    669       pending_exception_(isolate_->pending_exception(), isolate_),
    670       catcher_(isolate_->catcher())
    671     { }
    672 
    673     ~ExceptionScope() {
    674       isolate_->set_catcher(catcher_);
    675       isolate_->set_pending_exception(*pending_exception_);
    676     }
    677 
    678    private:
    679     Isolate* isolate_;
    680     Handle<Object> pending_exception_;
    681     v8::TryCatch* catcher_;
    682   };
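  // Illustrative usage sketch (editor's addition): shielding an existing
  // pending exception across work that may itself throw and clear it.
  // DoUnrelatedWork is a hypothetical helper.
  //
  //   {
  //     ExceptionScope scope(isolate);  // saves pending exception and catcher
  //     DoUnrelatedWork(isolate);
  //   }                                 // both are restored on scope exit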
    683 
    684   void SetCaptureStackTraceForUncaughtExceptions(
    685       bool capture,
    686       int frame_limit,
    687       StackTrace::StackTraceOptions options);
    688 
    689   void PrintCurrentStackTrace(FILE* out);
    690   void PrintStack(StringStream* accumulator);
    691   void PrintStack(FILE* out);
    692   Handle<String> StackTraceString();
    693   NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
    694                                       Object* object,
    695                                       Map* map,
    696                                       unsigned int magic2));
    697   Handle<JSArray> CaptureCurrentStackTrace(
    698       int frame_limit,
    699       StackTrace::StackTraceOptions options);
    700 
    701   Handle<JSArray> CaptureSimpleStackTrace(Handle<JSObject> error_object,
    702                                           Handle<Object> caller,
    703                                           int limit);
    704   void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
    705 
     706   // Returns whether the top context may access the given global object. If
    707   // the result is false, the pending exception is guaranteed to be
    708   // set.
    709 
    710   bool MayNamedAccess(Handle<JSObject> receiver,
    711                       Handle<Object> key,
    712                       v8::AccessType type);
    713   bool MayIndexedAccess(Handle<JSObject> receiver,
    714                         uint32_t index,
    715                         v8::AccessType type);
    716 
    717   void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
    718   void ReportFailedAccessCheck(Handle<JSObject> receiver, v8::AccessType type);
    719 
    720   // Exception throwing support. The caller should use the result
    721   // of Throw() as its return value.
    722   Object* Throw(Object* exception, MessageLocation* location = NULL);
    723 
    724   template <typename T>
    725   MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
    726                                        MessageLocation* location = NULL) {
    727     Throw(*exception, location);
    728     return MaybeHandle<T>();
    729   }
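  // Illustrative usage sketch (editor's addition): as noted above, callers
  // return the result of Throw() so the failure value propagates.  error is a
  // hypothetical Handle<Object> holding the exception to be thrown.
  //
  //   Object* ExampleHelper(Isolate* isolate, Handle<Object> error) {
  //     return isolate->Throw(*error);
  //   }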
    730 
    731   // Re-throw an exception.  This involves no error reporting since
    732   // error reporting was handled when the exception was thrown
    733   // originally.
    734   Object* ReThrow(Object* exception);
    735   void ScheduleThrow(Object* exception);
    736   // Re-set pending message, script and positions reported to the TryCatch
    737   // back to the TLS for re-use when rethrowing.
    738   void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
    739   void ReportPendingMessages();
     740   // Return the pending message location if any, or an unfilled structure.
    741   MessageLocation GetMessageLocation();
    742   Object* ThrowIllegalOperation();
    743   Object* ThrowInvalidStringLength();
    744 
    745   // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
    746   Object* PromoteScheduledException();
    747   void DoThrow(Object* exception, MessageLocation* location);
    748   // Checks if exception should be reported and finds out if it's
    749   // caught externally.
    750   bool ShouldReportException(bool* can_be_caught_externally,
    751                              bool catchable_by_javascript);
    752 
    753   // Attempts to compute the current source location, storing the
    754   // result in the target out parameter.
    755   void ComputeLocation(MessageLocation* target);
    756 
    757   // Out of resource exception helpers.
    758   Object* StackOverflow();
    759   Object* TerminateExecution();
    760   void CancelTerminateExecution();
    761 
    762   void InvokeApiInterruptCallback();
    763 
    764   // Administration
    765   void Iterate(ObjectVisitor* v);
    766   void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
    767   char* Iterate(ObjectVisitor* v, char* t);
    768   void IterateThread(ThreadVisitor* v, char* t);
    769 
    770 
    771   // Returns the current native and global context.
    772   Handle<Context> native_context();
    773   Handle<Context> global_context();
    774 
    775   // Returns the native context of the calling JavaScript code.  That
    776   // is, the native context of the top-most JavaScript frame.
    777   Handle<Context> GetCallingNativeContext();
    778 
    779   void RegisterTryCatchHandler(v8::TryCatch* that);
    780   void UnregisterTryCatchHandler(v8::TryCatch* that);
    781 
    782   char* ArchiveThread(char* to);
    783   char* RestoreThread(char* from);
    784 
    785   static const char* const kStackOverflowMessage;
    786 
    787   static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
    788   static const int kBMMaxShift = 250;        // See StringSearchBase.
    789 
    790   // Accessors.
    791 #define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
    792   inline type name() const {                                            \
    793     ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    794     return name##_;                                                     \
    795   }                                                                     \
    796   inline void set_##name(type value) {                                  \
    797     ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    798     name##_ = value;                                                    \
    799   }
    800   ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
    801 #undef GLOBAL_ACCESSOR
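  // For reference: each ISOLATE_INIT_LIST entry V(type, name, initial_value)
  // yields a name()/set_name() pair on Isolate.  For example,
  // V(int, next_serial_number, 0) produces:
  //
  //   inline int next_serial_number() const;
  //   inline void set_next_serial_number(int value);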
    802 
    803 #define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
    804   inline type* name() {                                                 \
    805     ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    806     return &(name##_)[0];                                               \
    807   }
    808   ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
    809 #undef GLOBAL_ARRAY_ACCESSOR
    810 
    811 #define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)            \
    812   Handle<type> name() {                                             \
    813     return Handle<type>(context()->native_context()->name(), this); \
    814   }                                                                 \
    815   bool is_##name(type* value) {                                     \
    816     return context()->native_context()->is_##name(value);           \
    817   }
    818   NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
    819 #undef NATIVE_CONTEXT_FIELD_ACCESSOR
    820 
    821   Bootstrapper* bootstrapper() { return bootstrapper_; }
    822   Counters* counters() {
    823     // Call InitializeLoggingAndCounters() if logging is needed before
    824     // the isolate is fully initialized.
    825     ASSERT(counters_ != NULL);
    826     return counters_;
    827   }
    828   CodeRange* code_range() { return code_range_; }
    829   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
    830   CompilationCache* compilation_cache() { return compilation_cache_; }
    831   Logger* logger() {
    832     // Call InitializeLoggingAndCounters() if logging is needed before
    833     // the isolate is fully initialized.
    834     ASSERT(logger_ != NULL);
    835     return logger_;
    836   }
    837   StackGuard* stack_guard() { return &stack_guard_; }
    838   Heap* heap() { return &heap_; }
    839   StatsTable* stats_table();
    840   StubCache* stub_cache() { return stub_cache_; }
    841   CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
    842   DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
    843   ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
    844   MaterializedObjectStore* materialized_object_store() {
    845     return materialized_object_store_;
    846   }
    847 
    848   MemoryAllocator* memory_allocator() {
    849     return memory_allocator_;
    850   }
    851 
    852   KeyedLookupCache* keyed_lookup_cache() {
    853     return keyed_lookup_cache_;
    854   }
    855 
    856   ContextSlotCache* context_slot_cache() {
    857     return context_slot_cache_;
    858   }
    859 
    860   DescriptorLookupCache* descriptor_lookup_cache() {
    861     return descriptor_lookup_cache_;
    862   }
    863 
    864   HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
    865 
    866   HandleScopeImplementer* handle_scope_implementer() {
    867     ASSERT(handle_scope_implementer_);
    868     return handle_scope_implementer_;
    869   }
    870   Zone* runtime_zone() { return &runtime_zone_; }
    871 
    872   UnicodeCache* unicode_cache() {
    873     return unicode_cache_;
    874   }
    875 
    876   InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    877     return inner_pointer_to_code_cache_;
    878   }
    879 
    880   ConsStringIteratorOp* write_iterator() { return write_iterator_; }
    881 
    882   GlobalHandles* global_handles() { return global_handles_; }
    883 
    884   EternalHandles* eternal_handles() { return eternal_handles_; }
    885 
    886   ThreadManager* thread_manager() { return thread_manager_; }
    887 
    888   StringTracker* string_tracker() { return string_tracker_; }
    889 
    890   unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    891     return &jsregexp_uncanonicalize_;
    892   }
    893 
    894   unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    895     return &jsregexp_canonrange_;
    896   }
    897 
    898   ConsStringIteratorOp* objects_string_compare_iterator_a() {
    899     return &objects_string_compare_iterator_a_;
    900   }
    901 
    902   ConsStringIteratorOp* objects_string_compare_iterator_b() {
    903     return &objects_string_compare_iterator_b_;
    904   }
    905 
    906   StaticResource<ConsStringIteratorOp>* objects_string_iterator() {
    907     return &objects_string_iterator_;
    908   }
    909 
    910   RuntimeState* runtime_state() { return &runtime_state_; }
    911 
    912   Builtins* builtins() { return &builtins_; }
    913 
    914   void NotifyExtensionInstalled() {
    915     has_installed_extensions_ = true;
    916   }
    917 
    918   bool has_installed_extensions() { return has_installed_extensions_; }
    919 
    920   unibrow::Mapping<unibrow::Ecma262Canonicalize>*
    921       regexp_macro_assembler_canonicalize() {
    922     return &regexp_macro_assembler_canonicalize_;
    923   }
    924 
    925   RegExpStack* regexp_stack() { return regexp_stack_; }
    926 
    927   unibrow::Mapping<unibrow::Ecma262Canonicalize>*
    928       interp_canonicalize_mapping() {
    929     return &interp_canonicalize_mapping_;
    930   }
    931 
    932   Debug* debug() { return debug_; }
    933 
    934   inline bool DebuggerHasBreakPoints();
    935 
    936   CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
    937   HeapProfiler* heap_profiler() const { return heap_profiler_; }
    938 
    939 #ifdef DEBUG
    940   HistogramInfo* heap_histograms() { return heap_histograms_; }
    941 
    942   JSObject::SpillInformation* js_spill_information() {
    943     return &js_spill_information_;
    944   }
    945 #endif
    946 
    947   Factory* factory() { return reinterpret_cast<Factory*>(this); }
    948 
    949   static const int kJSRegexpStaticOffsetsVectorSize = 128;
    950 
    951   THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
    952 
    953   THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
    954 
    955   void SetData(uint32_t slot, void* data) {
    956     ASSERT(slot < Internals::kNumIsolateDataSlots);
    957     embedder_data_[slot] = data;
    958   }
    959   void* GetData(uint32_t slot) {
    960     ASSERT(slot < Internals::kNumIsolateDataSlots);
    961     return embedder_data_[slot];
    962   }
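  // Illustrative usage sketch (editor's addition): an embedder stashing its
  // own per-isolate state in a data slot.  MyEmbedderState and kMySlot are
  // hypothetical; kMySlot must be less than Internals::kNumIsolateDataSlots.
  //
  //   isolate->SetData(kMySlot, new MyEmbedderState());
  //   ...
  //   MyEmbedderState* state =
  //       static_cast<MyEmbedderState*>(isolate->GetData(kMySlot));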
    963 
    964   THREAD_LOCAL_TOP_ACCESSOR(LookupResult*, top_lookup_result)
    965 
    966   void enable_serializer() {
    967     // The serializer can only be enabled before the isolate init.
    968     ASSERT(state_ != INITIALIZED);
    969     serializer_enabled_ = true;
    970   }
    971 
    972   bool serializer_enabled() const { return serializer_enabled_; }
    973 
    974   bool IsDead() { return has_fatal_error_; }
    975   void SignalFatalError() { has_fatal_error_ = true; }
    976 
    977   bool use_crankshaft() const;
    978 
    979   bool initialized_from_snapshot() { return initialized_from_snapshot_; }
    980 
    981   double time_millis_since_init() {
    982     return OS::TimeCurrentMillis() - time_millis_at_init_;
    983   }
    984 
    985   DateCache* date_cache() {
    986     return date_cache_;
    987   }
    988 
    989   void set_date_cache(DateCache* date_cache) {
    990     if (date_cache != date_cache_) {
    991       delete date_cache_;
    992     }
    993     date_cache_ = date_cache;
    994   }
    995 
    996   Map* get_initial_js_array_map(ElementsKind kind);
    997 
    998   bool IsFastArrayConstructorPrototypeChainIntact();
    999 
   1000   CodeStubInterfaceDescriptor*
   1001       code_stub_interface_descriptor(int index);
   1002 
   1003   enum CallDescriptorKey {
   1004     KeyedCall,
   1005     NamedCall,
   1006     CallHandler,
   1007     ArgumentAdaptorCall,
   1008     ApiFunctionCall,
   1009     NUMBER_OF_CALL_DESCRIPTORS
   1010   };
   1011 
   1012   CallInterfaceDescriptor* call_descriptor(CallDescriptorKey index);
   1013 
   1014   void IterateDeferredHandles(ObjectVisitor* visitor);
   1015   void LinkDeferredHandles(DeferredHandles* deferred_handles);
   1016   void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
   1017 
   1018 #ifdef DEBUG
   1019   bool IsDeferredHandle(Object** location);
   1020 #endif  // DEBUG
   1021 
   1022   bool concurrent_recompilation_enabled() {
   1023     // Thread is only available with flag enabled.
   1024     ASSERT(optimizing_compiler_thread_ == NULL ||
   1025            FLAG_concurrent_recompilation);
   1026     return optimizing_compiler_thread_ != NULL;
   1027   }
   1028 
   1029   bool concurrent_osr_enabled() const {
   1030     // Thread is only available with flag enabled.
   1031     ASSERT(optimizing_compiler_thread_ == NULL ||
   1032            FLAG_concurrent_recompilation);
   1033     return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
   1034   }
   1035 
   1036   OptimizingCompilerThread* optimizing_compiler_thread() {
   1037     return optimizing_compiler_thread_;
   1038   }
   1039 
   1040   int num_sweeper_threads() const {
   1041     return num_sweeper_threads_;
   1042   }
   1043 
   1044   SweeperThread** sweeper_threads() {
   1045     return sweeper_thread_;
   1046   }
   1047 
   1048   int id() const { return static_cast<int>(id_); }
   1049 
   1050   HStatistics* GetHStatistics();
   1051   HTracer* GetHTracer();
   1052   CodeTracer* GetCodeTracer();
   1053 
   1054   FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
   1055   void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
   1056     function_entry_hook_ = function_entry_hook;
   1057   }
   1058 
   1059   void* stress_deopt_count_address() { return &stress_deopt_count_; }
   1060 
   1061   inline RandomNumberGenerator* random_number_generator();
   1062 
   1063   // Given an address occupied by a live code object, return that object.
   1064   Object* FindCodeObject(Address a);
   1065 
   1066   int NextOptimizationId() {
   1067     int id = next_optimization_id_++;
   1068     if (!Smi::IsValid(next_optimization_id_)) {
   1069       next_optimization_id_ = 0;
   1070     }
   1071     return id;
   1072   }
   1073 
   1074   // Get (and lazily initialize) the registry for per-isolate symbols.
   1075   Handle<JSObject> GetSymbolRegistry();
   1076 
   1077   void AddCallCompletedCallback(CallCompletedCallback callback);
   1078   void RemoveCallCompletedCallback(CallCompletedCallback callback);
   1079   void FireCallCompletedCallback();
   1080 
   1081   void EnqueueMicrotask(Handle<Object> microtask);
   1082   void RunMicrotasks();
   1083 
   1084  private:
   1085   Isolate();
   1086 
   1087   friend struct GlobalState;
   1088   friend struct InitializeGlobalState;
   1089 
   1090   enum State {
   1091     UNINITIALIZED,    // Some components may not have been allocated.
   1092     INITIALIZED       // All components are fully initialized.
   1093   };
   1094 
    1095   // These fields are accessed through the API; offsets must be kept in sync
   1096   // with v8::internal::Internals (in include/v8.h) constants. This is also
   1097   // verified in Isolate::Init() using runtime checks.
   1098   void* embedder_data_[Internals::kNumIsolateDataSlots];
   1099   Heap heap_;
   1100   State state_;  // Will be padded to kApiPointerSize.
   1101 
   1102   // The per-process lock should be acquired before the ThreadDataTable is
   1103   // modified.
   1104   class ThreadDataTable {
   1105    public:
   1106     ThreadDataTable();
   1107     ~ThreadDataTable();
   1108 
   1109     PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
   1110     void Insert(PerIsolateThreadData* data);
   1111     void Remove(PerIsolateThreadData* data);
   1112     void RemoveAllThreads(Isolate* isolate);
   1113 
   1114    private:
   1115     PerIsolateThreadData* list_;
   1116   };
   1117 
   1118   // These items form a stack synchronously with threads Enter'ing and Exit'ing
   1119   // the Isolate. The top of the stack points to a thread which is currently
   1120   // running the Isolate. When the stack is empty, the Isolate is considered
   1121   // not entered by any thread and can be Disposed.
    1122   // If the same thread enters the Isolate more than once, the entry_count_
    1123   // is incremented rather than a new item being pushed onto the stack.
   1124   class EntryStackItem {
   1125    public:
   1126     EntryStackItem(PerIsolateThreadData* previous_thread_data,
   1127                    Isolate* previous_isolate,
   1128                    EntryStackItem* previous_item)
   1129         : entry_count(1),
   1130           previous_thread_data(previous_thread_data),
   1131           previous_isolate(previous_isolate),
   1132           previous_item(previous_item) { }
   1133 
   1134     int entry_count;
   1135     PerIsolateThreadData* previous_thread_data;
   1136     Isolate* previous_isolate;
   1137     EntryStackItem* previous_item;
   1138 
   1139    private:
   1140     DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
   1141   };
   1142 
   1143   // This mutex protects highest_thread_id_ and thread_data_table_.
   1144   static Mutex process_wide_mutex_;
   1145 
   1146   static Thread::LocalStorageKey per_isolate_thread_data_key_;
   1147   static Thread::LocalStorageKey isolate_key_;
   1148   static Thread::LocalStorageKey thread_id_key_;
   1149   static ThreadDataTable* thread_data_table_;
   1150 
   1151   // A global counter for all generated Isolates, might overflow.
   1152   static base::Atomic32 isolate_counter_;
   1153 
   1154   void Deinit();
   1155 
   1156   static void SetIsolateThreadLocals(Isolate* isolate,
   1157                                      PerIsolateThreadData* data);
   1158 
   1159   // Find the PerThread for this particular (isolate, thread) combination.
   1160   // If one does not yet exist, allocate a new one.
   1161   PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
   1162 
   1163   // Initializes the current thread to run this Isolate.
   1164   // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
    1165   // at the same time; this should be prevented using external locking.
   1166   void Enter();
   1167 
    1168   // Exits the current thread. The previously entered Isolate is restored
   1169   // for the thread.
   1170   // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
    1171   // at the same time; this should be prevented using external locking.
   1172   void Exit();
   1173 
   1174   void InitializeThreadLocal();
   1175 
   1176   void MarkCompactPrologue(bool is_compacting,
   1177                            ThreadLocalTop* archived_thread_data);
   1178   void MarkCompactEpilogue(bool is_compacting,
   1179                            ThreadLocalTop* archived_thread_data);
   1180 
   1181   void FillCache();
   1182 
   1183   // Propagate pending exception message to the v8::TryCatch.
    1184   // If there is no external try-catch or the message was successfully
    1185   // propagated, then return true.
   1186   bool PropagatePendingExceptionToExternalTryCatch();
   1187 
   1188   // Traverse prototype chain to find out whether the object is derived from
   1189   // the Error object.
   1190   bool IsErrorObject(Handle<Object> obj);
   1191 
   1192   base::Atomic32 id_;
   1193   EntryStackItem* entry_stack_;
   1194   int stack_trace_nesting_level_;
   1195   StringStream* incomplete_message_;
   1196   Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
   1197   Bootstrapper* bootstrapper_;
   1198   RuntimeProfiler* runtime_profiler_;
   1199   CompilationCache* compilation_cache_;
   1200   Counters* counters_;
   1201   CodeRange* code_range_;
   1202   RecursiveMutex break_access_;
   1203   base::Atomic32 debugger_initialized_;
   1204   Logger* logger_;
   1205   StackGuard stack_guard_;
   1206   StatsTable* stats_table_;
   1207   StubCache* stub_cache_;
   1208   CodeAgingHelper* code_aging_helper_;
   1209   DeoptimizerData* deoptimizer_data_;
   1210   MaterializedObjectStore* materialized_object_store_;
   1211   ThreadLocalTop thread_local_top_;
   1212   bool capture_stack_trace_for_uncaught_exceptions_;
   1213   int stack_trace_for_uncaught_exceptions_frame_limit_;
   1214   StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
   1215   MemoryAllocator* memory_allocator_;
   1216   KeyedLookupCache* keyed_lookup_cache_;
   1217   ContextSlotCache* context_slot_cache_;
   1218   DescriptorLookupCache* descriptor_lookup_cache_;
   1219   HandleScopeData handle_scope_data_;
   1220   HandleScopeImplementer* handle_scope_implementer_;
   1221   UnicodeCache* unicode_cache_;
   1222   Zone runtime_zone_;
   1223   InnerPointerToCodeCache* inner_pointer_to_code_cache_;
   1224   ConsStringIteratorOp* write_iterator_;
   1225   GlobalHandles* global_handles_;
   1226   EternalHandles* eternal_handles_;
   1227   ThreadManager* thread_manager_;
   1228   RuntimeState runtime_state_;
   1229   Builtins builtins_;
   1230   bool has_installed_extensions_;
   1231   StringTracker* string_tracker_;
   1232   unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
   1233   unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
   1234   ConsStringIteratorOp objects_string_compare_iterator_a_;
   1235   ConsStringIteratorOp objects_string_compare_iterator_b_;
   1236   StaticResource<ConsStringIteratorOp> objects_string_iterator_;
   1237   unibrow::Mapping<unibrow::Ecma262Canonicalize>
   1238       regexp_macro_assembler_canonicalize_;
   1239   RegExpStack* regexp_stack_;
   1240   DateCache* date_cache_;
   1241   unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
   1242   CodeStubInterfaceDescriptor* code_stub_interface_descriptors_;
   1243   CallInterfaceDescriptor* call_descriptors_;
   1244   RandomNumberGenerator* random_number_generator_;
   1245 
   1246   // Whether the isolate has been created for snapshotting.
   1247   bool serializer_enabled_;
   1248 
   1249   // True if fatal error has been signaled for this isolate.
   1250   bool has_fatal_error_;
   1251 
   1252   // True if this isolate was initialized from a snapshot.
   1253   bool initialized_from_snapshot_;
   1254 
   1255   // Time stamp at initialization.
   1256   double time_millis_at_init_;
   1257 
   1258 #ifdef DEBUG
   1259   // A static array of histogram info for each type.
   1260   HistogramInfo heap_histograms_[LAST_TYPE + 1];
   1261   JSObject::SpillInformation js_spill_information_;
   1262 #endif
   1263 
   1264   Debug* debug_;
   1265   CpuProfiler* cpu_profiler_;
   1266   HeapProfiler* heap_profiler_;
   1267   FunctionEntryHook function_entry_hook_;
   1268 
   1269 #define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
   1270   type name##_;
   1271   ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
   1272 #undef GLOBAL_BACKING_STORE
   1273 
   1274 #define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
   1275   type name##_[length];
   1276   ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
   1277 #undef GLOBAL_ARRAY_BACKING_STORE
   1278 
   1279 #ifdef DEBUG
   1280   // This class is huge and has a number of fields controlled by
   1281   // preprocessor defines. Make sure the offsets of these fields agree
   1282   // between compilation units.
   1283 #define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
   1284   static const intptr_t name##_debug_offset_;
   1285   ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
   1286   ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
   1287 #undef ISOLATE_FIELD_OFFSET
   1288 #endif
   1289 
   1290   DeferredHandles* deferred_handles_head_;
   1291   OptimizingCompilerThread* optimizing_compiler_thread_;
   1292   SweeperThread** sweeper_thread_;
   1293   int num_sweeper_threads_;
   1294 
   1295   // Counts deopt points if deopt_every_n_times is enabled.
   1296   unsigned int stress_deopt_count_;
   1297 
   1298   int next_optimization_id_;
   1299 
   1300   // List of callbacks when a Call completes.
   1301   List<CallCompletedCallback> call_completed_callbacks_;
   1302 
   1303   friend class ExecutionAccess;
   1304   friend class HandleScopeImplementer;
   1305   friend class IsolateInitializer;
   1306   friend class OptimizingCompilerThread;
   1307   friend class SweeperThread;
   1308   friend class ThreadManager;
   1309   friend class Simulator;
   1310   friend class StackGuard;
   1311   friend class ThreadId;
   1312   friend class TestMemoryAllocatorScope;
   1313   friend class TestCodeRangeScope;
   1314   friend class v8::Isolate;
   1315   friend class v8::Locker;
   1316   friend class v8::Unlocker;
   1317 
   1318   DISALLOW_COPY_AND_ASSIGN(Isolate);
   1319 };
   1320 
   1321 
   1322 #undef FIELD_ACCESSOR
   1323 #undef THREAD_LOCAL_TOP_ACCESSOR
   1324 
   1325 
   1326 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
    1327 // class as a workaround for a bug in the generated code found with these
   1328 // versions of GCC. See V8 issue 122 for details.
   1329 class SaveContext BASE_EMBEDDED {
   1330  public:
   1331   inline explicit SaveContext(Isolate* isolate);
   1332 
   1333   ~SaveContext() {
   1334     isolate_->set_context(context_.is_null() ? NULL : *context_);
   1335     isolate_->set_save_context(prev_);
   1336   }
   1337 
   1338   Handle<Context> context() { return context_; }
   1339   SaveContext* prev() { return prev_; }
   1340 
   1341   // Returns true if this save context is below a given JavaScript frame.
   1342   bool IsBelowFrame(JavaScriptFrame* frame) {
   1343     return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
   1344   }
   1345 
   1346  private:
   1347   Isolate* isolate_;
   1348   Handle<Context> context_;
   1349   SaveContext* prev_;
   1350   Address c_entry_fp_;
   1351 };
   1352 
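// Illustrative usage sketch (editor's addition): temporarily switching the
// isolate to another context and letting the destructor restore the previous
// one.  other_context is a hypothetical Handle<Context>.
//
//   {
//     SaveContext save(isolate);
//     isolate->set_context(*other_context);
//     ...  // run code against other_context
//   }     // previous context and save_context chain restored here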
   1353 
   1354 class AssertNoContextChange BASE_EMBEDDED {
   1355 #ifdef DEBUG
   1356  public:
   1357   explicit AssertNoContextChange(Isolate* isolate)
   1358     : isolate_(isolate),
   1359       context_(isolate->context(), isolate) { }
   1360   ~AssertNoContextChange() {
   1361     ASSERT(isolate_->context() == *context_);
   1362   }
   1363 
   1364  private:
   1365   Isolate* isolate_;
   1366   Handle<Context> context_;
   1367 #else
   1368  public:
   1369   explicit AssertNoContextChange(Isolate* isolate) { }
   1370 #endif
   1371 };
   1372 
   1373 
   1374 class ExecutionAccess BASE_EMBEDDED {
   1375  public:
   1376   explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
   1377     Lock(isolate);
   1378   }
   1379   ~ExecutionAccess() { Unlock(isolate_); }
   1380 
   1381   static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
   1382   static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
   1383 
   1384   static bool TryLock(Isolate* isolate) {
   1385     return isolate->break_access()->TryLock();
   1386   }
   1387 
   1388  private:
   1389   Isolate* isolate_;
   1390 };
   1391 
   1392 
   1393 // Support for checking for stack-overflows.
   1394 class StackLimitCheck BASE_EMBEDDED {
   1395  public:
   1396   explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
   1397 
   1398   // Use this to check for stack-overflows in C++ code.
   1399   inline bool HasOverflowed() const {
   1400     StackGuard* stack_guard = isolate_->stack_guard();
   1401     return reinterpret_cast<uintptr_t>(this) < stack_guard->real_climit();
   1402   }
   1403 
   1404   // Use this to check for stack-overflow when entering runtime from JS code.
   1405   bool JsHasOverflowed() const;
   1406 
   1407  private:
   1408   Isolate* isolate_;
   1409 };
   1410 
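// Illustrative usage sketch (editor's addition): the common pattern for
// guarding deeply recursive C++ code against stack overflow.
//
//   StackLimitCheck check(isolate);
//   if (check.HasOverflowed()) {
//     return isolate->StackOverflow();  // sets a pending exception
//   }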
   1411 
   1412 // Support for temporarily postponing interrupts. When the outermost
    1413 // postpone scope is left, interrupts will be re-enabled, and any
   1414 // interrupts that occurred while in the scope will be taken into
   1415 // account.
   1416 class PostponeInterruptsScope BASE_EMBEDDED {
   1417  public:
   1418   explicit PostponeInterruptsScope(Isolate* isolate)
   1419       : stack_guard_(isolate->stack_guard()), isolate_(isolate) {
   1420     ExecutionAccess access(isolate_);
   1421     stack_guard_->thread_local_.postpone_interrupts_nesting_++;
   1422     stack_guard_->DisableInterrupts();
   1423   }
   1424 
   1425   ~PostponeInterruptsScope() {
   1426     ExecutionAccess access(isolate_);
   1427     if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
   1428       stack_guard_->EnableInterrupts();
   1429     }
   1430   }
   1431  private:
   1432   StackGuard* stack_guard_;
   1433   Isolate* isolate_;
   1434 };
   1435 
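// Illustrative usage sketch (editor's addition): keeping interrupts from
// being processed while a non-reentrant operation runs.
//
//   {
//     PostponeInterruptsScope postpone(isolate);
//     ...  // interrupts requested here are handled after the scope is left
//   }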
   1436 
   1437 class CodeTracer V8_FINAL : public Malloced {
   1438  public:
   1439   explicit CodeTracer(int isolate_id)
   1440       : file_(NULL),
   1441         scope_depth_(0) {
   1442     if (!ShouldRedirect()) {
   1443       file_ = stdout;
   1444       return;
   1445     }
   1446 
   1447     if (FLAG_redirect_code_traces_to == NULL) {
   1448       SNPrintF(filename_,
   1449                "code-%d-%d.asm",
   1450                OS::GetCurrentProcessId(),
   1451                isolate_id);
   1452     } else {
   1453       StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
   1454     }
   1455 
   1456     WriteChars(filename_.start(), "", 0, false);
   1457   }
   1458 
   1459   class Scope {
   1460    public:
   1461     explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
   1462     ~Scope() { tracer_->CloseFile();  }
   1463 
   1464     FILE* file() const { return tracer_->file(); }
   1465 
   1466    private:
   1467     CodeTracer* tracer_;
   1468   };
   1469 
   1470   void OpenFile() {
   1471     if (!ShouldRedirect()) {
   1472       return;
   1473     }
   1474 
   1475     if (file_ == NULL) {
   1476       file_ = OS::FOpen(filename_.start(), "a");
   1477     }
   1478 
   1479     scope_depth_++;
   1480   }
   1481 
   1482   void CloseFile() {
   1483     if (!ShouldRedirect()) {
   1484       return;
   1485     }
   1486 
   1487     if (--scope_depth_ == 0) {
   1488       fclose(file_);
   1489       file_ = NULL;
   1490     }
   1491   }
   1492 
   1493   FILE* file() const { return file_; }
   1494 
   1495  private:
   1496   static bool ShouldRedirect() {
   1497     return FLAG_redirect_code_traces;
   1498   }
   1499 
   1500   EmbeddedVector<char, 128> filename_;
   1501   FILE* file_;
   1502   int scope_depth_;
   1503 };
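
// Illustrative usage sketch (editor's addition): the scoped pattern for
// writing to the (possibly redirected) code trace stream.
//
//   CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
//   PrintF(tracing_scope.file(), "--- example trace output ---\n");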
   1504 
   1505 } }  // namespace v8::internal
   1506 
   1507 #endif  // V8_ISOLATE_H_
   1508