      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #ifndef V8_HEAP_H_
      6 #define V8_HEAP_H_
      7 
      8 #include <cmath>
      9 
     10 #include "src/allocation.h"
     11 #include "src/assert-scope.h"
     12 #include "src/counters.h"
     13 #include "src/globals.h"
     14 #include "src/incremental-marking.h"
     15 #include "src/list.h"
     16 #include "src/mark-compact.h"
     17 #include "src/objects-visiting.h"
     18 #include "src/spaces.h"
     19 #include "src/splay-tree-inl.h"
     20 #include "src/store-buffer.h"
     21 
     22 namespace v8 {
     23 namespace internal {
     24 
     25 // Defines all the roots in Heap.
     26 #define STRONG_ROOT_LIST(V)                                                    \
     27   V(Map, byte_array_map, ByteArrayMap)                                         \
     28   V(Map, free_space_map, FreeSpaceMap)                                         \
     29   V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
     30   V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
     31   /* Cluster the most popular ones in a few cache lines here at the top.    */ \
     32   V(Smi, store_buffer_top, StoreBufferTop)                                     \
     33   V(Oddball, undefined_value, UndefinedValue)                                  \
     34   V(Oddball, the_hole_value, TheHoleValue)                                     \
     35   V(Oddball, null_value, NullValue)                                            \
     36   V(Oddball, true_value, TrueValue)                                            \
     37   V(Oddball, false_value, FalseValue)                                          \
     38   V(Oddball, uninitialized_value, UninitializedValue)                          \
     39   V(Oddball, exception, Exception)                                             \
     40   V(Map, cell_map, CellMap)                                                    \
     41   V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
     42   V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
     43   V(Map, meta_map, MetaMap)                                                    \
     44   V(Map, heap_number_map, HeapNumberMap)                                       \
     45   V(Map, native_context_map, NativeContextMap)                                 \
     46   V(Map, fixed_array_map, FixedArrayMap)                                       \
     47   V(Map, code_map, CodeMap)                                                    \
     48   V(Map, scope_info_map, ScopeInfoMap)                                         \
     49   V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
     50   V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
     51   V(Map, constant_pool_array_map, ConstantPoolArrayMap)                        \
     52   V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel)      \
     53   V(Map, hash_table_map, HashTableMap)                                         \
     54   V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
     55   V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
     56   V(ByteArray, empty_byte_array, EmptyByteArray)                               \
     57   V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
     58   V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray)      \
     59   V(Oddball, arguments_marker, ArgumentsMarker)                                \
     60   /* The roots above this line should be boring from a GC point of view.    */ \
     61   /* This means they are never in new space and never on a page that is     */ \
     62   /* being compacted.                                                       */ \
     63   V(FixedArray, number_string_cache, NumberStringCache)                        \
     64   V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
     65   V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
     66   V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
     67   V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
     68   V(FixedArray, string_split_cache, StringSplitCache)                          \
     69   V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
     70   V(Oddball, termination_exception, TerminationException)                      \
     71   V(Smi, hash_seed, HashSeed)                                                  \
     72   V(Map, symbol_map, SymbolMap)                                                \
     73   V(Map, string_map, StringMap)                                                \
     74   V(Map, ascii_string_map, AsciiStringMap)                                     \
     75   V(Map, cons_string_map, ConsStringMap)                                       \
     76   V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
     77   V(Map, sliced_string_map, SlicedStringMap)                                   \
     78   V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
     79   V(Map, external_string_map, ExternalStringMap)                               \
     80   V(Map,                                                                       \
     81     external_string_with_one_byte_data_map,                                    \
     82     ExternalStringWithOneByteDataMap)                                          \
     83   V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
     84   V(Map, short_external_string_map, ShortExternalStringMap)                    \
     85   V(Map,                                                                       \
     86     short_external_string_with_one_byte_data_map,                              \
     87     ShortExternalStringWithOneByteDataMap)                                     \
     88   V(Map, internalized_string_map, InternalizedStringMap)                       \
     89   V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap)            \
     90   V(Map,                                                                       \
     91     external_internalized_string_map,                                          \
     92     ExternalInternalizedStringMap)                                             \
     93   V(Map,                                                                       \
     94     external_internalized_string_with_one_byte_data_map,                       \
     95     ExternalInternalizedStringWithOneByteDataMap)                              \
     96   V(Map,                                                                       \
     97     external_ascii_internalized_string_map,                                    \
     98     ExternalAsciiInternalizedStringMap)                                        \
     99   V(Map,                                                                       \
    100     short_external_internalized_string_map,                                    \
    101     ShortExternalInternalizedStringMap)                                        \
    102   V(Map,                                                                       \
    103     short_external_internalized_string_with_one_byte_data_map,                 \
    104     ShortExternalInternalizedStringWithOneByteDataMap)                         \
    105   V(Map,                                                                       \
    106     short_external_ascii_internalized_string_map,                              \
    107     ShortExternalAsciiInternalizedStringMap)                                   \
    108   V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap)         \
    109   V(Map, undetectable_string_map, UndetectableStringMap)                       \
    110   V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
    111   V(Map, external_int8_array_map, ExternalInt8ArrayMap)                        \
    112   V(Map, external_uint8_array_map, ExternalUint8ArrayMap)                      \
    113   V(Map, external_int16_array_map, ExternalInt16ArrayMap)                      \
    114   V(Map, external_uint16_array_map, ExternalUint16ArrayMap)                    \
    115   V(Map, external_int32_array_map, ExternalInt32ArrayMap)                      \
    116   V(Map, external_uint32_array_map, ExternalUint32ArrayMap)                    \
    117   V(Map, external_float32_array_map, ExternalFloat32ArrayMap)                  \
    118   V(Map, external_float64_array_map, ExternalFloat64ArrayMap)                  \
    119   V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap)       \
    120   V(ExternalArray, empty_external_int8_array,                                  \
    121       EmptyExternalInt8Array)                                                  \
    122   V(ExternalArray, empty_external_uint8_array,                                 \
    123       EmptyExternalUint8Array)                                                 \
    124   V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array)        \
    125   V(ExternalArray, empty_external_uint16_array,                                \
    126       EmptyExternalUint16Array)                                                \
    127   V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array)        \
    128   V(ExternalArray, empty_external_uint32_array,                                \
    129       EmptyExternalUint32Array)                                                \
    130   V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array)    \
    131   V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array)    \
    132   V(ExternalArray, empty_external_uint8_clamped_array,                         \
    133       EmptyExternalUint8ClampedArray)                                          \
    134   V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
    135   V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
    136   V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
    137   V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
    138   V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
    139   V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
    140   V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
    141   V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
    142   V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
    143   V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
    144   V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
    145   V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
    146   V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
    147   V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
    148   V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
    149   V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
    150   V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
    151   V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
    152       EmptyFixedUint8ClampedArray)                                             \
    153   V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
    154   V(Map, function_context_map, FunctionContextMap)                             \
    155   V(Map, catch_context_map, CatchContextMap)                                   \
    156   V(Map, with_context_map, WithContextMap)                                     \
    157   V(Map, block_context_map, BlockContextMap)                                   \
    158   V(Map, module_context_map, ModuleContextMap)                                 \
    159   V(Map, global_context_map, GlobalContextMap)                                 \
    160   V(Map, undefined_map, UndefinedMap)                                          \
    161   V(Map, the_hole_map, TheHoleMap)                                             \
    162   V(Map, null_map, NullMap)                                                    \
    163   V(Map, boolean_map, BooleanMap)                                              \
    164   V(Map, uninitialized_map, UninitializedMap)                                  \
    165   V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
    166   V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap)   \
    167   V(Map, exception_map, ExceptionMap)                                          \
    168   V(Map, termination_exception_map, TerminationExceptionMap)                   \
    169   V(Map, message_object_map, JSMessageObjectMap)                               \
    170   V(Map, foreign_map, ForeignMap)                                              \
    171   V(HeapNumber, nan_value, NanValue)                                           \
    172   V(HeapNumber, infinity_value, InfinityValue)                                 \
    173   V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
    174   V(Map, neander_map, NeanderMap)                                              \
    175   V(JSObject, message_listeners, MessageListeners)                             \
    176   V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
    177   V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
    178   V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
    179   V(Code, js_entry_code, JsEntryCode)                                          \
    180   V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
    181   V(FixedArray, natives_source_cache, NativesSourceCache)                      \
    182   V(Script, empty_script, EmptyScript)                                         \
    183   V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
    184   V(Cell, undefined_cell, UndefineCell)                                        \
    185   V(JSObject, observation_state, ObservationState)                             \
    186   V(Map, external_map, ExternalMap)                                            \
    187   V(Object, symbol_registry, SymbolRegistry)                                   \
    188   V(Symbol, frozen_symbol, FrozenSymbol)                                       \
    189   V(Symbol, nonexistent_symbol, NonExistentSymbol)                             \
    190   V(Symbol, elements_transition_symbol, ElementsTransitionSymbol)              \
    191   V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
    192       EmptySlowElementDictionary)                                              \
    193   V(Symbol, observed_symbol, ObservedSymbol)                                   \
    194   V(Symbol, uninitialized_symbol, UninitializedSymbol)                         \
    195   V(Symbol, megamorphic_symbol, MegamorphicSymbol)                             \
    196   V(FixedArray, materialized_objects, MaterializedObjects)                     \
    197   V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad)        \
    198   V(FixedArray, microtask_queue, MicrotaskQueue)
    199 
    200 // Entries in this list are limited to Smis and are not visited during GC.
    201 #define SMI_ROOT_LIST(V)                                                       \
    202   V(Smi, stack_limit, StackLimit)                                              \
    203   V(Smi, real_stack_limit, RealStackLimit)                                     \
    204   V(Smi, last_script_id, LastScriptId)                                         \
    205   V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
    206   V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
    207   V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
    208   V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
    209 
    210 #define ROOT_LIST(V)                                  \
    211   STRONG_ROOT_LIST(V)                                 \
    212   SMI_ROOT_LIST(V)                                    \
    213   V(StringTable, string_table, StringTable)
    214 
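         // Editorial note (illustrative, not part of the original header):
         // ROOT_LIST and the lists above are X-macros.  A client defines the V
         // macro and instantiates the list once per desired expansion.  For
         // example, a hypothetical count of the strong roots could be
         // generated like this:
         //
         //   #define COUNT_ROOT(type, name, camel_name) +1
         //   static const int kNumberOfStrongRoots = 0 STRONG_ROOT_LIST(COUNT_ROOT);
         //   #undef COUNT_ROOT
         //
         // ROOT_ACCESSOR and ROOT_INDEX_DECLARATION further down in this file
         // use the same pattern to generate the root getters and the
         // RootListIndex enum.
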
    215 // Heap roots that are known to be immortal immovable, for which we can safely
    216 // skip write barriers.
    217 #define IMMORTAL_IMMOVABLE_ROOT_LIST(V)   \
    218   V(byte_array_map)                       \
    219   V(free_space_map)                       \
    220   V(one_pointer_filler_map)               \
    221   V(two_pointer_filler_map)               \
    222   V(undefined_value)                      \
    223   V(the_hole_value)                       \
    224   V(null_value)                           \
    225   V(true_value)                           \
    226   V(false_value)                          \
    227   V(uninitialized_value)                  \
    228   V(cell_map)                             \
    229   V(global_property_cell_map)             \
    230   V(shared_function_info_map)             \
    231   V(meta_map)                             \
    232   V(heap_number_map)                      \
    233   V(native_context_map)                   \
    234   V(fixed_array_map)                      \
    235   V(code_map)                             \
    236   V(scope_info_map)                       \
    237   V(fixed_cow_array_map)                  \
    238   V(fixed_double_array_map)               \
    239   V(constant_pool_array_map)              \
    240   V(no_interceptor_result_sentinel)       \
    241   V(hash_table_map)                       \
    242   V(ordered_hash_table_map)               \
    243   V(empty_fixed_array)                    \
    244   V(empty_byte_array)                     \
    245   V(empty_descriptor_array)               \
    246   V(empty_constant_pool_array)            \
    247   V(arguments_marker)                     \
    248   V(symbol_map)                           \
    249   V(sloppy_arguments_elements_map)        \
    250   V(function_context_map)                 \
    251   V(catch_context_map)                    \
    252   V(with_context_map)                     \
    253   V(block_context_map)                    \
    254   V(module_context_map)                   \
    255   V(global_context_map)                   \
    256   V(undefined_map)                        \
    257   V(the_hole_map)                         \
    258   V(null_map)                             \
    259   V(boolean_map)                          \
    260   V(uninitialized_map)                    \
    261   V(message_object_map)                   \
    262   V(foreign_map)                          \
    263   V(neander_map)
    264 
    265 #define INTERNALIZED_STRING_LIST(V)                                      \
    266   V(Array_string, "Array")                                               \
    267   V(Object_string, "Object")                                             \
    268   V(proto_string, "__proto__")                                           \
    269   V(arguments_string, "arguments")                                       \
    270   V(Arguments_string, "Arguments")                                       \
    271   V(call_string, "call")                                                 \
    272   V(apply_string, "apply")                                               \
    273   V(caller_string, "caller")                                             \
    274   V(boolean_string, "boolean")                                           \
    275   V(Boolean_string, "Boolean")                                           \
    276   V(callee_string, "callee")                                             \
    277   V(constructor_string, "constructor")                                   \
    278   V(dot_result_string, ".result")                                        \
    279   V(dot_for_string, ".for.")                                             \
    280   V(dot_iterable_string, ".iterable")                                    \
    281   V(dot_iterator_string, ".iterator")                                    \
    282   V(dot_generator_object_string, ".generator_object")                    \
    283   V(eval_string, "eval")                                                 \
    284   V(empty_string, "")                                                    \
    285   V(function_string, "function")                                         \
    286   V(length_string, "length")                                             \
    287   V(module_string, "module")                                             \
    288   V(name_string, "name")                                                 \
    289   V(native_string, "native")                                             \
    290   V(null_string, "null")                                                 \
    291   V(number_string, "number")                                             \
    292   V(Number_string, "Number")                                             \
    293   V(nan_string, "NaN")                                                   \
    294   V(RegExp_string, "RegExp")                                             \
    295   V(source_string, "source")                                             \
    296   V(global_string, "global")                                             \
    297   V(ignore_case_string, "ignoreCase")                                    \
    298   V(multiline_string, "multiline")                                       \
    299   V(input_string, "input")                                               \
    300   V(index_string, "index")                                               \
    301   V(last_index_string, "lastIndex")                                      \
    302   V(object_string, "object")                                             \
    303   V(literals_string, "literals")                                         \
    304   V(prototype_string, "prototype")                                       \
    305   V(string_string, "string")                                             \
    306   V(String_string, "String")                                             \
    307   V(symbol_string, "symbol")                                             \
    308   V(Symbol_string, "Symbol")                                             \
    309   V(for_string, "for")                                                   \
    310   V(for_api_string, "for_api")                                           \
    311   V(for_intern_string, "for_intern")                                     \
    312   V(private_api_string, "private_api")                                   \
    313   V(private_intern_string, "private_intern")                             \
    314   V(Date_string, "Date")                                                 \
    315   V(this_string, "this")                                                 \
    316   V(to_string_string, "toString")                                        \
    317   V(char_at_string, "CharAt")                                            \
    318   V(undefined_string, "undefined")                                       \
    319   V(value_of_string, "valueOf")                                          \
    320   V(stack_string, "stack")                                               \
    321   V(toJSON_string, "toJSON")                                             \
    322   V(InitializeVarGlobal_string, "InitializeVarGlobal")                   \
    323   V(InitializeConstGlobal_string, "InitializeConstGlobal")               \
    324   V(KeyedLoadElementMonomorphic_string,                                  \
    325     "KeyedLoadElementMonomorphic")                                       \
    326   V(KeyedStoreElementMonomorphic_string,                                 \
    327     "KeyedStoreElementMonomorphic")                                      \
    328   V(stack_overflow_string, "kStackOverflowBoilerplate")                  \
    329   V(illegal_access_string, "illegal access")                             \
    330   V(get_string, "get")                                                   \
    331   V(set_string, "set")                                                   \
    332   V(map_field_string, "%map")                                            \
    333   V(elements_field_string, "%elements")                                  \
    334   V(length_field_string, "%length")                                      \
    335   V(cell_value_string, "%cell_value")                                    \
    336   V(function_class_string, "Function")                                   \
    337   V(illegal_argument_string, "illegal argument")                         \
    338   V(MakeReferenceError_string, "MakeReferenceError")                     \
    339   V(MakeSyntaxError_string, "MakeSyntaxError")                           \
    340   V(MakeTypeError_string, "MakeTypeError")                               \
    341   V(unknown_label_string, "unknown_label")                               \
    342   V(space_string, " ")                                                   \
    343   V(exec_string, "exec")                                                 \
    344   V(zero_string, "0")                                                    \
    345   V(global_eval_string, "GlobalEval")                                    \
    346   V(identity_hash_string, "v8::IdentityHash")                            \
    347   V(closure_string, "(closure)")                                         \
    348   V(use_strict_string, "use strict")                                     \
    349   V(dot_string, ".")                                                     \
    350   V(anonymous_function_string, "(anonymous function)")                   \
    351   V(compare_ic_string, "==")                                             \
    352   V(strict_compare_ic_string, "===")                                     \
    353   V(infinity_string, "Infinity")                                         \
    354   V(minus_infinity_string, "-Infinity")                                  \
    355   V(hidden_stack_trace_string, "v8::hidden_stack_trace")                 \
    356   V(query_colon_string, "(?:)")                                          \
    357   V(Generator_string, "Generator")                                       \
    358   V(throw_string, "throw")                                               \
    359   V(done_string, "done")                                                 \
    360   V(value_string, "value")                                               \
    361   V(next_string, "next")                                                 \
    362   V(byte_length_string, "byteLength")                                    \
    363   V(byte_offset_string, "byteOffset")                                    \
    364   V(buffer_string, "buffer")                                             \
    365   V(intl_initialized_marker_string, "v8::intl_initialized_marker")       \
    366   V(intl_impl_object_string, "v8::intl_object")
    367 
    368 // Forward declarations.
    369 class GCTracer;
    370 class HeapStats;
    371 class Isolate;
    372 class WeakObjectRetainer;
    373 
    374 
    375 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
    376                                                       Object** pointer);
    377 
    378 class StoreBufferRebuilder {
    379  public:
    380   explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
    381       : store_buffer_(store_buffer) {
    382   }
    383 
    384   void Callback(MemoryChunk* page, StoreBufferEvent event);
    385 
    386  private:
    387   StoreBuffer* store_buffer_;
    388 
    389   // We record in this variable how full the store buffer was when we started
    390   // iterating over the current page, finding pointers to new space.  If the
    391   // store buffer overflows again we can exempt the page from the store buffer
    392   // by rewinding to this point instead of having to search the store buffer.
    393   Object*** start_of_current_page_;
    394   // The current page we are scanning in the store buffer iterator.
    395   MemoryChunk* current_page_;
    396 };
    397 
    398 
    399 
    400 // A queue of objects promoted during scavenge. Each object is accompanied
     401 // by its size to avoid dereferencing a map pointer for scanning.
    402 class PromotionQueue {
    403  public:
    404   explicit PromotionQueue(Heap* heap)
    405       : front_(NULL),
    406         rear_(NULL),
    407         limit_(NULL),
    408         emergency_stack_(0),
    409         heap_(heap) { }
    410 
    411   void Initialize();
    412 
    413   void Destroy() {
    414     ASSERT(is_empty());
    415     delete emergency_stack_;
    416     emergency_stack_ = NULL;
    417   }
    418 
    419   inline void ActivateGuardIfOnTheSamePage();
    420 
    421   Page* GetHeadPage() {
    422     return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
    423   }
    424 
    425   void SetNewLimit(Address limit) {
    426     if (!guard_) {
    427       return;
    428     }
    429 
    430     ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
    431     limit_ = reinterpret_cast<intptr_t*>(limit);
    432 
    433     if (limit_ <= rear_) {
    434       return;
    435     }
    436 
    437     RelocateQueueHead();
    438   }
    439 
    440   bool is_empty() {
    441     return (front_ == rear_) &&
    442         (emergency_stack_ == NULL || emergency_stack_->length() == 0);
    443   }
    444 
    445   inline void insert(HeapObject* target, int size);
    446 
    447   void remove(HeapObject** target, int* size) {
    448     ASSERT(!is_empty());
    449     if (front_ == rear_) {
    450       Entry e = emergency_stack_->RemoveLast();
    451       *target = e.obj_;
    452       *size = e.size_;
    453       return;
    454     }
    455 
    456     if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
    457       NewSpacePage* front_page =
    458           NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
    459       ASSERT(!front_page->prev_page()->is_anchor());
    460       front_ =
    461           reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    462     }
    463     *target = reinterpret_cast<HeapObject*>(*(--front_));
    464     *size = static_cast<int>(*(--front_));
    465     // Assert no underflow.
    466     SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
    467                                 reinterpret_cast<Address>(front_));
    468   }
    469 
    470  private:
    471   // The front of the queue is higher in the memory page chain than the rear.
    472   intptr_t* front_;
    473   intptr_t* rear_;
    474   intptr_t* limit_;
    475 
    476   bool guard_;
    477 
    478   static const int kEntrySizeInWords = 2;
    479 
    480   struct Entry {
    481     Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }
    482 
    483     HeapObject* obj_;
    484     int size_;
    485   };
    486   List<Entry>* emergency_stack_;
    487 
    488   Heap* heap_;
    489 
    490   void RelocateQueueHead();
    491 
    492   DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
    493 };
    494 
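         // Illustrative sketch (added, not in the original source): the queue
         // stores two words per promoted object, so a drain loop in the
         // scavenger would look roughly like the following, where
         // DrainOneObject is a hypothetical helper that re-scans the promoted
         // object:
         //
         //   void DrainPromotionQueue(PromotionQueue* queue) {
         //     while (!queue->is_empty()) {
         //       HeapObject* target;
         //       int size;
         //       queue->remove(&target, &size);  // Pops the object word, then the size.
         //       DrainOneObject(target, size);
         //     }
         //   }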
    495 
    496 typedef void (*ScavengingCallback)(Map* map,
    497                                    HeapObject** slot,
    498                                    HeapObject* object);
    499 
    500 
     501 // The external string table is the place where all external strings are
    502 // registered.  We need to keep track of such strings to properly
    503 // finalize them.
    504 class ExternalStringTable {
    505  public:
    506   // Registers an external string.
    507   inline void AddString(String* string);
    508 
    509   inline void Iterate(ObjectVisitor* v);
    510 
    511   // Restores internal invariant and gets rid of collected strings.
    512   // Must be called after each Iterate() that modified the strings.
    513   void CleanUp();
    514 
    515   // Destroys all allocated memory.
    516   void TearDown();
    517 
    518  private:
    519   explicit ExternalStringTable(Heap* heap) : heap_(heap) { }
    520 
    521   friend class Heap;
    522 
    523   inline void Verify();
    524 
    525   inline void AddOldString(String* string);
    526 
    527   // Notifies the table that only a prefix of the new list is valid.
    528   inline void ShrinkNewStrings(int position);
    529 
     530   // To speed up scavenge collections, new space strings are kept
    531   // separate from old space strings.
    532   List<Object*> new_space_strings_;
    533   List<Object*> old_space_strings_;
    534 
    535   Heap* heap_;
    536 
    537   DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
    538 };
    539 
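         // Illustrative sketch (added, not in the original source): per the
         // comments above, every Iterate() that may have modified the string
         // lists must be followed by CleanUp().  A hypothetical update pass
         // would therefore look roughly like:
         //
         //   void UpdateExternalStrings(ExternalStringTable* table,
         //                              ObjectVisitor* v) {
         //     table->Iterate(v);  // The visitor may update the stored pointers.
         //     table->CleanUp();   // Restores the internal invariant.
         //   }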
    540 
    541 enum ArrayStorageAllocationMode {
    542   DONT_INITIALIZE_ARRAY_ELEMENTS,
    543   INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
    544 };
    545 
    546 
    547 class Heap {
    548  public:
     549   // Configures the heap sizes (in MB) before setup. Returns false if the heap
     550   // has already been set up.
    551   bool ConfigureHeap(int max_semi_space_size,
    552                      int max_old_space_size,
    553                      int max_executable_size,
    554                      size_t code_range_size);
    555   bool ConfigureHeapDefault();
    556 
    557   // Prepares the heap, setting up memory areas that are needed in the isolate
    558   // without actually creating any objects.
    559   bool SetUp();
    560 
    561   // Bootstraps the object heap with the core set of objects required to run.
    562   // Returns whether it succeeded.
    563   bool CreateHeapObjects();
    564 
    565   // Destroys all memory allocated by the heap.
    566   void TearDown();
    567 
    568   // Set the stack limit in the roots_ array.  Some architectures generate
    569   // code that looks here, because it is faster than loading from the static
    570   // jslimit_/real_jslimit_ variable in the StackGuard.
    571   void SetStackLimits();
    572 
    573   // Returns whether SetUp has been called.
    574   bool HasBeenSetUp();
    575 
    576   // Returns the maximum amount of memory reserved for the heap.  For
    577   // the young generation, we reserve 4 times the amount needed for a
    578   // semi space.  The young generation consists of two semi spaces and
    579   // we reserve twice the amount needed for those in order to ensure
    580   // that new space can be aligned to its size.
    581   intptr_t MaxReserved() {
    582     return 4 * reserved_semispace_size_ + max_old_generation_size_;
    583   }
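           // Worked example for MaxReserved() (added for illustration, not in
           // the original source): with a hypothetical 8 MB reserved semispace
           // and a 700 MB old generation limit, the result is
           // 4 * 8 MB + 700 MB = 732 MB.
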
    584   int MaxSemiSpaceSize() { return max_semi_space_size_; }
    585   int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
    586   int InitialSemiSpaceSize() { return initial_semispace_size_; }
    587   intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
    588   intptr_t MaxExecutableSize() { return max_executable_size_; }
    589 
    590   // Returns the capacity of the heap in bytes w/o growing. Heap grows when
    591   // more spaces are needed until it reaches the limit.
    592   intptr_t Capacity();
    593 
    594   // Returns the amount of memory currently committed for the heap.
    595   intptr_t CommittedMemory();
    596 
    597   // Returns the amount of executable memory currently committed for the heap.
    598   intptr_t CommittedMemoryExecutable();
    599 
     600   // Returns the amount of physical memory currently committed for the heap.
    601   size_t CommittedPhysicalMemory();
    602 
    603   // Returns the maximum amount of memory ever committed for the heap.
    604   intptr_t MaximumCommittedMemory() { return maximum_committed_; }
    605 
    606   // Updates the maximum committed memory for the heap. Should be called
    607   // whenever a space grows.
    608   void UpdateMaximumCommitted();
    609 
    610   // Returns the available bytes in space w/o growing.
    611   // Heap doesn't guarantee that it can allocate an object that requires
    612   // all available bytes. Check MaxHeapObjectSize() instead.
    613   intptr_t Available();
    614 
     615   // Returns the size of all objects residing in the heap.
    616   intptr_t SizeOfObjects();
    617 
    618   // Return the starting address and a mask for the new space.  And-masking an
    619   // address with the mask will result in the start address of the new space
    620   // for all addresses in either semispace.
    621   Address NewSpaceStart() { return new_space_.start(); }
    622   uintptr_t NewSpaceMask() { return new_space_.mask(); }
    623   Address NewSpaceTop() { return new_space_.top(); }
    624 
    625   NewSpace* new_space() { return &new_space_; }
    626   OldSpace* old_pointer_space() { return old_pointer_space_; }
    627   OldSpace* old_data_space() { return old_data_space_; }
    628   OldSpace* code_space() { return code_space_; }
    629   MapSpace* map_space() { return map_space_; }
    630   CellSpace* cell_space() { return cell_space_; }
    631   PropertyCellSpace* property_cell_space() {
    632     return property_cell_space_;
    633   }
    634   LargeObjectSpace* lo_space() { return lo_space_; }
    635   PagedSpace* paged_space(int idx) {
    636     switch (idx) {
    637       case OLD_POINTER_SPACE:
    638         return old_pointer_space();
    639       case OLD_DATA_SPACE:
    640         return old_data_space();
    641       case MAP_SPACE:
    642         return map_space();
    643       case CELL_SPACE:
    644         return cell_space();
    645       case PROPERTY_CELL_SPACE:
    646         return property_cell_space();
    647       case CODE_SPACE:
    648         return code_space();
    649       case NEW_SPACE:
    650       case LO_SPACE:
    651         UNREACHABLE();
    652     }
    653     return NULL;
    654   }
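           // Illustrative usage (added, not in the original source): indexing
           // by space identity gives the same space as the named accessors,
           // e.g. for a Heap* named heap (hypothetical variable):
           //
           //   PagedSpace* space = heap->paged_space(CODE_SPACE);
           //   // space == heap->code_space()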
    655 
    656   bool always_allocate() { return always_allocate_scope_depth_ != 0; }
    657   Address always_allocate_scope_depth_address() {
    658     return reinterpret_cast<Address>(&always_allocate_scope_depth_);
    659   }
    660   bool linear_allocation() {
    661     return linear_allocation_scope_depth_ != 0;
    662   }
    663 
    664   Address* NewSpaceAllocationTopAddress() {
    665     return new_space_.allocation_top_address();
    666   }
    667   Address* NewSpaceAllocationLimitAddress() {
    668     return new_space_.allocation_limit_address();
    669   }
    670 
    671   Address* OldPointerSpaceAllocationTopAddress() {
    672     return old_pointer_space_->allocation_top_address();
    673   }
    674   Address* OldPointerSpaceAllocationLimitAddress() {
    675     return old_pointer_space_->allocation_limit_address();
    676   }
    677 
    678   Address* OldDataSpaceAllocationTopAddress() {
    679     return old_data_space_->allocation_top_address();
    680   }
    681   Address* OldDataSpaceAllocationLimitAddress() {
    682     return old_data_space_->allocation_limit_address();
    683   }
    684 
    685   // Returns a deep copy of the JavaScript object.
    686   // Properties and elements are copied too.
    687   // Optionally takes an AllocationSite to be appended in an AllocationMemento.
    688   MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
    689                                                 AllocationSite* site = NULL);
    690 
    691   // Clear the Instanceof cache (used when a prototype changes).
    692   inline void ClearInstanceofCache();
    693 
    694   // Iterates the whole code space to clear all ICs of the given kind.
    695   void ClearAllICsByKind(Code::Kind kind);
    696 
    697   // For use during bootup.
    698   void RepairFreeListsAfterBoot();
    699 
    700   template<typename T>
    701   static inline bool IsOneByte(T t, int chars);
    702 
     703   // Moves len elements within a given array from index src_index to index
     704   // dst_index.
    705   void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
    706 
    707   // Sloppy mode arguments object size.
    708   static const int kSloppyArgumentsObjectSize =
    709       JSObject::kHeaderSize + 2 * kPointerSize;
     710   // Strict mode arguments have no callee, so the object is smaller.
    711   static const int kStrictArgumentsObjectSize =
    712       JSObject::kHeaderSize + 1 * kPointerSize;
     713   // Indices for direct access into argument objects.
    714   static const int kArgumentsLengthIndex = 0;
    715   // callee is only valid in sloppy mode.
    716   static const int kArgumentsCalleeIndex = 1;
    717 
    718   // Finalizes an external string by deleting the associated external
    719   // data and clearing the resource pointer.
    720   inline void FinalizeExternalString(String* string);
    721 
    722   // Initialize a filler object to keep the ability to iterate over the heap
    723   // when shortening objects.
    724   void CreateFillerObjectAt(Address addr, int size);
    725 
    726   bool CanMoveObjectStart(HeapObject* object);
    727 
    728   enum InvocationMode { FROM_GC, FROM_MUTATOR };
    729 
    730   // Maintain marking consistency for IncrementalMarking.
    731   void AdjustLiveBytes(Address address, int by, InvocationMode mode);
    732 
    733   // Converts the given boolean condition to JavaScript boolean value.
    734   inline Object* ToBoolean(bool condition);
    735 
     736   // Performs a garbage collection operation.
    737   // Returns whether there is a chance that another major GC could
    738   // collect more garbage.
    739   inline bool CollectGarbage(
    740       AllocationSpace space,
    741       const char* gc_reason = NULL,
    742       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
    743 
    744   static const int kNoGCFlags = 0;
    745   static const int kSweepPreciselyMask = 1;
    746   static const int kReduceMemoryFootprintMask = 2;
    747   static const int kAbortIncrementalMarkingMask = 4;
    748 
    749   // Making the heap iterable requires us to sweep precisely and abort any
    750   // incremental marking as well.
    751   static const int kMakeHeapIterableMask =
    752       kSweepPreciselyMask | kAbortIncrementalMarkingMask;
    753 
    754   // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
    755   // non-zero, then the slower precise sweeper is used, which leaves the heap
    756   // in a state where we can iterate over the heap visiting all objects.
    757   void CollectAllGarbage(
    758       int flags,
    759       const char* gc_reason = NULL,
    760       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
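           // Illustrative usage (added, not in the original source): a caller
           // that needs to walk the heap afterwards would request a precise,
           // non-incremental collection, e.g. (heap is a hypothetical Heap*,
           // the reason string is arbitrary):
           //
           //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
           //                           "make heap iterable");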
    761 
     762   // Last-resort GC; it should try to squeeze out as much garbage as possible.
    763   void CollectAllAvailableGarbage(const char* gc_reason = NULL);
    764 
    765   // Check whether the heap is currently iterable.
    766   bool IsHeapIterable();
    767 
    768   // Notify the heap that a context has been disposed.
    769   int NotifyContextDisposed();
    770 
    771   inline void increment_scan_on_scavenge_pages() {
    772     scan_on_scavenge_pages_++;
    773     if (FLAG_gc_verbose) {
    774       PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    775     }
    776   }
    777 
    778   inline void decrement_scan_on_scavenge_pages() {
    779     scan_on_scavenge_pages_--;
    780     if (FLAG_gc_verbose) {
    781       PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    782     }
    783   }
    784 
    785   PromotionQueue* promotion_queue() { return &promotion_queue_; }
    786 
    787   void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
    788                              GCType gc_type_filter,
    789                              bool pass_isolate = true);
    790   void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);
    791 
    792   void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
    793                              GCType gc_type_filter,
    794                              bool pass_isolate = true);
    795   void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);
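           // Illustrative sketch (added, not in the original source): a
           // prologue callback is registered together with a GC type filter,
           // assuming the usual v8::Isolate::GCPrologueCallback signature of
           // (Isolate*, GCType, GCCallbackFlags):
           //
           //   static void OnGCStart(v8::Isolate* isolate, GCType type,
           //                         GCCallbackFlags flags) {
           //     // E.g. record a timestamp or bump a counter here.
           //   }
           //   ...
           //   heap->AddGCPrologueCallback(OnGCStart, kGCTypeMarkSweepCompact);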
    796 
    797   // Heap root getters.  We have versions with and without type::cast() here.
    798   // You can't use type::cast during GC because the assert fails.
    799   // TODO(1490): Try removing the unchecked accessors, now that GC marking does
    800   // not corrupt the map.
    801 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
    802   type* name() {                                                               \
    803     return type::cast(roots_[k##camel_name##RootIndex]);                       \
    804   }                                                                            \
    805   type* raw_unchecked_##name() {                                               \
    806     return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
    807   }
    808   ROOT_LIST(ROOT_ACCESSOR)
    809 #undef ROOT_ACCESSOR
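           // Illustrative expansion (added, not in the original source): for
           // the ROOT_LIST entry V(Oddball, undefined_value, UndefinedValue),
           // the ROOT_ACCESSOR macro above generates roughly
           //
           //   Oddball* undefined_value() {
           //     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
           //   }
           //   Oddball* raw_unchecked_undefined_value() {
           //     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
           //   }
           //
           // so every root in ROOT_LIST gets a checked and an unchecked getter.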
    810 
    811 // Utility type maps
    812 #define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
    813     Map* name##_map() {                                                        \
    814       return Map::cast(roots_[k##Name##MapRootIndex]);                         \
    815     }
    816   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
    817 #undef STRUCT_MAP_ACCESSOR
    818 
    819 #define STRING_ACCESSOR(name, str) String* name() {                            \
    820     return String::cast(roots_[k##name##RootIndex]);                           \
    821   }
    822   INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
    823 #undef STRING_ACCESSOR
    824 
    825   // The hidden_string is special because it is the empty string, but does
    826   // not match the empty string.
    827   String* hidden_string() { return hidden_string_; }
    828 
    829   void set_native_contexts_list(Object* object) {
    830     native_contexts_list_ = object;
    831   }
    832   Object* native_contexts_list() const { return native_contexts_list_; }
    833 
    834   void set_array_buffers_list(Object* object) {
    835     array_buffers_list_ = object;
    836   }
    837   Object* array_buffers_list() const { return array_buffers_list_; }
    838 
    839   void set_allocation_sites_list(Object* object) {
    840     allocation_sites_list_ = object;
    841   }
    842   Object* allocation_sites_list() { return allocation_sites_list_; }
    843 
    844   // Used in CreateAllocationSiteStub and the (de)serializer.
    845   Object** allocation_sites_list_address() { return &allocation_sites_list_; }
    846 
    847   Object* weak_object_to_code_table() { return weak_object_to_code_table_; }
    848 
    849   void set_encountered_weak_collections(Object* weak_collection) {
    850     encountered_weak_collections_ = weak_collection;
    851   }
    852   Object* encountered_weak_collections() const {
    853     return encountered_weak_collections_;
    854   }
    855 
    856   // Number of mark-sweeps.
    857   unsigned int ms_count() { return ms_count_; }
    858 
    859   // Iterates over all roots in the heap.
    860   void IterateRoots(ObjectVisitor* v, VisitMode mode);
    861   // Iterates over all strong roots in the heap.
    862   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
    863   // Iterates over entries in the smi roots list.  Only interesting to the
    864   // serializer/deserializer, since GC does not care about smis.
    865   void IterateSmiRoots(ObjectVisitor* v);
    866   // Iterates over all the other roots in the heap.
    867   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
    868 
     869   // Iterates over pointers into the from-semispace of new space found in the
     870   // memory interval from start to end.
    871   void IterateAndMarkPointersToFromSpace(Address start,
    872                                          Address end,
    873                                          ObjectSlotCallback callback);
    874 
    875   // Returns whether the object resides in new space.
    876   inline bool InNewSpace(Object* object);
    877   inline bool InNewSpace(Address address);
    878   inline bool InNewSpacePage(Address address);
    879   inline bool InFromSpace(Object* object);
    880   inline bool InToSpace(Object* object);
    881 
    882   // Returns whether the object resides in old pointer space.
    883   inline bool InOldPointerSpace(Address address);
    884   inline bool InOldPointerSpace(Object* object);
    885 
    886   // Returns whether the object resides in old data space.
    887   inline bool InOldDataSpace(Address address);
    888   inline bool InOldDataSpace(Object* object);
    889 
     890   // Checks whether an address/object is in the heap (including auxiliary
    891   // area and unused area).
    892   bool Contains(Address addr);
    893   bool Contains(HeapObject* value);
    894 
     895   // Checks whether an address/object is in a given space.
    896   // Currently used by tests, serialization and heap verification only.
    897   bool InSpace(Address addr, AllocationSpace space);
    898   bool InSpace(HeapObject* value, AllocationSpace space);
    899 
    900   // Finds out which space an object should get promoted to based on its type.
    901   inline OldSpace* TargetSpace(HeapObject* object);
    902   static inline AllocationSpace TargetSpaceId(InstanceType type);
    903 
    904   // Checks whether the given object is allowed to be migrated from it's
    905   // current space into the given destination space. Used for debugging.
    906   inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
    907 
    908   // Sets the stub_cache_ (only used when expanding the dictionary).
    909   void public_set_code_stubs(UnseededNumberDictionary* value) {
    910     roots_[kCodeStubsRootIndex] = value;
    911   }
    912 
    913   // Support for computing object sizes for old objects during GCs. Returns
    914   // a function that is guaranteed to be safe for computing object sizes in
    915   // the current GC phase.
    916   HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    917     return gc_safe_size_of_old_object_;
    918   }
    919 
    920   // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
    921   void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    922     roots_[kNonMonomorphicCacheRootIndex] = value;
    923   }
    924 
    925   void public_set_empty_script(Script* script) {
    926     roots_[kEmptyScriptRootIndex] = script;
    927   }
    928 
    929   void public_set_store_buffer_top(Address* top) {
    930     roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
    931   }
    932 
    933   void public_set_materialized_objects(FixedArray* objects) {
    934     roots_[kMaterializedObjectsRootIndex] = objects;
    935   }
    936 
    937   // Generated code can embed this address to get access to the roots.
    938   Object** roots_array_start() { return roots_; }
    939 
    940   Address* store_buffer_top_address() {
    941     return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
    942   }
    943 
    944 #ifdef VERIFY_HEAP
    945   // Verify the heap is in its normal state before or after a GC.
    946   void Verify();
    947 
    948 
    949   bool weak_embedded_objects_verification_enabled() {
    950     return no_weak_object_verification_scope_depth_ == 0;
    951   }
    952 #endif
    953 
    954 #ifdef DEBUG
    955   void Print();
    956   void PrintHandles();
    957 
    958   void OldPointerSpaceCheckStoreBuffer();
    959   void MapSpaceCheckStoreBuffer();
    960   void LargeObjectSpaceCheckStoreBuffer();
    961 
    962   // Report heap statistics.
    963   void ReportHeapStatistics(const char* title);
    964   void ReportCodeStatistics(const char* title);
    965 #endif
    966 
     967   // Zapping is needed for heap verification, and is always done in debug builds.
    968   static inline bool ShouldZapGarbage() {
    969 #ifdef DEBUG
    970     return true;
    971 #else
    972 #ifdef VERIFY_HEAP
    973     return FLAG_verify_heap;
    974 #else
    975     return false;
    976 #endif
    977 #endif
    978   }
    979 
    980   // Print short heap statistics.
    981   void PrintShortHeapStatistics();
    982 
    983   // Write barrier support for address[offset] = o.
    984   INLINE(void RecordWrite(Address address, int offset));
    985 
    986   // Write barrier support for address[start : start + len[ = o.
    987   INLINE(void RecordWrites(Address address, int start, int len));
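           // Illustrative usage (added, not in the original source): after a
           // pointer store into an object field, the store is recorded so that
           // the store buffer and incremental marker stay consistent.  Field
           // and offset names below are hypothetical:
           //
           //   object->set_foo(value);
           //   heap->RecordWrite(object->address(), kFooOffset);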
    988 
    989   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
    990   inline HeapState gc_state() { return gc_state_; }
    991 
    992   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
    993 
    994 #ifdef DEBUG
    995   void set_allocation_timeout(int timeout) {
    996     allocation_timeout_ = timeout;
    997   }
    998 
    999   void TracePathToObjectFrom(Object* target, Object* root);
   1000   void TracePathToObject(Object* target);
   1001   void TracePathToGlobal();
   1002 #endif
   1003 
   1004   // Callback function passed to Heap::Iterate etc.  Copies an object if
    1005   // necessary; the object might be promoted to an old space.  The caller must
   1006   // ensure the precondition that the object is (a) a heap object and (b) in
   1007   // the heap's from space.
   1008   static inline void ScavengePointer(HeapObject** p);
   1009   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
   1010 
   1011   enum ScratchpadSlotMode {
   1012     IGNORE_SCRATCHPAD_SLOT,
   1013     RECORD_SCRATCHPAD_SLOT
   1014   };
   1015 
   1016   // If an object has an AllocationMemento trailing it, return it, otherwise
    1017   // return NULL.
   1018   inline AllocationMemento* FindAllocationMemento(HeapObject* object);
   1019 
   1020   // An object may have an AllocationSite associated with it through a trailing
   1021   // AllocationMemento. Its feedback should be updated when objects are found
   1022   // in the heap.
   1023   static inline void UpdateAllocationSiteFeedback(
   1024       HeapObject* object, ScratchpadSlotMode mode);
   1025 
   1026   // Support for partial snapshots.  After calling this we have a linear
   1027   // space to write objects in each space.
   1028   void ReserveSpace(int *sizes, Address* addresses);
   1029 
   1030   //
   1031   // Support for the API.
   1032   //
   1033 
   1034   void CreateApiObjects();
   1035 
   1036   inline intptr_t PromotedTotalSize() {
   1037     int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
   1038     if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
   1039     if (total < 0) return 0;
   1040     return static_cast<intptr_t>(total);
   1041   }
   1042 
   1043   inline intptr_t OldGenerationSpaceAvailable() {
   1044     return old_generation_allocation_limit_ - PromotedTotalSize();
   1045   }
   1046 
   1047   inline intptr_t OldGenerationCapacityAvailable() {
   1048     return max_old_generation_size_ - PromotedTotalSize();
   1049   }
   1050 
   1051   static const intptr_t kMinimumOldGenerationAllocationLimit =
   1052       8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
   1053 
   1054   static const int kPointerMultiplier = i::kPointerSize / 4;
   1055 
   1056   // The new space size has to be a power of 2. Sizes are in MB.
   1057   static const int kMaxSemiSpaceSizeLowMemoryDevice =
   1058       1 * kPointerMultiplier;
   1059   static const int kMaxSemiSpaceSizeMediumMemoryDevice =
   1060       4 * kPointerMultiplier;
   1061   static const int kMaxSemiSpaceSizeHighMemoryDevice =
   1062       8 * kPointerMultiplier;
   1063   static const int kMaxSemiSpaceSizeHugeMemoryDevice =
   1064       8 * kPointerMultiplier;
   1065 
   1066   // The old space size has to be a multiple of Page::kPageSize.
   1067   // Sizes are in MB.
   1068   static const int kMaxOldSpaceSizeLowMemoryDevice =
   1069       128 * kPointerMultiplier;
   1070   static const int kMaxOldSpaceSizeMediumMemoryDevice =
   1071       256 * kPointerMultiplier;
   1072   static const int kMaxOldSpaceSizeHighMemoryDevice =
   1073       512 * kPointerMultiplier;
   1074   static const int kMaxOldSpaceSizeHugeMemoryDevice =
   1075       700 * kPointerMultiplier;
   1076 
   1077   // The executable size has to be a multiple of Page::kPageSize.
   1078   // Sizes are in MB.
   1079   static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
   1080   static const int kMaxExecutableSizeMediumMemoryDevice =
   1081       192 * kPointerMultiplier;
   1082   static const int kMaxExecutableSizeHighMemoryDevice =
   1083       256 * kPointerMultiplier;
   1084   static const int kMaxExecutableSizeHugeMemoryDevice =
   1085       256 * kPointerMultiplier;
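
           // Worked example of the scaling above: on a 64-bit target kPointerSize is 8,
           // so kPointerMultiplier is 8 / 4 = 2 and kMaxSemiSpaceSizeHighMemoryDevice
           // becomes 8 * 2 = 16 MB, while a 32-bit target keeps the multiplier at 1 and
           // the same constant at 8 MB. The old space and executable limits scale the
           // same way.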
   1086 
   1087   intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size,
   1088                                         int freed_global_handles);
   1089 
   1090   // Indicates whether inline bump-pointer allocation has been disabled.
   1091   bool inline_allocation_disabled() { return inline_allocation_disabled_; }
   1092 
   1093   // Switch whether inline bump-pointer allocation should be used.
   1094   void EnableInlineAllocation();
   1095   void DisableInlineAllocation();
   1096 
   1097   // Implements the corresponding V8 API function.
   1098   bool IdleNotification(int hint);
   1099 
   1100   // Declare all the root indices.  This defines the root list order.
   1101   enum RootListIndex {
   1102 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
   1103     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
   1104 #undef ROOT_INDEX_DECLARATION
   1105 
   1106 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
   1107     INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
    1108 #undef STRING_INDEX_DECLARATION
   1109 
   1110     // Utility type maps
   1111 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
   1112     STRUCT_LIST(DECLARE_STRUCT_MAP)
   1113 #undef DECLARE_STRUCT_MAP
   1114 
   1115     kStringTableRootIndex,
   1116 
   1117 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
   1118     SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
   1119 #undef ROOT_INDEX_DECLARATION
   1120 
   1121     kRootListLength,
   1122     kStrongRootListLength = kStringTableRootIndex,
   1123     kSmiRootsStart = kStringTableRootIndex + 1
   1124   };
   1125 
   1126   STATIC_ASSERT(kUndefinedValueRootIndex ==
   1127                 Internals::kUndefinedValueRootIndex);
   1128   STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
   1129   STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
   1130   STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
   1131   STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
   1132 
   1133   // Generated code can embed direct references to non-writable roots if
   1134   // they are in new space.
   1135   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
   1136   // Generated code can treat direct references to this root as constant.
   1137   bool RootCanBeTreatedAsConstant(RootListIndex root_index);
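
           // A minimal sketch of the intended check before generated code embeds a root
           // directly (hypothetical call site):
           //
           //   Heap* heap = isolate->heap();
           //   if (heap->RootCanBeTreatedAsConstant(Heap::kTrueValueRootIndex)) {
           //     // The root may be baked into the generated code as a constant.
           //   }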
   1138 
   1139   Map* MapForFixedTypedArray(ExternalArrayType array_type);
   1140   RootListIndex RootIndexForFixedTypedArray(
   1141       ExternalArrayType array_type);
   1142 
   1143   Map* MapForExternalArrayType(ExternalArrayType array_type);
   1144   RootListIndex RootIndexForExternalArrayType(
   1145       ExternalArrayType array_type);
   1146 
   1147   RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
   1148   RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
   1149   ExternalArray* EmptyExternalArrayForMap(Map* map);
   1150   FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);
   1151 
   1152   void RecordStats(HeapStats* stats, bool take_snapshot = false);
   1153 
    1154   // Copy a block of memory from src to dst. The size of the block must be
    1155   // pointer-size aligned.
   1156   static inline void CopyBlock(Address dst, Address src, int byte_size);
   1157 
   1158   // Optimized version of memmove for blocks with pointer size aligned sizes and
   1159   // pointer size aligned addresses.
   1160   static inline void MoveBlock(Address dst, Address src, int byte_size);
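
           // A minimal sketch, assuming dst, src and byte_size already satisfy the
           // pointer-size alignment contract stated above (names are hypothetical):
           //
           //   Heap::CopyBlock(dst, src, byte_size);   // plain aligned copy
           //   Heap::MoveBlock(dst, src, byte_size);   // memmove semantics per the comment above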
   1161 
    1162   // Check the new space expansion criteria and expand semispaces if they were hit.
   1163   void CheckNewSpaceExpansionCriteria();
   1164 
   1165   inline void IncrementPromotedObjectsSize(int object_size) {
   1166     ASSERT(object_size > 0);
   1167     promoted_objects_size_ += object_size;
   1168   }
   1169 
   1170   inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
   1171     ASSERT(object_size > 0);
   1172     semi_space_copied_object_size_ += object_size;
   1173   }
   1174 
   1175   inline void IncrementYoungSurvivorsCounter(int survived) {
   1176     ASSERT(survived >= 0);
   1177     survived_since_last_expansion_ += survived;
   1178   }
   1179 
   1180   inline bool NextGCIsLikelyToBeFull() {
   1181     if (FLAG_gc_global) return true;
   1182 
   1183     if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
   1184 
   1185     intptr_t adjusted_allocation_limit =
   1186         old_generation_allocation_limit_ - new_space_.Capacity();
   1187 
   1188     if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
   1189 
   1190     return false;
   1191   }
   1192 
   1193   void UpdateNewSpaceReferencesInExternalStringTable(
   1194       ExternalStringTableUpdaterCallback updater_func);
   1195 
   1196   void UpdateReferencesInExternalStringTable(
   1197       ExternalStringTableUpdaterCallback updater_func);
   1198 
   1199   void ProcessWeakReferences(WeakObjectRetainer* retainer);
   1200 
   1201   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
   1202 
   1203   // Helper function that governs the promotion policy from new space to
   1204   // old.  If the object's old address lies below the new space's age
   1205   // mark or if we've already filled the bottom 1/16th of the to space,
   1206   // we try to promote this object.
   1207   inline bool ShouldBePromoted(Address old_address, int object_size);
   1208 
   1209   void ClearJSFunctionResultCaches();
   1210 
   1211   void ClearNormalizedMapCaches();
   1212 
   1213   GCTracer* tracer() { return tracer_; }
   1214 
    1215   // Returns the size of objects residing in non-new spaces.
   1216   intptr_t PromotedSpaceSizeOfObjects();
   1217 
   1218   double total_regexp_code_generated() { return total_regexp_code_generated_; }
   1219   void IncreaseTotalRegexpCodeGenerated(int size) {
   1220     total_regexp_code_generated_ += size;
   1221   }
   1222 
   1223   void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
   1224     if (is_crankshafted) {
   1225       crankshaft_codegen_bytes_generated_ += size;
   1226     } else {
   1227       full_codegen_bytes_generated_ += size;
   1228     }
   1229   }
   1230 
   1231   // Returns maximum GC pause.
   1232   double get_max_gc_pause() { return max_gc_pause_; }
   1233 
   1234   // Returns maximum size of objects alive after GC.
   1235   intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
   1236 
   1237   // Returns minimal interval between two subsequent collections.
   1238   double get_min_in_mutator() { return min_in_mutator_; }
   1239 
   1240   // TODO(hpayer): remove, should be handled by GCTracer
   1241   void AddMarkingTime(double marking_time) {
   1242     marking_time_ += marking_time;
   1243   }
   1244 
   1245   double marking_time() const {
   1246     return marking_time_;
   1247   }
   1248 
   1249   // TODO(hpayer): remove, should be handled by GCTracer
   1250   void AddSweepingTime(double sweeping_time) {
   1251     sweeping_time_ += sweeping_time;
   1252   }
   1253 
   1254   double sweeping_time() const {
   1255     return sweeping_time_;
   1256   }
   1257 
   1258   MarkCompactCollector* mark_compact_collector() {
   1259     return &mark_compact_collector_;
   1260   }
   1261 
   1262   StoreBuffer* store_buffer() {
   1263     return &store_buffer_;
   1264   }
   1265 
   1266   Marking* marking() {
   1267     return &marking_;
   1268   }
   1269 
   1270   IncrementalMarking* incremental_marking() {
   1271     return &incremental_marking_;
   1272   }
   1273 
   1274   ExternalStringTable* external_string_table() {
   1275     return &external_string_table_;
   1276   }
   1277 
   1278   // Returns the current sweep generation.
   1279   int sweep_generation() {
   1280     return sweep_generation_;
   1281   }
   1282 
   1283   inline Isolate* isolate();
   1284 
   1285   void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
   1286   void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
   1287 
   1288   inline bool OldGenerationAllocationLimitReached();
   1289 
   1290   inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
   1291     scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
   1292   }
   1293 
   1294   void QueueMemoryChunkForFree(MemoryChunk* chunk);
   1295   void FreeQueuedChunks();
   1296 
   1297   int gc_count() const { return gc_count_; }
   1298 
   1299   // Completely clear the Instanceof cache (to stop it keeping objects alive
   1300   // around a GC).
   1301   inline void CompletelyClearInstanceofCache();
   1302 
   1303   // The roots that have an index less than this are always in old space.
   1304   static const int kOldSpaceRoots = 0x20;
   1305 
   1306   uint32_t HashSeed() {
   1307     uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
   1308     ASSERT(FLAG_randomize_hashes || seed == 0);
   1309     return seed;
   1310   }
   1311 
   1312   void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
   1313     ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
   1314     set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
   1315   }
   1316 
   1317   void SetConstructStubDeoptPCOffset(int pc_offset) {
   1318     ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
   1319     set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
   1320   }
   1321 
   1322   void SetGetterStubDeoptPCOffset(int pc_offset) {
   1323     ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
   1324     set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
   1325   }
   1326 
   1327   void SetSetterStubDeoptPCOffset(int pc_offset) {
   1328     ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
   1329     set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
   1330   }
   1331 
   1332   // For post mortem debugging.
   1333   void RememberUnmappedPage(Address page, bool compacted);
   1334 
   1335   // Global inline caching age: it is incremented on some GCs after context
   1336   // disposal. We use it to flush inline caches.
   1337   int global_ic_age() {
   1338     return global_ic_age_;
   1339   }
   1340 
   1341   void AgeInlineCaches() {
   1342     global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
   1343   }
   1344 
   1345   bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
   1346 
   1347   int64_t amount_of_external_allocated_memory() {
   1348     return amount_of_external_allocated_memory_;
   1349   }
   1350 
   1351   void DeoptMarkedAllocationSites();
   1352 
   1353   bool MaximumSizeScavenge() {
   1354     return maximum_size_scavenges_ > 0;
   1355   }
   1356 
   1357   bool DeoptMaybeTenuredAllocationSites() {
   1358     return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
   1359   }
   1360 
   1361   // ObjectStats are kept in two arrays, counts and sizes. Related stats are
   1362   // stored in a contiguous linear buffer. Stats groups are stored one after
   1363   // another.
   1364   enum {
   1365     FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
   1366     FIRST_FIXED_ARRAY_SUB_TYPE =
   1367         FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
   1368     FIRST_CODE_AGE_SUB_TYPE =
   1369         FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
   1370     OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
   1371   };
   1372 
   1373   void RecordObjectStats(InstanceType type, size_t size) {
   1374     ASSERT(type <= LAST_TYPE);
   1375     object_counts_[type]++;
   1376     object_sizes_[type] += size;
   1377   }
   1378 
   1379   void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
   1380     int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
   1381     int code_age_index =
   1382         FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
   1383     ASSERT(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
   1384            code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
   1385     ASSERT(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
   1386            code_age_index < OBJECT_STATS_COUNT);
   1387     object_counts_[code_sub_type_index]++;
   1388     object_sizes_[code_sub_type_index] += size;
   1389     object_counts_[code_age_index]++;
   1390     object_sizes_[code_age_index] += size;
   1391   }
   1392 
   1393   void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
   1394     ASSERT(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
   1395     object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
   1396     object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
   1397   }
   1398 
   1399   void CheckpointObjectStats();
   1400 
   1401   // We don't use a LockGuard here since we want to lock the heap
   1402   // only when FLAG_concurrent_recompilation is true.
   1403   class RelocationLock {
   1404    public:
   1405     explicit RelocationLock(Heap* heap) : heap_(heap) {
   1406       heap_->relocation_mutex_.Lock();
   1407     }
   1408 
   1409 
   1410     ~RelocationLock() {
   1411       heap_->relocation_mutex_.Unlock();
   1412     }
   1413 
   1414    private:
   1415     Heap* heap_;
   1416   };
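
           // A minimal usage sketch, assuming a concurrent recompilation thread that
           // must not observe objects while the collector relocates them (hypothetical
           // scope):
           //
           //   {
           //     Heap::RelocationLock relocation_lock(isolate->heap());
           //     // Safe to read data that relocation could otherwise move underneath us.
           //   }  // relocation_mutex_ is released here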
   1417 
   1418   void AddWeakObjectToCodeDependency(Handle<Object> obj,
   1419                                      Handle<DependentCode> dep);
   1420 
   1421   DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);
   1422 
   1423   void InitializeWeakObjectToCodeTable() {
   1424     set_weak_object_to_code_table(undefined_value());
   1425   }
   1426 
   1427   void EnsureWeakObjectToCodeTable();
   1428 
   1429   static void FatalProcessOutOfMemory(const char* location,
   1430                                       bool take_snapshot = false);
   1431 
   1432  protected:
   1433   // Methods made available to tests.
   1434 
   1435   // Allocates a JS Map in the heap.
   1436   MUST_USE_RESULT AllocationResult AllocateMap(
   1437       InstanceType instance_type,
   1438       int instance_size,
   1439       ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
   1440 
   1441   // Allocates and initializes a new JavaScript object based on a
   1442   // constructor.
   1443   // If allocation_site is non-null, then a memento is emitted after the object
   1444   // that points to the site.
   1445   MUST_USE_RESULT AllocationResult AllocateJSObject(
   1446       JSFunction* constructor,
   1447       PretenureFlag pretenure = NOT_TENURED,
   1448       AllocationSite* allocation_site = NULL);
   1449 
   1450   // Allocates and initializes a new JavaScript object based on a map.
   1451   // Passing an allocation site means that a memento will be created that
   1452   // points to the site.
   1453   MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap(
   1454       Map* map,
   1455       PretenureFlag pretenure = NOT_TENURED,
   1456       bool alloc_props = true,
   1457       AllocationSite* allocation_site = NULL);
   1458 
    1459   // Allocates a HeapNumber from value.
   1460   MUST_USE_RESULT AllocationResult AllocateHeapNumber(
   1461       double value, PretenureFlag pretenure = NOT_TENURED);
   1462 
   1463   // Allocate a byte array of the specified length
   1464   MUST_USE_RESULT AllocationResult AllocateByteArray(
   1465       int length,
   1466       PretenureFlag pretenure = NOT_TENURED);
   1467 
   1468   // Allocates an arguments object - optionally with an elements array.
   1469   MUST_USE_RESULT AllocationResult AllocateArgumentsObject(
   1470       Object* callee, int length);
   1471 
   1472   // Copy the code and scope info part of the code object, but insert
   1473   // the provided data as the relocation information.
   1474   MUST_USE_RESULT AllocationResult CopyCode(Code* code,
   1475                                             Vector<byte> reloc_info);
   1476 
   1477   MUST_USE_RESULT AllocationResult CopyCode(Code* code);
   1478 
   1479   // Allocates a fixed array initialized with undefined values
   1480   MUST_USE_RESULT AllocationResult AllocateFixedArray(
   1481       int length,
   1482       PretenureFlag pretenure = NOT_TENURED);
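
           // A minimal sketch of how a test with access to these protected helpers might
           // drive one of them (hypothetical test body; AllocationResult::To is assumed
           // to be the unwrapping helper, and regular code goes through the Factory):
           //
           //   AllocationResult allocation = heap->AllocateFixedArray(8, TENURED);
           //   Object* obj = NULL;
           //   if (allocation.To(&obj)) {
           //     FixedArray* array = FixedArray::cast(obj);
           //     // use |array| in the test
           //   }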
   1483 
   1484  private:
   1485   Heap();
   1486 
   1487   // The amount of external memory registered through the API kept alive
   1488   // by global handles
   1489   int64_t amount_of_external_allocated_memory_;
   1490 
   1491   // Caches the amount of external memory registered at the last global gc.
   1492   int64_t amount_of_external_allocated_memory_at_last_global_gc_;
   1493 
   1494   // This can be calculated directly from a pointer to the heap; however, it is
   1495   // more expedient to get at the isolate directly from within Heap methods.
   1496   Isolate* isolate_;
   1497 
   1498   Object* roots_[kRootListLength];
   1499 
   1500   size_t code_range_size_;
   1501   int reserved_semispace_size_;
   1502   int max_semi_space_size_;
   1503   int initial_semispace_size_;
   1504   intptr_t max_old_generation_size_;
   1505   intptr_t max_executable_size_;
   1506   intptr_t maximum_committed_;
   1507 
   1508   // For keeping track of how much data has survived
   1509   // scavenge since last new space expansion.
   1510   int survived_since_last_expansion_;
   1511 
   1512   // For keeping track on when to flush RegExp code.
    1513   // For keeping track of when to flush RegExp code.
   1514 
   1515   int always_allocate_scope_depth_;
   1516   int linear_allocation_scope_depth_;
   1517 
   1518   // For keeping track of context disposals.
   1519   int contexts_disposed_;
   1520 
   1521   int global_ic_age_;
   1522 
   1523   bool flush_monomorphic_ics_;
   1524 
   1525   int scan_on_scavenge_pages_;
   1526 
   1527   NewSpace new_space_;
   1528   OldSpace* old_pointer_space_;
   1529   OldSpace* old_data_space_;
   1530   OldSpace* code_space_;
   1531   MapSpace* map_space_;
   1532   CellSpace* cell_space_;
   1533   PropertyCellSpace* property_cell_space_;
   1534   LargeObjectSpace* lo_space_;
   1535   HeapState gc_state_;
   1536   int gc_post_processing_depth_;
   1537   Address new_space_top_after_last_gc_;
   1538 
   1539   // Returns the amount of external memory registered since last global gc.
   1540   int64_t PromotedExternalMemorySize();
   1541 
   1542   unsigned int ms_count_;  // how many mark-sweep collections happened
   1543   unsigned int gc_count_;  // how many gc happened
   1544 
   1545   // For post mortem debugging.
   1546   static const int kRememberedUnmappedPages = 128;
   1547   int remembered_unmapped_pages_index_;
   1548   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
   1549 
   1550   // Total length of the strings we failed to flatten since the last GC.
   1551   int unflattened_strings_length_;
   1552 
   1553 #define ROOT_ACCESSOR(type, name, camel_name)                                  \
   1554   inline void set_##name(type* value) {                                        \
   1555     /* The deserializer makes use of the fact that these common roots are */   \
   1556     /* never in new space and never on a page that is being compacted.    */   \
   1557     ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value));  \
   1558     roots_[k##camel_name##RootIndex] = value;                                  \
   1559   }
   1560   ROOT_LIST(ROOT_ACCESSOR)
   1561 #undef ROOT_ACCESSOR
   1562 
   1563 #ifdef DEBUG
   1564   // If the --gc-interval flag is set to a positive value, this
    1565   // variable holds the number of allocations remaining until the next
    1566   // failure and garbage collection.
   1567   int allocation_timeout_;
   1568 #endif  // DEBUG
   1569 
   1570   // Limit that triggers a global GC on the next (normally caused) GC.  This
   1571   // is checked when we have already decided to do a GC to help determine
   1572   // which collector to invoke, before expanding a paged space in the old
   1573   // generation and on every allocation in large object space.
   1574   intptr_t old_generation_allocation_limit_;
   1575 
   1576   // Indicates that an allocation has failed in the old generation since the
   1577   // last GC.
   1578   bool old_gen_exhausted_;
   1579 
   1580   // Indicates that inline bump-pointer allocation has been globally disabled
   1581   // for all spaces. This is used to disable allocations in generated code.
   1582   bool inline_allocation_disabled_;
   1583 
   1584   // Weak list heads, threaded through the objects.
    1585   // List heads are initialized lazily and contain the undefined_value at start.
   1586   Object* native_contexts_list_;
   1587   Object* array_buffers_list_;
   1588   Object* allocation_sites_list_;
   1589 
   1590   // WeakHashTable that maps objects embedded in optimized code to dependent
    1591   // code list. It is initialized lazily and contains the undefined_value at
   1592   // start.
   1593   Object* weak_object_to_code_table_;
   1594 
   1595   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   1596   // marking. It is initialized during marking, destroyed after marking and
   1597   // contains Smi(0) while marking is not active.
   1598   Object* encountered_weak_collections_;
   1599 
   1600   StoreBufferRebuilder store_buffer_rebuilder_;
   1601 
   1602   struct StringTypeTable {
   1603     InstanceType type;
   1604     int size;
   1605     RootListIndex index;
   1606   };
   1607 
   1608   struct ConstantStringTable {
   1609     const char* contents;
   1610     RootListIndex index;
   1611   };
   1612 
   1613   struct StructTable {
   1614     InstanceType type;
   1615     int size;
   1616     RootListIndex index;
   1617   };
   1618 
   1619   static const StringTypeTable string_type_table[];
   1620   static const ConstantStringTable constant_string_table[];
   1621   static const StructTable struct_table[];
   1622 
   1623   // The special hidden string which is an empty string, but does not match
   1624   // any string when looked up in properties.
   1625   String* hidden_string_;
   1626 
   1627   // GC callback function, called before and after mark-compact GC.
   1628   // Allocations in the callback function are disallowed.
   1629   struct GCPrologueCallbackPair {
   1630     GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
   1631                            GCType gc_type,
   1632                            bool pass_isolate)
   1633         : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
   1634     }
   1635     bool operator==(const GCPrologueCallbackPair& pair) const {
   1636       return pair.callback == callback;
   1637     }
   1638     v8::Isolate::GCPrologueCallback callback;
   1639     GCType gc_type;
   1640     // TODO(dcarney): remove variable
   1641     bool pass_isolate_;
   1642   };
   1643   List<GCPrologueCallbackPair> gc_prologue_callbacks_;
   1644 
   1645   struct GCEpilogueCallbackPair {
   1646     GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
   1647                            GCType gc_type,
   1648                            bool pass_isolate)
   1649         : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
   1650     }
   1651     bool operator==(const GCEpilogueCallbackPair& pair) const {
   1652       return pair.callback == callback;
   1653     }
   1654     v8::Isolate::GCPrologueCallback callback;
   1655     GCType gc_type;
   1656     // TODO(dcarney): remove variable
   1657     bool pass_isolate_;
   1658   };
   1659   List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
   1660 
   1661   // Support for computing object sizes during GC.
   1662   HeapObjectCallback gc_safe_size_of_old_object_;
   1663   static int GcSafeSizeOfOldObject(HeapObject* object);
   1664 
   1665   // Update the GC state. Called from the mark-compact collector.
   1666   void MarkMapPointersAsEncoded(bool encoded) {
   1667     ASSERT(!encoded);
   1668     gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
   1669   }
   1670 
   1671   // Code that should be run before and after each GC.  Includes some
   1672   // reporting/verification activities when compiled with DEBUG set.
   1673   void GarbageCollectionPrologue();
   1674   void GarbageCollectionEpilogue();
   1675 
   1676   // Pretenuring decisions are made based on feedback collected during new
   1677   // space evacuation. Note that between feedback collection and calling this
    1678   // method, objects in old space must not move.
   1679   // Right now we only process pretenuring feedback in high promotion mode.
   1680   void ProcessPretenuringFeedback();
   1681 
   1682   // Checks whether a global GC is necessary
   1683   GarbageCollector SelectGarbageCollector(AllocationSpace space,
   1684                                           const char** reason);
   1685 
   1686   // Make sure there is a filler value behind the top of the new space
    1687   // so that the GC does not confuse some uninitialized/stale memory
    1688   // with the allocation memento of the object at the top.
   1689   void EnsureFillerObjectAtTop();
   1690 
   1691   // Ensure that we have swept all spaces in such a way that we can iterate
   1692   // over all objects.  May cause a GC.
   1693   void MakeHeapIterable();
   1694 
   1695   // Performs garbage collection operation.
   1696   // Returns whether there is a chance that another major GC could
   1697   // collect more garbage.
   1698   bool CollectGarbage(
   1699       GarbageCollector collector,
   1700       const char* gc_reason,
   1701       const char* collector_reason,
   1702       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1703 
   1704   // Performs garbage collection
   1705   // Returns whether there is a chance another major GC could
   1706   // collect more garbage.
   1707   bool PerformGarbageCollection(
   1708       GarbageCollector collector,
   1709       GCTracer* tracer,
   1710       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1711 
   1712   inline void UpdateOldSpaceLimits();
   1713 
   1714   // Selects the proper allocation space depending on the given object
   1715   // size, pretenuring decision, and preferred old-space.
   1716   static AllocationSpace SelectSpace(int object_size,
   1717                                      AllocationSpace preferred_old_space,
   1718                                      PretenureFlag pretenure) {
   1719     ASSERT(preferred_old_space == OLD_POINTER_SPACE ||
   1720            preferred_old_space == OLD_DATA_SPACE);
   1721     if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
   1722     return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
   1723   }
   1724 
   1725   // Allocate an uninitialized object.  The memory is non-executable if the
   1726   // hardware and OS allow.  This is the single choke-point for allocations
   1727   // performed by the runtime and should not be bypassed (to extend this to
   1728   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   1729   MUST_USE_RESULT inline AllocationResult AllocateRaw(
   1730       int size_in_bytes,
   1731       AllocationSpace space,
   1732       AllocationSpace retry_space);
   1733 
   1734   // Allocates a heap object based on the map.
   1735   MUST_USE_RESULT AllocationResult Allocate(
   1736       Map* map,
   1737       AllocationSpace space,
   1738       AllocationSite* allocation_site = NULL);
   1739 
   1740   // Allocates a partial map for bootstrapping.
   1741   MUST_USE_RESULT AllocationResult AllocatePartialMap(
   1742       InstanceType instance_type,
   1743       int instance_size);
   1744 
   1745   // Initializes a JSObject based on its map.
   1746   void InitializeJSObjectFromMap(JSObject* obj,
   1747                                  FixedArray* properties,
   1748                                  Map* map);
   1749   void InitializeAllocationMemento(AllocationMemento* memento,
   1750                                    AllocationSite* allocation_site);
   1751 
   1752   // Allocate a block of memory in the given space (filled with a filler).
   1753   // Used as a fall-back for generated code when the space is full.
   1754   MUST_USE_RESULT AllocationResult AllocateFillerObject(int size,
   1755                                                     bool double_align,
   1756                                                     AllocationSpace space);
   1757 
   1758   // Allocate an uninitialized fixed array.
   1759   MUST_USE_RESULT AllocationResult AllocateRawFixedArray(
   1760       int length, PretenureFlag pretenure);
   1761 
   1762   // Allocate an uninitialized fixed double array.
   1763   MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray(
   1764       int length, PretenureFlag pretenure);
   1765 
   1766   // Allocate an initialized fixed array with the given filler value.
   1767   MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller(
   1768       int length, PretenureFlag pretenure, Object* filler);
   1769 
    1770   // Allocates and partially initializes a String.  There are two String
   1771   // encodings: ASCII and two byte.  These functions allocate a string of the
   1772   // given length and set its map and length fields.  The characters of the
   1773   // string are uninitialized.
   1774   MUST_USE_RESULT AllocationResult AllocateRawOneByteString(
   1775       int length, PretenureFlag pretenure);
   1776   MUST_USE_RESULT AllocationResult AllocateRawTwoByteString(
   1777       int length, PretenureFlag pretenure);
   1778 
   1779   bool CreateInitialMaps();
   1780   void CreateInitialObjects();
   1781 
   1782   // Allocates an internalized string in old space based on the character
   1783   // stream.
   1784   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
   1785       Vector<const char> str,
   1786       int chars,
   1787       uint32_t hash_field);
   1788 
   1789   MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
   1790         Vector<const uint8_t> str,
   1791         uint32_t hash_field);
   1792 
   1793   MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
   1794         Vector<const uc16> str,
   1795         uint32_t hash_field);
   1796 
   1797   template<bool is_one_byte, typename T>
   1798   MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl(
   1799       T t, int chars, uint32_t hash_field);
   1800 
   1801   template<typename T>
   1802   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
   1803       T t, int chars, uint32_t hash_field);
   1804 
   1805   // Allocates an uninitialized fixed array. It must be filled by the caller.
   1806   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);
   1807 
   1808   // Make a copy of src and return it. Returns
   1809   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
   1810   MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
   1811 
   1812   // Make a copy of src, set the map, and return the copy. Returns
   1813   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
   1814   MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray* src,
   1815                                                          Map* map);
   1816 
   1817   // Make a copy of src and return it. Returns
   1818   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
   1819   MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
   1820       FixedDoubleArray* src);
   1821 
   1822   // Make a copy of src and return it. Returns
   1823   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
   1824   MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray(
   1825       ConstantPoolArray* src);
   1826 
   1827 
    1828   // Computes a single character string whose character has the given code.
   1829   // A cache is used for ASCII codes.
   1830   MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode(
   1831       uint16_t code);
   1832 
   1833   // Allocate a symbol in old space.
   1834   MUST_USE_RESULT AllocationResult AllocateSymbol();
   1835 
   1836   // Make a copy of src, set the map, and return the copy.
   1837   MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap(
   1838       ConstantPoolArray* src, Map* map);
   1839 
   1840   MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
   1841       const ConstantPoolArray::NumberOfEntries& small);
   1842 
   1843   MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
   1844       const ConstantPoolArray::NumberOfEntries& small,
   1845       const ConstantPoolArray::NumberOfEntries& extended);
   1846 
   1847   // Allocates an external array of the specified length and type.
   1848   MUST_USE_RESULT AllocationResult AllocateExternalArray(
   1849       int length,
   1850       ExternalArrayType array_type,
   1851       void* external_pointer,
   1852       PretenureFlag pretenure);
   1853 
   1854   // Allocates a fixed typed array of the specified length and type.
   1855   MUST_USE_RESULT AllocationResult AllocateFixedTypedArray(
   1856       int length,
   1857       ExternalArrayType array_type,
   1858       PretenureFlag pretenure);
   1859 
   1860   // Make a copy of src and return it.
   1861   MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
   1862 
   1863   // Make a copy of src, set the map, and return the copy.
   1864   MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap(
   1865       FixedDoubleArray* src, Map* map);
   1866 
    1867   // Allocates a fixed double array with uninitialized values.
   1868   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
   1869       int length,
   1870       PretenureFlag pretenure = NOT_TENURED);
   1871 
    1872   // These Create*EntryStub functions are here and forced to not be inlined
   1873   // because of a gcc-4.4 bug that assigns wrong vtable entries.
   1874   NO_INLINE(void CreateJSEntryStub());
   1875   NO_INLINE(void CreateJSConstructEntryStub());
   1876 
   1877   void CreateFixedStubs();
   1878 
   1879   // Allocate empty fixed array.
   1880   MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
   1881 
   1882   // Allocate empty external array of given type.
   1883   MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray(
   1884       ExternalArrayType array_type);
   1885 
   1886   // Allocate empty fixed typed array of given type.
   1887   MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(
   1888       ExternalArrayType array_type);
   1889 
   1890   // Allocate empty constant pool array.
   1891   MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray();
   1892 
   1893   // Allocate a tenured simple cell.
   1894   MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
   1895 
   1896   // Allocate a tenured JS global property cell initialized with the hole.
   1897   MUST_USE_RESULT AllocationResult AllocatePropertyCell();
   1898 
   1899   // Allocates a new utility object in the old generation.
   1900   MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
   1901 
   1902   // Allocates a new foreign object.
   1903   MUST_USE_RESULT AllocationResult AllocateForeign(
   1904       Address address, PretenureFlag pretenure = NOT_TENURED);
   1905 
   1906   MUST_USE_RESULT AllocationResult AllocateCode(int object_size,
   1907                                                 bool immovable);
   1908 
   1909   MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);
   1910 
   1911   MUST_USE_RESULT AllocationResult InternalizeString(String* str);
   1912 
   1913   // Performs a minor collection in new generation.
   1914   void Scavenge();
   1915 
   1916   // Commits from space if it is uncommitted.
   1917   void EnsureFromSpaceIsCommitted();
   1918 
   1919   // Uncommit unused semi space.
   1920   bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
   1921 
   1922   // Fill in bogus values in from space
   1923   void ZapFromSpace();
   1924 
   1925   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
   1926       Heap* heap,
   1927       Object** pointer);
   1928 
   1929   Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
   1930   static void ScavengeStoreBufferCallback(Heap* heap,
   1931                                           MemoryChunk* page,
   1932                                           StoreBufferEvent event);
   1933 
   1934   // Performs a major collection in the whole heap.
   1935   void MarkCompact(GCTracer* tracer);
   1936 
   1937   // Code to be run before and after mark-compact.
   1938   void MarkCompactPrologue();
   1939 
   1940   void ProcessNativeContexts(WeakObjectRetainer* retainer);
   1941   void ProcessArrayBuffers(WeakObjectRetainer* retainer);
   1942   void ProcessAllocationSites(WeakObjectRetainer* retainer);
   1943 
    1944   // Deopts all code that contains allocation instructions which are tenured or
    1945   // not tenured. Moreover, it clears the pretenuring allocation site statistics.
   1946   void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
   1947 
   1948   // Evaluates local pretenuring for the old space and calls
   1949   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
   1950   // the old space.
   1951   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
   1952 
   1953   // Called on heap tear-down.
   1954   void TearDownArrayBuffers();
   1955 
   1956   // Record statistics before and after garbage collection.
   1957   void ReportStatisticsBeforeGC();
   1958   void ReportStatisticsAfterGC();
   1959 
   1960   // Slow part of scavenge object.
   1961   static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
   1962 
   1963   // Total RegExp code ever generated
   1964   double total_regexp_code_generated_;
   1965 
   1966   GCTracer* tracer_;
   1967 
   1968   // Creates and installs the full-sized number string cache.
   1969   int FullSizeNumberStringCacheLength();
   1970   // Flush the number to string cache.
   1971   void FlushNumberStringCache();
   1972 
   1973   // Sets used allocation sites entries to undefined.
   1974   void FlushAllocationSitesScratchpad();
   1975 
   1976   // Initializes the allocation sites scratchpad with undefined values.
   1977   void InitializeAllocationSitesScratchpad();
   1978 
   1979   // Adds an allocation site to the scratchpad if there is space left.
   1980   void AddAllocationSiteToScratchpad(AllocationSite* site,
   1981                                      ScratchpadSlotMode mode);
   1982 
   1983   void UpdateSurvivalStatistics(int start_new_space_size);
   1984 
   1985   static const int kYoungSurvivalRateHighThreshold = 90;
   1986   static const int kYoungSurvivalRateAllowedDeviation = 15;
   1987 
   1988   static const int kOldSurvivalRateLowThreshold = 10;
   1989 
   1990   int high_survival_rate_period_length_;
   1991   intptr_t promoted_objects_size_;
   1992   double promotion_rate_;
   1993   intptr_t semi_space_copied_object_size_;
   1994   double semi_space_copied_rate_;
   1995 
   1996   // This is the pretenuring trigger for allocation sites that are in maybe
    1997   // tenure state. When we switch to the maximum new space size we deoptimize
   1998   // the code that belongs to the allocation site and derive the lifetime
   1999   // of the allocation site.
   2000   unsigned int maximum_size_scavenges_;
   2001 
   2002   // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
   2003   // Re-visit incremental marking heuristics.
   2004   bool IsHighSurvivalRate() {
   2005     return high_survival_rate_period_length_ > 0;
   2006   }
   2007 
   2008   void SelectScavengingVisitorsTable();
   2009 
   2010   void StartIdleRound() {
   2011     mark_sweeps_since_idle_round_started_ = 0;
   2012   }
   2013 
   2014   void FinishIdleRound() {
   2015     mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
   2016     scavenges_since_last_idle_round_ = 0;
   2017   }
   2018 
   2019   bool EnoughGarbageSinceLastIdleRound() {
   2020     return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
   2021   }
   2022 
   2023   // Estimates how many milliseconds a Mark-Sweep would take to complete.
    2024   // In the idle notification handler we assume that this function will return:
    2025   // - a number less than 10 for small heaps, which are less than 8 MB.
    2026   // - a number greater than 10 for large heaps, which are greater than 32 MB.
   2027   int TimeMarkSweepWouldTakeInMs() {
   2028     // Rough estimate of how many megabytes of heap can be processed in 1 ms.
   2029     static const int kMbPerMs = 2;
   2030 
   2031     int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
   2032     return heap_size_mb / kMbPerMs;
   2033   }
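
           // Worked example of the estimate: an 8 MB heap yields 8 / 2 = 4 ms and a
           // 64 MB heap yields 64 / 2 = 32 ms, matching the expectations stated above.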
   2034 
   2035   // Returns true if no more GC work is left.
   2036   bool IdleGlobalGC();
   2037 
   2038   void AdvanceIdleIncrementalMarking(intptr_t step_size);
   2039 
   2040   void ClearObjectStats(bool clear_last_time_stats = false);
   2041 
   2042   void set_weak_object_to_code_table(Object* value) {
   2043     ASSERT(!InNewSpace(value));
   2044     weak_object_to_code_table_ = value;
   2045   }
   2046 
   2047   Object** weak_object_to_code_table_address() {
   2048     return &weak_object_to_code_table_;
   2049   }
   2050 
   2051   static const int kInitialStringTableSize = 2048;
   2052   static const int kInitialEvalCacheSize = 64;
   2053   static const int kInitialNumberStringCacheSize = 256;
   2054 
   2055   // Object counts and used memory by InstanceType
   2056   size_t object_counts_[OBJECT_STATS_COUNT];
   2057   size_t object_counts_last_time_[OBJECT_STATS_COUNT];
   2058   size_t object_sizes_[OBJECT_STATS_COUNT];
   2059   size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
   2060 
   2061   // Maximum GC pause.
   2062   double max_gc_pause_;
   2063 
   2064   // Total time spent in GC.
   2065   double total_gc_time_ms_;
   2066 
   2067   // Maximum size of objects alive after GC.
   2068   intptr_t max_alive_after_gc_;
   2069 
   2070   // Minimal interval between two subsequent collections.
   2071   double min_in_mutator_;
   2072 
   2073   // Size of objects alive after last GC.
   2074   intptr_t alive_after_last_gc_;
   2075 
   2076   double last_gc_end_timestamp_;
   2077 
   2078   // Cumulative GC time spent in marking
   2079   double marking_time_;
   2080 
   2081   // Cumulative GC time spent in sweeping
   2082   double sweeping_time_;
   2083 
   2084   MarkCompactCollector mark_compact_collector_;
   2085 
   2086   StoreBuffer store_buffer_;
   2087 
   2088   Marking marking_;
   2089 
   2090   IncrementalMarking incremental_marking_;
   2091 
   2092   int number_idle_notifications_;
   2093   unsigned int last_idle_notification_gc_count_;
   2094   bool last_idle_notification_gc_count_init_;
   2095 
   2096   int mark_sweeps_since_idle_round_started_;
   2097   unsigned int gc_count_at_last_idle_gc_;
   2098   int scavenges_since_last_idle_round_;
   2099 
    2100   // These two counters are monotonically increasing and never reset.
   2101   size_t full_codegen_bytes_generated_;
   2102   size_t crankshaft_codegen_bytes_generated_;
   2103 
   2104   // If the --deopt_every_n_garbage_collections flag is set to a positive value,
   2105   // this variable holds the number of garbage collections since the last
   2106   // deoptimization triggered by garbage collection.
   2107   int gcs_since_last_deopt_;
   2108 
   2109 #ifdef VERIFY_HEAP
   2110   int no_weak_object_verification_scope_depth_;
   2111 #endif
   2112 
   2113   static const int kAllocationSiteScratchpadSize = 256;
   2114   int allocation_sites_scratchpad_length_;
   2115 
   2116   static const int kMaxMarkSweepsInIdleRound = 7;
   2117   static const int kIdleScavengeThreshold = 5;
   2118 
   2119   // Shared state read by the scavenge collector and set by ScavengeObject.
   2120   PromotionQueue promotion_queue_;
   2121 
   2122   // Flag is set when the heap has been configured.  The heap can be repeatedly
   2123   // configured through the API until it is set up.
   2124   bool configured_;
   2125 
   2126   ExternalStringTable external_string_table_;
   2127 
   2128   VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
   2129 
   2130   MemoryChunk* chunks_queued_for_free_;
   2131 
   2132   Mutex relocation_mutex_;
   2133 
   2134   int gc_callbacks_depth_;
   2135 
   2136   friend class AlwaysAllocateScope;
   2137   friend class Factory;
   2138   friend class GCCallbacksScope;
   2139   friend class GCTracer;
   2140   friend class HeapIterator;
   2141   friend class Isolate;
   2142   friend class MarkCompactCollector;
   2143   friend class MarkCompactMarkingVisitor;
   2144   friend class MapCompact;
   2145 #ifdef VERIFY_HEAP
   2146   friend class NoWeakObjectVerificationScope;
   2147 #endif
   2148   friend class Page;
   2149 
   2150   DISALLOW_COPY_AND_ASSIGN(Heap);
   2151 };
   2152 
   2153 
   2154 class HeapStats {
   2155  public:
   2156   static const int kStartMarker = 0xDECADE00;
   2157   static const int kEndMarker = 0xDECADE01;
   2158 
   2159   int* start_marker;                    //  0
   2160   int* new_space_size;                  //  1
   2161   int* new_space_capacity;              //  2
   2162   intptr_t* old_pointer_space_size;          //  3
   2163   intptr_t* old_pointer_space_capacity;      //  4
   2164   intptr_t* old_data_space_size;             //  5
   2165   intptr_t* old_data_space_capacity;         //  6
   2166   intptr_t* code_space_size;                 //  7
   2167   intptr_t* code_space_capacity;             //  8
   2168   intptr_t* map_space_size;                  //  9
   2169   intptr_t* map_space_capacity;              // 10
   2170   intptr_t* cell_space_size;                 // 11
   2171   intptr_t* cell_space_capacity;             // 12
   2172   intptr_t* lo_space_size;                   // 13
   2173   int* global_handle_count;             // 14
   2174   int* weak_global_handle_count;        // 15
   2175   int* pending_global_handle_count;     // 16
   2176   int* near_death_global_handle_count;  // 17
   2177   int* free_global_handle_count;        // 18
   2178   intptr_t* memory_allocator_size;           // 19
   2179   intptr_t* memory_allocator_capacity;       // 20
   2180   int* objects_per_type;                // 21
   2181   int* size_per_type;                   // 22
   2182   int* os_error;                        // 23
   2183   int* end_marker;                      // 24
   2184   intptr_t* property_cell_space_size;   // 25
   2185   intptr_t* property_cell_space_capacity;    // 26
   2186 };
   2187 
   2188 
   2189 class AlwaysAllocateScope {
   2190  public:
   2191   explicit inline AlwaysAllocateScope(Isolate* isolate);
   2192   inline ~AlwaysAllocateScope();
   2193 
   2194  private:
   2195   // Implicitly disable artificial allocation failures.
   2196   Heap* heap_;
   2197   DisallowAllocationFailure daf_;
   2198 };
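
         // A minimal usage sketch, assuming code that must not be hit by artificially
         // induced allocation failures (e.g. from --gc-interval) while it runs
         // (hypothetical scope):
         //
         //   {
         //     AlwaysAllocateScope always_allocate(isolate);
         //     // Allocations here are exempt from induced failures.
         //   }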
   2199 
   2200 
   2201 #ifdef VERIFY_HEAP
   2202 class NoWeakObjectVerificationScope {
   2203  public:
   2204   inline NoWeakObjectVerificationScope();
   2205   inline ~NoWeakObjectVerificationScope();
   2206 };
   2207 #endif
   2208 
   2209 
   2210 class GCCallbacksScope {
   2211  public:
   2212   explicit inline GCCallbacksScope(Heap* heap);
   2213   inline ~GCCallbacksScope();
   2214 
   2215   inline bool CheckReenter();
   2216 
   2217  private:
   2218   Heap* heap_;
   2219 };
   2220 
   2221 
   2222 // Visitor class to verify interior pointers in spaces that do not contain
   2223 // or care about intergenerational references. All heap object pointers have to
   2224 // point into the heap to a location that has a map pointer at its first word.
   2225 // Caveat: Heap::Contains is an approximation because it can return true for
   2226 // objects in a heap space but above the allocation pointer.
   2227 class VerifyPointersVisitor: public ObjectVisitor {
   2228  public:
   2229   inline void VisitPointers(Object** start, Object** end);
   2230 };
   2231 
   2232 
   2233 // Verify that all objects are Smis.
   2234 class VerifySmisVisitor: public ObjectVisitor {
   2235  public:
   2236   inline void VisitPointers(Object** start, Object** end);
   2237 };
   2238 
   2239 
   2240 // Space iterator for iterating over all spaces of the heap.  Returns each space
   2241 // in turn, and null when it is done.
   2242 class AllSpaces BASE_EMBEDDED {
   2243  public:
   2244   explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
   2245   Space* next();
   2246  private:
   2247   Heap* heap_;
   2248   int counter_;
   2249 };
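
         // A minimal iteration sketch shared by AllSpaces, OldSpaces and PagedSpaces
         // (hypothetical loop):
         //
         //   AllSpaces spaces(heap);
         //   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
         //     // inspect |space|
         //   }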
   2250 
   2251 
   2252 // Space iterator for iterating over all old spaces of the heap: Old pointer
   2253 // space, old data space and code space.  Returns each space in turn, and null
   2254 // when it is done.
   2255 class OldSpaces BASE_EMBEDDED {
   2256  public:
   2257   explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
   2258   OldSpace* next();
   2259  private:
   2260   Heap* heap_;
   2261   int counter_;
   2262 };
   2263 
   2264 
   2265 // Space iterator for iterating over all the paged spaces of the heap: Map
   2266 // space, old pointer space, old data space, code space and cell space.  Returns
   2267 // each space in turn, and null when it is done.
   2268 class PagedSpaces BASE_EMBEDDED {
   2269  public:
   2270   explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
   2271   PagedSpace* next();
   2272  private:
   2273   Heap* heap_;
   2274   int counter_;
   2275 };
   2276 
   2277 
   2278 // Space iterator for iterating over all spaces of the heap.
   2279 // For each space an object iterator is provided. The deallocation of the
   2280 // returned object iterators is handled by the space iterator.
   2281 class SpaceIterator : public Malloced {
   2282  public:
   2283   explicit SpaceIterator(Heap* heap);
   2284   SpaceIterator(Heap* heap, HeapObjectCallback size_func);
   2285   virtual ~SpaceIterator();
   2286 
   2287   bool has_next();
   2288   ObjectIterator* next();
   2289 
   2290  private:
   2291   ObjectIterator* CreateIterator();
   2292 
   2293   Heap* heap_;
   2294   int current_space_;  // from enum AllocationSpace.
   2295   ObjectIterator* iterator_;  // object iterator for the current space.
   2296   HeapObjectCallback size_func_;
   2297 };
   2298 
   2299 
   2300 // A HeapIterator provides iteration over the whole heap. It
   2301 // aggregates the specific iterators for the different spaces as
    2302   // these can each iterate over only one space.
   2303 //
   2304 // HeapIterator ensures there is no allocation during its lifetime
   2305 // (using an embedded DisallowHeapAllocation instance).
   2306 //
   2307 // HeapIterator can skip free list nodes (that is, de-allocated heap
   2308 // objects that still remain in the heap). As implementation of free
    2309   // objects that still remain in the heap). As the implementation of free
    2310   // node filtering uses GC marks, it can't be used during MS/MC GC
   2311 // as this will leave heap objects marked (and thus, unusable).
   2312 class HeapObjectsFilter;
   2313 
   2314 class HeapIterator BASE_EMBEDDED {
   2315  public:
   2316   enum HeapObjectsFiltering {
   2317     kNoFiltering,
   2318     kFilterUnreachable
   2319   };
   2320 
   2321   explicit HeapIterator(Heap* heap);
   2322   HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
   2323   ~HeapIterator();
   2324 
   2325   HeapObject* next();
   2326   void reset();
   2327 
   2328  private:
   2329   struct MakeHeapIterableHelper {
   2330     explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
   2331   };
   2332 
   2333   // Perform the initialization.
   2334   void Init();
   2335   // Perform all necessary shutdown (destruction) work.
   2336   void Shutdown();
   2337   HeapObject* NextObject();
   2338 
   2339   MakeHeapIterableHelper make_heap_iterable_helper_;
   2340   DisallowHeapAllocation no_heap_allocation_;
   2341   Heap* heap_;
   2342   HeapObjectsFiltering filtering_;
   2343   HeapObjectsFilter* filter_;
   2344   // Space iterator for iterating all the spaces.
   2345   SpaceIterator* space_iterator_;
   2346   // Object iterator for the space currently being iterated.
   2347   ObjectIterator* object_iterator_;
   2348 };
   2349 
   2350 
   2351 // Cache for mapping (map, property name) into field offset.
   2352 // Cleared at startup and prior to mark sweep collection.
   2353 class KeyedLookupCache {
   2354  public:
   2355   // Lookup field offset for (map, name). If absent, -1 is returned.
   2356   int Lookup(Handle<Map> map, Handle<Name> name);
   2357 
   2358   // Update an element in the cache.
   2359   void Update(Handle<Map> map, Handle<Name> name, int field_offset);
   2360 
   2361   // Clear the cache.
   2362   void Clear();
   2363 
   2364   static const int kLength = 256;
   2365   static const int kCapacityMask = kLength - 1;
   2366   static const int kMapHashShift = 5;
   2367   static const int kHashMask = -4;  // Zero the last two bits.
   2368   static const int kEntriesPerBucket = 4;
   2369   static const int kEntryLength = 2;
   2370   static const int kMapIndex = 0;
   2371   static const int kKeyIndex = 1;
   2372   static const int kNotFound = -1;
   2373 
   2374   // kEntriesPerBucket should be a power of 2.
   2375   STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
   2376   STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
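
           // A minimal sketch of the intended probe/update pattern (the offset
           // computation on a miss is a hypothetical placeholder):
           //
           //   int offset = cache->Lookup(map, name);
           //   if (offset == KeyedLookupCache::kNotFound) {
           //     offset = ComputeFieldOffset(map, name);  // hypothetical helper
           //     cache->Update(map, name, offset);
           //   }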
   2377 
   2378  private:
   2379   KeyedLookupCache() {
   2380     for (int i = 0; i < kLength; ++i) {
   2381       keys_[i].map = NULL;
   2382       keys_[i].name = NULL;
   2383       field_offsets_[i] = kNotFound;
   2384     }
   2385   }
   2386 
   2387   static inline int Hash(Handle<Map> map, Handle<Name> name);
   2388 
   2389   // Get the address of the keys and field_offsets arrays.  Used in
   2390   // generated code to perform cache lookups.
   2391   Address keys_address() {
   2392     return reinterpret_cast<Address>(&keys_);
   2393   }
   2394 
   2395   Address field_offsets_address() {
   2396     return reinterpret_cast<Address>(&field_offsets_);
   2397   }
   2398 
   2399   struct Key {
   2400     Map* map;
   2401     Name* name;
   2402   };
   2403 
   2404   Key keys_[kLength];
   2405   int field_offsets_[kLength];
   2406 
   2407   friend class ExternalReference;
   2408   friend class Isolate;
   2409   DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
   2410 };
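// Illustrative sketch (editor's addition): the intended lookup/update
// protocol. The cache instance is assumed to be obtained from the owning
// Isolate (the constructor is private and Isolate is a friend); the accessor
// name below is an assumption, and the slow path is left abstract.
//
//   KeyedLookupCache* cache = isolate->keyed_lookup_cache();
//   int offset = cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = ...;  // Slow path: perform the full property lookup.
//     cache->Update(map, name, offset);
//   }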
   2411 
   2412 
   2413 // Cache for mapping (map, property name) into descriptor index.
   2414 // The cache contains both positive and negative results.
   2415 // A descriptor index of kAbsent means the property is absent.
   2416 // Cleared at startup and prior to any GC.
   2417 class DescriptorLookupCache {
   2418  public:
   2419   // Lookup descriptor index for (map, name).
   2420   // If absent, kAbsent is returned.
   2421   int Lookup(Map* source, Name* name) {
   2422     if (!name->IsUniqueName()) return kAbsent;
   2423     int index = Hash(source, name);
   2424     Key& key = keys_[index];
   2425     if ((key.source == source) && (key.name == name)) return results_[index];
   2426     return kAbsent;
   2427   }
   2428 
   2429   // Update an element in the cache.
   2430   void Update(Map* source, Name* name, int result) {
   2431     ASSERT(result != kAbsent);
   2432     if (name->IsUniqueName()) {
   2433       int index = Hash(source, name);
   2434       Key& key = keys_[index];
   2435       key.source = source;
   2436       key.name = name;
   2437       results_[index] = result;
   2438     }
   2439   }
   2440 
   2441   // Clear the cache.
   2442   void Clear();
   2443 
   2444   static const int kAbsent = -2;
   2445 
   2446  private:
   2447   DescriptorLookupCache() {
   2448     for (int i = 0; i < kLength; ++i) {
   2449       keys_[i].source = NULL;
   2450       keys_[i].name = NULL;
   2451       results_[i] = kAbsent;
   2452     }
   2453   }
   2454 
   2455   static int Hash(Object* source, Name* name) {
   2456     // Uses only lower 32 bits if pointers are larger.
   2457     uint32_t source_hash =
   2458         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
   2459             >> kPointerSizeLog2;
   2460     uint32_t name_hash =
   2461         static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
   2462             >> kPointerSizeLog2;
   2463     return (source_hash ^ name_hash) % kLength;
   2464   }
   2465 
   2466   static const int kLength = 64;
   2467   struct Key {
   2468     Map* source;
   2469     Name* name;
   2470   };
   2471 
   2472   Key keys_[kLength];
   2473   int results_[kLength];
   2474 
   2475   friend class Isolate;
   2476   DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
   2477 };
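// Illustrative sketch (editor's addition): same protocol as the keyed lookup
// cache above, but keyed on raw Map*/Name* pointers. Only unique names are
// cached; Lookup() returns kAbsent immediately for any other name. The slow
// path below is deliberately left abstract.
//
//   int index = cache->Lookup(map, name);
//   if (index == DescriptorLookupCache::kAbsent) {
//     index = ...;  // Slow path: search the map's descriptor array.
//     cache->Update(map, name, index);
//   }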
   2478 
   2479 
   2480 // GCTracer collects and prints ONE line after each garbage collector
   2481 // invocation IFF --trace_gc is used.
   2482 
   2483 class GCTracer BASE_EMBEDDED {
   2484  public:
   2485   class Scope BASE_EMBEDDED {
   2486    public:
   2487     enum ScopeId {
   2488       EXTERNAL,
   2489       MC_MARK,
   2490       MC_SWEEP,
   2491       MC_SWEEP_NEWSPACE,
   2492       MC_SWEEP_OLDSPACE,
   2493       MC_EVACUATE_PAGES,
   2494       MC_UPDATE_NEW_TO_NEW_POINTERS,
   2495       MC_UPDATE_ROOT_TO_NEW_POINTERS,
   2496       MC_UPDATE_OLD_TO_NEW_POINTERS,
   2497       MC_UPDATE_POINTERS_TO_EVACUATED,
   2498       MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
   2499       MC_UPDATE_MISC_POINTERS,
   2500       MC_WEAKCOLLECTION_PROCESS,
   2501       MC_WEAKCOLLECTION_CLEAR,
   2502       MC_FLUSH_CODE,
   2503       kNumberOfScopes
   2504     };
   2505 
   2506     Scope(GCTracer* tracer, ScopeId scope)
   2507         : tracer_(tracer),
   2508         scope_(scope) {
   2509       start_time_ = OS::TimeCurrentMillis();
   2510     }
   2511 
   2512     ~Scope() {
   2513       ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
   2514       tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
   2515     }
   2516 
   2517    private:
   2518     GCTracer* tracer_;
   2519     ScopeId scope_;
   2520     double start_time_;
   2521   };
   2522 
   2523   explicit GCTracer(Heap* heap,
   2524                     const char* gc_reason,
   2525                     const char* collector_reason);
   2526   ~GCTracer();
   2527 
   2528   // Sets the collector.
   2529   void set_collector(GarbageCollector collector) { collector_ = collector; }
   2530 
   2531   // Sets the GC count.
   2532   void set_gc_count(unsigned int count) { gc_count_ = count; }
   2533 
   2534   // Sets the full GC count.
   2535   void set_full_gc_count(int count) { full_gc_count_ = count; }
   2536 
   2537   void increment_nodes_died_in_new_space() {
   2538     nodes_died_in_new_space_++;
   2539   }
   2540 
   2541   void increment_nodes_copied_in_new_space() {
   2542     nodes_copied_in_new_space_++;
   2543   }
   2544 
   2545   void increment_nodes_promoted() {
   2546     nodes_promoted_++;
   2547   }
   2548 
   2549  private:
   2550   // Returns a string matching the collector.
   2551   const char* CollectorString();
   2552 
   2553   // Returns the size of objects in the heap (in MB).
   2554   inline double SizeOfHeapObjects();
   2555 
   2556   // Timestamp set in the constructor.
   2557   double start_time_;
   2558 
   2559   // Size of objects in heap set in constructor.
   2560   intptr_t start_object_size_;
   2561 
   2562   // Size of memory allocated from OS set in constructor.
   2563   intptr_t start_memory_size_;
   2564 
   2565   // Type of collector.
   2566   GarbageCollector collector_;
   2567 
   2568   // A count (including this one, e.g. the first collection is 1) of the
   2569   // number of garbage collections.
   2570   unsigned int gc_count_;
   2571 
   2572   // A count (including this one) of the number of full garbage collections.
   2573   int full_gc_count_;
   2574 
   2575   // Amounts of time spent in different scopes during GC.
   2576   double scopes_[Scope::kNumberOfScopes];
   2577 
   2578   // Total amount of space either wasted or contained in one of free lists
   2579   // before the current GC.
   2580   intptr_t in_free_list_or_wasted_before_gc_;
   2581 
   2582   // Difference between space used in the heap at the beginning of the current
   2583   // collection and the end of the previous collection.
   2584   intptr_t allocated_since_last_gc_;
   2585 
   2586   // Time spent in the mutator, i.e. the time elapsed between the end of
   2587   // the previous collection and the beginning of the current one.
   2588   double spent_in_mutator_;
   2589 
   2590   // Number of nodes that died in the new space.
   2591   int nodes_died_in_new_space_;
   2592 
   2593   // Number of nodes copied to the new space.
   2594   int nodes_copied_in_new_space_;
   2595 
   2596   // Number of nodes promoted to the old space.
   2597   int nodes_promoted_;
   2598 
   2599   // Counters for incremental marking steps.
   2600   int steps_count_;
   2601   double steps_took_;
   2602   double longest_step_;
   2603   int steps_count_since_last_gc_;
   2604   double steps_took_since_last_gc_;
   2605 
   2606   Heap* heap_;
   2607 
   2608   const char* gc_reason_;
   2609   const char* collector_reason_;
   2610 };
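// Illustrative sketch (editor's addition): timing one GC phase with the RAII
// Scope helper. The GCTracer is assumed to exist for the current collection;
// MarkLiveObjects() stands in for the actual phase being measured.
//
//   {
//     GCTracer::Scope scope(tracer, GCTracer::Scope::MC_MARK);
//     MarkLiveObjects();
//   }  // The destructor adds the elapsed wall-clock time to the tracer's
//      // per-scope accumulator for MC_MARK.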
   2611 
   2612 
   2613 class RegExpResultsCache {
   2614  public:
   2615   enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
   2616 
   2617   // Attempt to retrieve a cached result.  On failure, 0 is returned as a Smi.
   2618   // On success, the returned result is guaranteed to be a COW-array.
   2619   static Object* Lookup(Heap* heap,
   2620                         String* key_string,
   2621                         Object* key_pattern,
   2622                         ResultsCacheType type);
   2623   // Attempt to add value_array to the cache specified by type.  On success,
   2624   // value_array is turned into a COW-array.
   2625   static void Enter(Isolate* isolate,
   2626                     Handle<String> key_string,
   2627                     Handle<Object> key_pattern,
   2628                     Handle<FixedArray> value_array,
   2629                     ResultsCacheType type);
   2630   static void Clear(FixedArray* cache);
   2631   static const int kRegExpResultsCacheSize = 0x100;
   2632 
   2633  private:
   2634   static const int kArrayEntriesPerCacheEntry = 4;
   2635   static const int kStringOffset = 0;
   2636   static const int kPatternOffset = 1;
   2637   static const int kArrayOffset = 2;
   2638 };
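// Illustrative sketch (editor's addition): the cache contract. On a miss
// Lookup() returns Smi zero, so a FixedArray check distinguishes hits; on a
// successful Enter() the stored array becomes copy-on-write. The subject,
// pattern, and slow path below are placeholders.
//
//   Object* cached = RegExpResultsCache::Lookup(
//       heap, *subject, *pattern, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   if (!cached->IsFixedArray()) {
//     Handle<FixedArray> result = ...;  // Compute the split the slow way.
//     RegExpResultsCache::Enter(isolate, subject, pattern, result,
//                               RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   }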
   2639 
   2640 
   2641 // Abstract base class for checking whether a weak object should be retained.
   2642 class WeakObjectRetainer {
   2643  public:
   2644   virtual ~WeakObjectRetainer() {}
   2645 
   2646   // Return whether this object should be retained. If NULL is returned, the
   2647   // object has no references. Otherwise the address of the retained object
   2648   // should be returned, as in some GC situations the object may have been moved.
   2649   virtual Object* RetainAs(Object* object) = 0;
   2650 };
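// Illustrative sketch (editor's addition): a minimal retainer that keeps
// every weakly held object alive. A real retainer returns NULL for objects
// that should be dropped, or the new address when the object has moved.
//
//   class KeepAllRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) {
//       return object;  // Retain everything at its current address.
//     }
//   };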
   2651 
   2652 
   2653 // Intrusive object marking uses the least significant bit of a
   2654 // heap object's map word to mark objects.
   2655 // Normally all map words have the least significant bit set
   2656 // because they contain a tagged map pointer.
   2657 // If the bit is not set, the object is marked.
   2658 // All objects should be unmarked before resuming
   2659 // JavaScript execution.
   2660 class IntrusiveMarking {
   2661  public:
   2662   static bool IsMarked(HeapObject* object) {
   2663     return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
   2664   }
   2665 
   2666   static void ClearMark(HeapObject* object) {
   2667     uintptr_t map_word = object->map_word().ToRawValue();
   2668     object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
   2669     ASSERT(!IsMarked(object));
   2670   }
   2671 
   2672   static void SetMark(HeapObject* object) {
   2673     uintptr_t map_word = object->map_word().ToRawValue();
   2674     object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
   2675     ASSERT(IsMarked(object));
   2676   }
   2677 
   2678   static Map* MapOfMarkedObject(HeapObject* object) {
   2679     uintptr_t map_word = object->map_word().ToRawValue();
   2680     return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
   2681   }
   2682 
   2683   static int SizeOfMarkedObject(HeapObject* object) {
   2684     return object->SizeFromMap(MapOfMarkedObject(object));
   2685   }
   2686 
   2687  private:
   2688   static const uintptr_t kNotMarkedBit = 0x1;
   2689   STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);  // NOLINT
   2690 };
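// Illustrative sketch (editor's addition): a full mark round-trip. SetMark()
// clears the tag bit of the map word, which is why the object's map and size
// stay recoverable via MapOfMarkedObject()/SizeOfMarkedObject(); ClearMark()
// must run on every marked object before JavaScript executes again.
//
//   IntrusiveMarking::SetMark(object);
//   int size = IntrusiveMarking::SizeOfMarkedObject(object);
//   IntrusiveMarking::ClearMark(object);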
   2691 
   2692 
   2693 #ifdef DEBUG
   2694 // Helper class for tracing paths to a search target Object from all roots.
   2695 // The TracePathFrom() method can be used to trace paths from a specific
   2696 // object to the search target object.
   2697 class PathTracer : public ObjectVisitor {
   2698  public:
   2699   enum WhatToFind {
   2700     FIND_ALL,   // Will find all matches.
   2701     FIND_FIRST  // Will stop the search after first match.
   2702   };
   2703 
   2704   // Tags 0, 1, and 3 are used. Tag 2 is used to mark visited HeapObjects.
   2705   static const int kMarkTag = 2;
   2706 
   2707   // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
   2708   // after the first match.  If FIND_ALL is specified, then tracing will be
   2709   // done for all matches.
   2710   PathTracer(Object* search_target,
   2711              WhatToFind what_to_find,
   2712              VisitMode visit_mode)
   2713       : search_target_(search_target),
   2714         found_target_(false),
   2715         found_target_in_trace_(false),
   2716         what_to_find_(what_to_find),
   2717         visit_mode_(visit_mode),
   2718         object_stack_(20),
   2719         no_allocation() {}
   2720 
   2721   virtual void VisitPointers(Object** start, Object** end);
   2722 
   2723   void Reset();
   2724   void TracePathFrom(Object** root);
   2725 
   2726   bool found() const { return found_target_; }
   2727 
   2728   static Object* const kAnyGlobalObject;
   2729 
   2730  protected:
   2731   class MarkVisitor;
   2732   class UnmarkVisitor;
   2733 
   2734   void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
   2735   void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
   2736   virtual void ProcessResults();
   2737 
   2738   Object* search_target_;
   2739   bool found_target_;
   2740   bool found_target_in_trace_;
   2741   WhatToFind what_to_find_;
   2742   VisitMode visit_mode_;
   2743   List<Object*> object_stack_;
   2744 
   2745   DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.
   2746 
   2747  private:
   2748   DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
   2749 };
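// Illustrative sketch (editor's addition, DEBUG builds only): finding the
// first retaining path from a given root slot to a target object. VISIT_ALL
// is the assumed VisitMode; root_slot is a hypothetical Object* slot.
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root_slot);
//   if (tracer.found()) {
//     // A path to the target was encountered during the trace.
//   }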
   2750 #endif  // DEBUG
   2751 
   2752 } }  // namespace v8::internal
   2753 
   2754 #endif  // V8_HEAP_H_
   2755