// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include <cmath>
#include <map>

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "include/v8.h"
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/base/atomic-utils.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
// TODO(mstarzinger): Two more includes to kill!
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/list.h"

namespace v8 {
namespace internal {

using v8::MemoryPressureLevel;

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  /* The first 32 entries are most often used in the startup snapshot and   */ \
  /* can use a shorter representation in the serialization format.          */ \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(String, empty_string, empty_string)                                        \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, one_byte_string_map, OneByteStringMap)                                \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, weak_cell_map, WeakCellMap)                                           \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, foreign_map, ForeignMap)                                              \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, transition_array_map, TransitionArrayMap)                             \
  V(FixedArray, empty_literals_array, EmptyLiteralsArray)                      \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap)           \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  /* Entries beyond the first 32                                            */ \
  /* The roots above this line should be boring from a GC point of view.    */ \
  /* This means they are never in new space and never on a page that is     */ \
  /* being compacted.                                                       */ \
  /* Oddballs */                                                               \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel)      \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  V(Oddball, exception, Exception)                                             \
  V(Oddball, termination_exception, TerminationException)                      \
  V(Oddball, optimized_out, OptimizedOut)                                      \
  V(Oddball, stale_register, StaleRegister)                                    \
  /* Context maps */                                                           \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, script_context_map, ScriptContextMap)                                 \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, debug_evaluate_context_map, DebugEvaluateContextMap)                  \
  V(Map, script_context_table_map, ScriptContextTableMap)                      \
  /* Maps */                                                                   \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap)                        \
  V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, neander_map, NeanderMap)                                              \
  V(Map, external_map, ExternalMap)                                            \
  V(Map, bytecode_array_map, BytecodeArrayMap)                                 \
  /* String maps */                                                            \
  V(Map, native_source_string_map, NativeSourceStringMap)                      \
  V(Map, string_map, StringMap)                                                \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap)                       \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap)                   \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_one_byte_data_map,                               \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap)               \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map, short_external_string_with_one_byte_data_map,                         \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap)      \
  V(Map, external_internalized_string_with_one_byte_data_map,                  \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map, external_one_byte_internalized_string_map,                            \
    ExternalOneByteInternalizedStringMap)                                      \
  V(Map, short_external_internalized_string_map,                               \
    ShortExternalInternalizedStringMap)                                        \
  V(Map, short_external_internalized_string_with_one_byte_data_map,            \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map, short_external_one_byte_internalized_string_map,                      \
    ShortExternalOneByteInternalizedStringMap)                                 \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap)    \
  /* Array element maps */                                                     \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
  V(Map, float32x4_map, Float32x4Map)                                          \
  V(Map, int32x4_map, Int32x4Map)                                              \
  V(Map, uint32x4_map, Uint32x4Map)                                            \
  V(Map, bool32x4_map, Bool32x4Map)                                            \
  V(Map, int16x8_map, Int16x8Map)                                              \
  V(Map, uint16x8_map, Uint16x8Map)                                            \
  V(Map, bool16x8_map, Bool16x8Map)                                            \
  V(Map, int8x16_map, Int8x16Map)                                              \
  V(Map, uint8x16_map, Uint8x16Map)                                            \
  V(Map, bool8x16_map, Bool8x16Map)                                            \
  /* Canonical empty values */                                                 \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
    EmptyFixedUint8ClampedArray)                                               \
  V(Script, empty_script, EmptyScript)                                         \
  V(Cell, undefined_cell, UndefinedCell)                                       \
  V(FixedArray, empty_sloppy_arguments_elements, EmptySloppyArgumentsElements) \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
    EmptySlowElementDictionary)                                                \
  V(TypeFeedbackVector, dummy_vector, DummyVector)                             \
  V(PropertyCell, empty_property_cell, EmptyPropertyCell)                      \
  V(WeakCell, empty_weak_cell, EmptyWeakCell)                                  \
  /* Protectors */                                                             \
  V(PropertyCell, array_protector, ArrayProtector)                             \
  V(Cell, is_concat_spreadable_protector, IsConcatSpreadableProtector)         \
  V(PropertyCell, has_instance_protector, HasInstanceProtector)                \
  V(Cell, species_protector, SpeciesProtector)                                 \
  /* Special numbers */                                                        \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(HeapNumber, minus_infinity_value, MinusInfinityValue)                      \
  /* Caches */                                                                 \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(FixedArray, experimental_natives_source_cache,                             \
    ExperimentalNativesSourceCache)                                            \
  V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache)           \
  V(FixedArray, experimental_extra_natives_source_cache,                       \
    ExperimentalExtraNativesSourceCache)                                       \
  /* Lists and dictionaries */                                                 \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
  V(NameDictionary, empty_properties_dictionary, EmptyPropertiesDictionary)    \
  V(Object, symbol_registry, SymbolRegistry)                                   \
  V(Object, script_list, ScriptList)                                           \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(FixedArray, materialized_objects, MaterializedObjects)                     \
  V(FixedArray, microtask_queue, MicrotaskQueue)                               \
  V(FixedArray, detached_contexts, DetachedContexts)                           \
  V(ArrayList, retained_maps, RetainedMaps)                                    \
  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)           \
  V(Object, weak_stack_trace_list, WeakStackTraceList)                         \
  V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos)       \
  V(FixedArray, serialized_templates, SerializedTemplates)                     \
  /* Configured values */                                                      \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  /* Oddball maps */                                                           \
  V(Map, undefined_map, UndefinedMap)                                          \
  V(Map, the_hole_map, TheHoleMap)                                             \
  V(Map, null_map, NullMap)                                                    \
  V(Map, boolean_map, BooleanMap)                                              \
  V(Map, uninitialized_map, UninitializedMap)                                  \
  V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap)   \
  V(Map, exception_map, ExceptionMap)                                          \
  V(Map, termination_exception_map, TerminationExceptionMap)                   \
  V(Map, optimized_out_map, OptimizedOutMap)                                   \
  V(Map, stale_register_map, StaleRegisterMap)

// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V)                                                       \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(Smi, last_script_id, LastScriptId)                                         \
  V(Smi, hash_seed, HashSeed)                                                  \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(Smi, next_template_serial_number, NextTemplateSerialNumber)                \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
  V(Smi, interpreter_entry_return_pc_offset, InterpreterEntryReturnPCOffset)

#define ROOT_LIST(V)  \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V)    \
  V(StringTable, string_table, StringTable)

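// Illustrative note (not part of the original header): ROOT_LIST is an
// X-macro. Instantiating it with a one-line macro V stamps out one line per
// root. For example, the ROOT_ACCESSOR pattern used further down in this
// file,
//
//   #define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
//   ROOT_LIST(ROOT_ACCESSOR)
//   #undef ROOT_ACCESSOR
//
// expands to declarations such as
//
//   inline Map* free_space_map();
//   inline Oddball* undefined_value();
//   inline StringTable* string_table();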

// Heap roots that are known to be immortal immovable, for which we can
// safely skip write barriers. This list is not exhaustive.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(ByteArrayMap)                       \
  V(BytecodeArrayMap)                   \
  V(FreeSpaceMap)                       \
  V(OnePointerFillerMap)                \
  V(TwoPointerFillerMap)                \
  V(UndefinedValue)                     \
  V(TheHoleValue)                       \
  V(NullValue)                          \
  V(TrueValue)                          \
  V(FalseValue)                         \
  V(UninitializedValue)                 \
  V(CellMap)                            \
  V(GlobalPropertyCellMap)              \
  V(SharedFunctionInfoMap)              \
  V(MetaMap)                            \
  V(HeapNumberMap)                      \
  V(MutableHeapNumberMap)               \
  V(Float32x4Map)                       \
  V(Int32x4Map)                         \
  V(Uint32x4Map)                        \
  V(Bool32x4Map)                        \
  V(Int16x8Map)                         \
  V(Uint16x8Map)                        \
  V(Bool16x8Map)                        \
  V(Int8x16Map)                         \
  V(Uint8x16Map)                        \
  V(Bool8x16Map)                        \
  V(NativeContextMap)                   \
  V(FixedArrayMap)                      \
  V(CodeMap)                            \
  V(ScopeInfoMap)                       \
  V(FixedCOWArrayMap)                   \
  V(FixedDoubleArrayMap)                \
  V(WeakCellMap)                        \
  V(TransitionArrayMap)                 \
  V(NoInterceptorResultSentinel)        \
  V(HashTableMap)                       \
  V(OrderedHashTableMap)                \
  V(EmptyFixedArray)                    \
  V(EmptyByteArray)                     \
  V(EmptyDescriptorArray)               \
  V(ArgumentsMarker)                    \
  V(SymbolMap)                          \
  V(SloppyArgumentsElementsMap)         \
  V(FunctionContextMap)                 \
  V(CatchContextMap)                    \
  V(WithContextMap)                     \
  V(BlockContextMap)                    \
  V(ModuleContextMap)                   \
  V(ScriptContextMap)                   \
  V(UndefinedMap)                       \
  V(TheHoleMap)                         \
  V(NullMap)                            \
  V(BooleanMap)                         \
  V(UninitializedMap)                   \
  V(ArgumentsMarkerMap)                 \
  V(JSMessageObjectMap)                 \
  V(ForeignMap)                         \
  V(NeanderMap)                         \
  V(NanValue)                           \
  V(InfinityValue)                      \
  V(MinusZeroValue)                     \
  V(MinusInfinityValue)                 \
  V(EmptyWeakCell)                      \
  V(empty_string)                       \
  PRIVATE_SYMBOL_LIST(V)

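// Illustrative sketch (hypothetical call site, not code from this header):
// because these roots never move and never die, stores of them can skip the
// write barrier, e.g. via FixedArray::set's WriteBarrierMode overload:
//
//   array->set(index, heap->empty_fixed_array(), SKIP_WRITE_BARRIER);
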
// Forward declarations.
class AllocationObserver;
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapObjectsFilter;
class HeapStats;
class HistogramTimer;
class Isolate;
class MemoryReducer;
class ObjectStats;
class Scavenger;
class ScavengeJob;
class WeakObjectRetainer;

enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION };

typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
// The last page in to-space is used for the promotion queue. On conflict
// during scavenge, the promotion queue is allocated externally and all
// entries are copied to the external queue.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) {}

  void Initialize();

  void Destroy() {
    DCHECK(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  Page* GetHeadPage() {
    return Page::FromAllocationAreaAddress(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    // If we are already using an emergency stack, we can ignore it.
    if (emergency_stack_) return;

    // If the limit is not on the same page, we can ignore it.
    if (Page::FromAllocationAreaAddress(limit) != GetHeadPage()) return;

    limit_ = reinterpret_cast<struct Entry*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool IsBelowPromotionQueue(Address to_space_top) {
    // If an emergency stack is used, the to-space address cannot interfere
    // with the promotion queue.
    if (emergency_stack_) return true;

    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
      return true;
    }
    // If the to-space top pointer is less than or equal to the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<struct Entry*>(to_space_top) <= rear_;
  }

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);

  void remove(HeapObject** target, int32_t* size, bool* was_marked_black) {
    DCHECK(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      *was_marked_black = e.was_marked_black_;
      return;
    }

    struct Entry* entry = reinterpret_cast<struct Entry*>(--front_);
    *target = entry->obj_;
    *size = entry->size_;
    *was_marked_black = entry->was_marked_black_;

    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  struct Entry {
    Entry(HeapObject* obj, int32_t size, bool was_marked_black)
        : obj_(obj), size_(size), was_marked_black_(was_marked_black) {}

    HeapObject* obj_;
    int32_t size_ : 31;
    bool was_marked_black_ : 1;
  };

  void RelocateQueueHead();

  // The front of the queue is higher in the memory page chain than the rear.
  struct Entry* front_;
  struct Entry* rear_;
  struct Entry* limit_;

  List<Entry>* emergency_stack_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};

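// Illustrative usage sketch (hypothetical call site; the real consumer is
// the scavenger in src/heap). insert() and remove() are declared above:
//
//   PromotionQueue* queue = heap->promotion_queue();
//   queue->insert(object, size, /* was_marked_black */ false);
//   while (!queue->is_empty()) {
//     HeapObject* target;
//     int32_t size;
//     bool was_marked_black;
//     queue->remove(&target, &size, &was_marked_black);
//     // Scan |target| for pointers into new space.
//   }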

enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};

enum class ClearRecordedSlots { kYes, kNo };

class Heap {
 public:
  // Declare all the root indices.  This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
        INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
            PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
                PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
                    WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
                        STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
                            kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
        kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };

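  // Layout sketch of the enum above (added for clarity, not original text):
  //
  //   [0 .. kStrongRootListLength)          strong roots, strings, symbols,
  //                                         struct maps (visited by the GC)
  //   kStringTableRootIndex                 the string table (special-cased)
  //   [kSmiRootsStart .. kRootListLength)   smi roots (skipped by the GC)
  //
  // where kStrongRootListLength == kStringTableRootIndex and
  // kSmiRootsStart == kStringTableRootIndex + 1.
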
  enum FindMementoMode { kForRuntime, kForGC };

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

  // Indicates whether live bytes adjustment is triggered
  // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
  // - or from within GC or mutator code while sweeping is in progress
  //   (CONCURRENT_TO_SWEEPER).
  enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

  enum UpdateAllocationSiteMode { kGlobal, kCached };

  // Taking this lock prevents the GC from entering a phase that relocates
  // object references.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };

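  // Illustrative RAII usage (hypothetical call site): object addresses stay
  // stable while the lock is held, since the GC cannot enter a relocating
  // phase until the lock is destroyed:
  //
  //   {
  //     Heap::RelocationLock lock(heap);
  //     // ... read object pointers without them being moved ...
  //   }
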
  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  struct Chunk {
    uint32_t size;
    Address start;
    Address end;
  };
  typedef List<Chunk> Reservation;

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  // Don't apply pointer multiplier on Android since it has no swap space and
  // should instead adapt its heap size based on available physical memory.
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;

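  // Worked example (assuming a 64-bit, non-Android build): kPointerMultiplier
  // is i::kPointerSize / 4 == 2, so kMaxSemiSpaceSizeHighMemoryDevice above
  // denotes 8 * 2 == 16 MB per semispace.
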
  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  static const double kMinHeapGrowingFactor;
  static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kTargetMutatorUtilization;

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Calculates the maximum amount of filler that could be required by the
  // given alignment.
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  // Calculates the actual amount of filler required for a given address at the
  // given alignment.
  static int GetFillToAlign(Address address, AllocationAlignment alignment);

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  static void FatalProcessOutOfMemory(const char* location,
                                      bool is_heap_oom = false);

  static bool RootIsImmortalImmovable(int root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  static double HeapGrowingFactor(double gc_speed, double mutator_speed);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Determines a static visitor id based on the given {map} that can then be
  // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
  static int GetStaticVisitorIdForMap(Map* map);

    630   // sure all handles still needed are updated. Filter out a stale pointer
    631   // and clear the slot to allow post processing of handles (needed because
    632   // the sweeper might actually free the underlying page).
    633   inline bool PurgeLeftTrimmedObject(Object** object);
    634 
    635   // Notifies the heap that is ok to start marking or other activities that
    636   // should not happen during deserialization.
    637   void NotifyDeserializationComplete();
    638 
    639   intptr_t old_generation_allocation_limit() const {
    640     return old_generation_allocation_limit_;
    641   }
    642 
    643   bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
    644 
    645   Address* NewSpaceAllocationTopAddress() {
    646     return new_space_.allocation_top_address();
    647   }
    648   Address* NewSpaceAllocationLimitAddress() {
    649     return new_space_.allocation_limit_address();
    650   }
    651 
    652   Address* OldSpaceAllocationTopAddress() {
    653     return old_space_->allocation_top_address();
    654   }
    655   Address* OldSpaceAllocationLimitAddress() {
    656     return old_space_->allocation_limit_address();
    657   }
    658 
    659   bool CanExpandOldGeneration(int size) {
    660     if (force_oom_) return false;
    661     return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
    662   }
    663 
    664   // Clear the Instanceof cache (used when a prototype changes).
    665   inline void ClearInstanceofCache();
    666 
    667   // FreeSpace objects have a null map after deserialization. Update the map.
    668   void RepairFreeListsAfterDeserialization();
    669 
    670   // Move len elements within a given array from src_index index to dst_index
    671   // index.
    672   void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
    673 
    674   // Initialize a filler object to keep the ability to iterate over the heap
    675   // when introducing gaps within pages. If slots could have been recorded in
    676   // the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
    677   // pass ClearRecordedSlots::kNo.
    678   void CreateFillerObjectAt(Address addr, int size, ClearRecordedSlots mode);
    679 
    680   bool CanMoveObjectStart(HeapObject* object);
    681 
    682   // Maintain consistency of live bytes during incremental marking.
    683   void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);
    684 
    685   // Trim the given array from the left. Note that this relocates the object
    686   // start and hence is only valid if there is only a single reference to it.
    687   FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
    688 
    689   // Trim the given array from the right.
    690   template<Heap::InvocationMode mode>
    691   void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
    692 
    693   // Converts the given boolean condition to JavaScript boolean value.
    694   inline Oddball* ToBoolean(bool condition);
    695 
    696   // Check whether the heap is currently iterable.
    697   bool IsHeapIterable();
    698 
    699   // Notify the heap that a context has been disposed.
    700   int NotifyContextDisposed(bool dependant_context);
    701 
    702   void set_native_contexts_list(Object* object) {
    703     native_contexts_list_ = object;
    704   }
    705   Object* native_contexts_list() const { return native_contexts_list_; }
    706 
    707   void set_allocation_sites_list(Object* object) {
    708     allocation_sites_list_ = object;
    709   }
    710   Object* allocation_sites_list() { return allocation_sites_list_; }
    711 
    712   // Used in CreateAllocationSiteStub and the (de)serializer.
    713   Object** allocation_sites_list_address() { return &allocation_sites_list_; }
    714 
    715   void set_encountered_weak_collections(Object* weak_collection) {
    716     encountered_weak_collections_ = weak_collection;
    717   }
    718   Object* encountered_weak_collections() const {
    719     return encountered_weak_collections_;
    720   }
    721 
    722   void set_encountered_weak_cells(Object* weak_cell) {
    723     encountered_weak_cells_ = weak_cell;
    724   }
    725   Object* encountered_weak_cells() const { return encountered_weak_cells_; }
    726 
    727   void set_encountered_transition_arrays(Object* transition_array) {
    728     encountered_transition_arrays_ = transition_array;
    729   }
    730   Object* encountered_transition_arrays() const {
    731     return encountered_transition_arrays_;
    732   }
    733 
    734   // Number of mark-sweeps.
    735   int ms_count() const { return ms_count_; }
    736 
  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  void CheckHandleCount();

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  // If an object has an AllocationMemento trailing it, return it; otherwise
  // return NULL.
  template <FindMementoMode mode>
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // Returns false if not able to reserve.
  bool ReserveSpace(Reservation* reservations);

  void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);

  bool UsingEmbedderHeapTracer();

  void TracePossibleWrapper(JSObject* js_object);

  void RegisterExternallyReferencedObject(Object** object);

  //
  // Support for the API.
  //

  void CreateApiObjects();

  // Implements the corresponding V8 API function.
  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  void MemoryPressureNotification(MemoryPressureLevel level,
                                  bool is_isolate_locked);
  void CheckMemoryPressure();

  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Check new space expansion criteria and expand semispaces if they were hit.
  void CheckNewSpaceExpansionCriteria();

  inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    if (HighMemoryPressure()) return true;

    return false;
  }

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if it has survived a scavenge operation.
  template <PromotionMode promotion_mode>
  inline bool ShouldBePromoted(Address old_address, int object_size);

  inline PromotionMode CurrentPromotionMode();

  void ClearNormalizedMapCaches();

  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);

  inline bool OldGenerationAllocationLimitReached();

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  inline uint32_t HashSeed();

  inline int NextScriptId();

  inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
  inline void SetConstructStubDeoptPCOffset(int pc_offset);
  inline void SetGetterStubDeoptPCOffset(int pc_offset);
  inline void SetSetterStubDeoptPCOffset(int pc_offset);
  inline void SetInterpreterEntryReturnPCOffset(int pc_offset);
  inline int GetNextTemplateSerialNumber();

  inline void SetSerializedTemplates(FixedArray* templates);

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  int64_t external_memory() { return external_memory_; }
  void update_external_memory(int64_t delta) { external_memory_ += delta; }

  void update_external_memory_concurrently_freed(intptr_t freed) {
    external_memory_concurrently_freed_.Increment(freed);
  }

  void account_external_memory_concurrently_freed() {
    external_memory_ -= external_memory_concurrently_freed_.Value();
    external_memory_concurrently_freed_.SetValue(0);
  }

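  // Illustrative flow (hypothetical values): a concurrent sweeper thread
  // records externally freed memory, and the main thread later folds it into
  // the external memory counter:
  //
  //   heap->update_external_memory_concurrently_freed(1 * MB);  // any thread
  //   heap->account_external_memory_concurrently_freed();       // main thread
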
  void DeoptMarkedAllocationSites();

  bool DeoptMaybeTenuredAllocationSites() {
    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
  }

  void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

  void CompactWeakFixedArrays();

  void AddRetainedMap(Handle<Map> map);

  // This event is triggered after successful allocation of a new object made
  // by the runtime. Allocations of target space for object evacuation do not
  // trigger the event. In order to track ALL allocations, one must turn off
  // FLAG_inline_new and FLAG_use_allocation_folding.
  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  // This event is triggered after an object is moved to a new place.
  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);

  bool deserialization_complete() const { return deserialization_complete_; }

  bool HasLowAllocationRate();
  bool HasHighFragmentation();
  bool HasHighFragmentation(intptr_t used, intptr_t committed);

  void SetOptimizeForLatency() { optimize_for_memory_usage_ = false; }
  void SetOptimizeForMemoryUsage();
  bool ShouldOptimizeForMemoryUsage() {
    return optimize_for_memory_usage_ || HighMemoryPressure();
  }
  bool HighMemoryPressure() {
    return memory_pressure_level_.Value() != MemoryPressureLevel::kNone;
  }

  // ===========================================================================
  // Initialization. ===========================================================
  // ===========================================================================

  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
                     int max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // ===========================================================================
  // Getters for spaces. =======================================================
  // ===========================================================================

  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_space() { return old_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_SPACE:
        return old_space();
      case MAP_SPACE:
        return map_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }

  Space* space(int idx) {
    switch (idx) {
      case NEW_SPACE:
        return new_space();
      case LO_SPACE:
        return lo_space();
      default:
        return paged_space(idx);
    }
  }

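  // Illustrative iteration sketch (hypothetical loop; FIRST_SPACE and
  // LAST_SPACE come from the AllocationSpace enum in src/globals.h):
  //
  //   for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
  //     Space* s = heap->space(i);
  //     // ... inspect |s| ...
  //   }
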
  // Returns name of the space.
  const char* GetSpaceName(int idx);

  // ===========================================================================
  // Getters to other components. ==============================================
  // ===========================================================================

  GCTracer* tracer() { return tracer_; }

  MemoryAllocator* memory_allocator() { return memory_allocator_; }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  // ===========================================================================
  // Root set access. ==========================================================
  // ===========================================================================

  // Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) inline String* name();
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) inline Symbol* name();
  PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
  PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
  WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
  }

  void SetRootNoScriptSharedFunctionInfos(Object* value) {
    roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
  }

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // The stack limit is thread-dependent. To be able to reproduce the same
  // snapshot blob, we need to reset it before serializing.
  void ClearStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

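  // Illustrative usage (hypothetical call sites for the GC entry points
  // declared above):
  //
  //   heap->CollectGarbage(NEW_SPACE, "unit test");      // scavenge
  //   heap->CollectAllGarbage(Heap::kNoGCFlags,          // full mark-compact
  //                           "low memory notification");
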
  // Reports an external memory pressure event; either performs a major GC or
  // completes incremental marking in order to free external resources.
   1089   void ReportExternalMemoryPressure(const char* gc_reason = NULL);
   1090 
   1091   // Invoked when GC was requested via the stack guard.
   1092   void HandleGCRequest();
   1093 
   1094   // ===========================================================================
   1095   // Iterators. ================================================================
   1096   // ===========================================================================
   1097 
   1098   // Iterates over all roots in the heap.
   1099   void IterateRoots(ObjectVisitor* v, VisitMode mode);
   1100   // Iterates over all strong roots in the heap.
   1101   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
   1102   // Iterates over entries in the smi roots list.  Only interesting to the
   1103   // serializer/deserializer, since GC does not care about smis.
   1104   void IterateSmiRoots(ObjectVisitor* v);
   1105   // Iterates over all the weak roots in the heap.
   1106   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
   1107 
   1108   // Iterates over the pointers of promoted objects.
   1109   void IteratePromotedObject(HeapObject* target, int size,
   1110                              bool was_marked_black,
   1111                              ObjectSlotCallback callback);
   1112 
   1113   void IteratePromotedObjectPointers(HeapObject* object, Address start,
   1114                                      Address end, bool record_slots,
   1115                                      ObjectSlotCallback callback);
   1116 
   1117   // ===========================================================================
   1118   // Store buffer API. =========================================================
   1119   // ===========================================================================
   1120 
   1121   // Write barrier support for object[offset] = o;
   1122   inline void RecordWrite(Object* object, int offset, Object* o);
   1123   inline void RecordFixedArrayElements(FixedArray* array, int offset,
   1124                                        int length);
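          // Barrier usage sketch: the raw store is followed by a RecordWrite so
          // that a possible old-to-new pointer lands in the store buffer ("obj",
          // "value" and kFieldOffset are hypothetical):
          //
          //   obj->set_field(value);                        // the raw store
          //   heap->RecordWrite(obj, kFieldOffset, value);  // inform the barrier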
   1125 
   1126   Address* store_buffer_top_address() { return store_buffer()->top_address(); }
   1127 
   1128   void ClearRecordedSlot(HeapObject* object, Object** slot);
   1129   void ClearRecordedSlotRange(Address start, Address end);
   1130 
   1131   // ===========================================================================
   1132   // Incremental marking API. ==================================================
   1133   // ===========================================================================
   1134 
   1135   // Starts incremental marking and ensures that the idle time handler can
   1136   // perform incremental steps.
   1137   void StartIdleIncrementalMarking();
   1138 
   1139   // Starts incremental marking assuming incremental marking is currently
   1140   // stopped.
   1141   void StartIncrementalMarking(int gc_flags = kNoGCFlags,
   1142                                const GCCallbackFlags gc_callback_flags =
   1143                                    GCCallbackFlags::kNoGCCallbackFlags,
   1144                                const char* reason = nullptr);
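          // E.g. (a sketch, assuming IncrementalMarking::IsStopped() as the
          // guard for the precondition above):
          //
          //   if (heap->incremental_marking()->IsStopped()) {
          //     heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
          //                                   kNoGCCallbackFlags, "low memory");
          //   }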
   1145 
   1146   void FinalizeIncrementalMarkingIfComplete(const char* comment);
   1147 
   1148   bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);
   1149 
   1150   void RegisterReservationsForBlackAllocation(Reservation* reservations);
   1151 
   1152   IncrementalMarking* incremental_marking() { return incremental_marking_; }
   1153 
   1154   // ===========================================================================
   1155   // External string table API. ================================================
   1156   // ===========================================================================
   1157 
   1158   // Registers an external string.
   1159   inline void RegisterExternalString(String* string);
   1160 
   1161   // Finalizes an external string by deleting the associated external
   1162   // data and clearing the resource pointer.
   1163   inline void FinalizeExternalString(String* string);
   1164 
   1165   // ===========================================================================
   1166   // Methods checking/returning the space of a given object/address. ===========
   1167   // ===========================================================================
   1168 
   1169   // Returns whether the object resides in new space.
   1170   inline bool InNewSpace(Object* object);
   1171   inline bool InFromSpace(Object* object);
   1172   inline bool InToSpace(Object* object);
   1173 
   1174   // Returns whether the object resides in old space.
   1175   inline bool InOldSpace(Object* object);
   1176 
   1177   // Checks whether an address/object is in the heap (including the auxiliary
   1178   // and unused areas).
   1179   bool Contains(HeapObject* value);
   1180 
   1181   // Checks whether an address/object is in a given space.
   1182   // Currently used by tests, serialization and heap verification only.
   1183   bool InSpace(HeapObject* value, AllocationSpace space);
   1184 
   1185   // Slow methods that can be used for verification as they can also be used
   1186   // with off-heap Addresses.
   1187   bool ContainsSlow(Address addr);
   1188   bool InSpaceSlow(Address addr, AllocationSpace space);
   1189   inline bool InNewSpaceSlow(Address address);
   1190   inline bool InOldSpaceSlow(Address address);
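          // Relationship sketch: new space is the union of the two semi-spaces,
          // so for an object in new space one of the finer predicates holds:
          //
          //   if (heap->InNewSpace(object)) {
          //     DCHECK(heap->InToSpace(object) || heap->InFromSpace(object));
          //   }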
   1191 
   1192   // ===========================================================================
   1193   // Object statistics tracking. ===============================================
   1194   // ===========================================================================
   1195 
   1196   // Returns the number of buckets used by object statistics tracking during a
   1197   // major GC. Note that the following methods fail gracefully when these
   1198   // bounds are exceeded.
   1199   size_t NumberOfTrackedHeapObjectTypes();
   1200 
   1201   // Returns object statistics about count and size at the last major GC.
   1202   // Objects are grouped into buckets that roughly resemble existing
   1203   // instance types.
   1204   size_t ObjectCountAtLastGC(size_t index);
   1205   size_t ObjectSizeAtLastGC(size_t index);
   1206 
   1207   // Retrieves names of buckets used by object statistics tracking.
   1208   bool GetObjectTypeName(size_t index, const char** object_type,
   1209                          const char** object_sub_type);
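          // Reporting sketch over the statistics buckets (Report() is a
          // hypothetical sink; everything else is from this interface):
          //
          //   for (size_t i = 0; i < heap->NumberOfTrackedHeapObjectTypes(); i++) {
          //     const char* type;
          //     const char* sub_type;
          //     if (!heap->GetObjectTypeName(i, &type, &sub_type)) continue;
          //     Report(type, sub_type, heap->ObjectCountAtLastGC(i),
          //            heap->ObjectSizeAtLastGC(i));
          //   }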
   1210 
   1211   // ===========================================================================
   1212   // Code statistics. ==========================================================
   1213   // ===========================================================================
   1214 
   1215   // Collect code (Code and BytecodeArray objects) statistics.
   1216   void CollectCodeStatistics();
   1217 
   1218   // ===========================================================================
   1219   // GC statistics. ============================================================
   1220   // ===========================================================================
   1221 
   1222   // Returns the maximum amount of memory reserved for the heap.
   1223   intptr_t MaxReserved() {
   1224     return 2 * max_semi_space_size_ + max_old_generation_size_;
   1225   }
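          // The factor of two accounts for the two semi-spaces of new space.
          // With illustrative values of an 8 MB semi-space and a 700 MB old
          // generation limit, this yields 2 * 8 + 700 = 716 MB of reserved
          // address space.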
   1226   int MaxSemiSpaceSize() { return max_semi_space_size_; }
   1227   int InitialSemiSpaceSize() { return initial_semispace_size_; }
   1228   intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
   1229   intptr_t MaxExecutableSize() { return max_executable_size_; }
   1230 
   1231   // Returns the capacity of the heap in bytes without growing. The heap grows
   1232   // when more space is needed, until it reaches the limit.
   1233   intptr_t Capacity();
   1234 
   1235   // Returns the capacity of the old generation.
   1236   intptr_t OldGenerationCapacity();
   1237 
   1238   // Returns the amount of memory currently committed for the heap.
   1239   intptr_t CommittedMemory();
   1240 
   1241   // Returns the amount of memory currently committed for the old generation.
   1242   intptr_t CommittedOldGenerationMemory();
   1243 
   1244   // Returns the amount of executable memory currently committed for the heap.
   1245   intptr_t CommittedMemoryExecutable();
   1246 
   1247   // Returns the amount of physical memory currently committed for the heap.
   1248   size_t CommittedPhysicalMemory();
   1249 
   1250   // Returns the maximum amount of memory ever committed for the heap.
   1251   intptr_t MaximumCommittedMemory() { return maximum_committed_; }
   1252 
   1253   // Updates the maximum committed memory for the heap. Should be called
   1254   // whenever a space grows.
   1255   void UpdateMaximumCommitted();
   1256 
   1257   // Returns the available bytes in the space without growing.
   1258   // The heap doesn't guarantee that it can allocate an object that requires
   1259   // all available bytes. Check MaxHeapObjectSize() instead.
   1260   intptr_t Available();
   1261 
   1262   // Returns the size of all objects residing in the heap.
   1263   intptr_t SizeOfObjects();
   1264 
   1265   void UpdateSurvivalStatistics(int start_new_space_size);
   1266 
   1267   inline void IncrementPromotedObjectsSize(intptr_t object_size) {
   1268     DCHECK_GE(object_size, 0);
   1269     promoted_objects_size_ += object_size;
   1270   }
   1271   inline intptr_t promoted_objects_size() { return promoted_objects_size_; }
   1272 
   1273   inline void IncrementSemiSpaceCopiedObjectSize(intptr_t object_size) {
   1274     DCHECK_GE(object_size, 0);
   1275     semi_space_copied_object_size_ += object_size;
   1276   }
   1277   inline intptr_t semi_space_copied_object_size() {
   1278     return semi_space_copied_object_size_;
   1279   }
   1280 
   1281   inline intptr_t SurvivedNewSpaceObjectSize() {
   1282     return promoted_objects_size_ + semi_space_copied_object_size_;
   1283   }
   1284 
   1285   inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
   1286 
   1287   inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
   1288 
   1289   inline void IncrementNodesPromoted() { nodes_promoted_++; }
   1290 
   1291   inline void IncrementYoungSurvivorsCounter(intptr_t survived) {
   1292     DCHECK_GE(survived, 0);
   1293     survived_last_scavenge_ = survived;
   1294     survived_since_last_expansion_ += survived;
   1295   }
   1296 
   1297   inline intptr_t PromotedTotalSize() {
   1298     int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
   1299     if (total > std::numeric_limits<intptr_t>::max()) {
   1300       // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
   1301       return std::numeric_limits<intptr_t>::max();
   1302     }
   1303     if (total < 0) return 0;
   1304     return static_cast<intptr_t>(total);
   1305   }
   1306 
   1307   void UpdateNewSpaceAllocationCounter() {
   1308     new_space_allocation_counter_ = NewSpaceAllocationCounter();
   1309   }
   1310 
   1311   size_t NewSpaceAllocationCounter() {
   1312     return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
   1313   }
   1314 
   1315   // This should be used only for testing.
   1316   void set_new_space_allocation_counter(size_t new_value) {
   1317     new_space_allocation_counter_ = new_value;
   1318   }
   1319 
   1320   void UpdateOldGenerationAllocationCounter() {
   1321     old_generation_allocation_counter_ = OldGenerationAllocationCounter();
   1322   }
   1323 
   1324   size_t OldGenerationAllocationCounter() {
   1325     return old_generation_allocation_counter_ + PromotedSinceLastGC();
   1326   }
   1327 
   1328   // This should be used only for testing.
   1329   void set_old_generation_allocation_counter(size_t new_value) {
   1330     old_generation_allocation_counter_ = new_value;
   1331   }
   1332 
   1333   size_t PromotedSinceLastGC() {
   1334     return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
   1335   }
   1336 
   1337   int gc_count() const { return gc_count_; }
   1338 
   1339   // Returns the size of objects residing in spaces other than new space.
   1340   intptr_t PromotedSpaceSizeOfObjects();
   1341 
   1342   double total_regexp_code_generated() { return total_regexp_code_generated_; }
   1343   void IncreaseTotalRegexpCodeGenerated(int size) {
   1344     total_regexp_code_generated_ += size;
   1345   }
   1346 
   1347   void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
   1348     if (is_crankshafted) {
   1349       crankshaft_codegen_bytes_generated_ += size;
   1350     } else {
   1351       full_codegen_bytes_generated_ += size;
   1352     }
   1353   }
   1354 
   1355   // ===========================================================================
   1356   // Prologue/epilogue callback methods. =======================================
   1357   // ===========================================================================
   1358 
   1359   void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
   1360                              GCType gc_type_filter, bool pass_isolate = true);
   1361   void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);
   1362 
   1363   void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
   1364                              GCType gc_type_filter, bool pass_isolate = true);
   1365   void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);
   1366 
   1367   void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
   1368   void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
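          // Registration sketch (the callback body is hypothetical; removal is
          // matched purely on the function pointer, see GCCallbackPair below):
          //
          //   void OnFullGC(v8::Isolate* isolate, GCType type,
          //                 GCCallbackFlags flags) { /* record metrics */ }
          //   ...
          //   heap->AddGCPrologueCallback(OnFullGC, kGCTypeMarkSweepCompact);
          //   ...
          //   heap->RemoveGCPrologueCallback(OnFullGC);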
   1369 
   1370   // ===========================================================================
   1371   // Allocation methods. =======================================================
   1372   // ===========================================================================
   1373 
   1374   // Creates a filler object and returns a heap object immediately after it.
   1375   MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
   1376                                                 int filler_size);
   1377 
   1378   // Creates a filler object if needed for alignment and returns a heap object
   1379   // immediately after it. If any space is left after the returned object,
   1380   // another filler object is created so the over-allocated memory is iterable.
   1381   MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
   1382                                               int object_size,
   1383                                               int allocation_size,
   1384                                               AllocationAlignment alignment);
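          // Layout sketch for a kDoubleAligned request on a word-aligned start
          // (allocation_size = object_size + kPointerSize of slack; simplified):
          //
          //   [ 1-word filler ][ object ............... ]   start was misaligned
          //   [ object ............... ][ 1-word filler ]   start was aligned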
   1385 
   1386   // ===========================================================================
   1387   // ArrayBuffer tracking. =====================================================
   1388   // ===========================================================================
   1389 
   1390   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
   1391   void UnregisterArrayBuffer(JSArrayBuffer* buffer);
   1392 
   1393   // ===========================================================================
   1394   // Allocation site tracking. =================================================
   1395   // ===========================================================================
   1396 
   1397   // Updates the AllocationSite of a given {object}. If the global pretenuring
   1398   // storage is passed as {pretenuring_feedback}, the memento found count on
   1399   // the corresponding allocation site is immediately updated and an entry
   1400   // in the hash map is created. Otherwise the entry (including the count
   1401   // value) is cached on the local pretenuring feedback.
   1402   template <UpdateAllocationSiteMode mode>
   1403   inline void UpdateAllocationSite(HeapObject* object,
   1404                                    base::HashMap* pretenuring_feedback);
   1405 
   1406   // Removes an entry from the global pretenuring storage.
   1407   inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
   1408 
   1409   // Merges local pretenuring feedback into the global one. Note that this
   1410   // method needs to be called after evacuation, as allocation sites may be
   1411   // evacuated and this method resolves forward pointers accordingly.
   1412   void MergeAllocationSitePretenuringFeedback(
   1413       const base::HashMap& local_pretenuring_feedback);
   1414 
   1415 // =============================================================================
   1416 
   1417 #ifdef VERIFY_HEAP
   1418   // Verify the heap is in its normal state before or after a GC.
   1419   void Verify();
   1420 #endif
   1421 
   1422 #ifdef DEBUG
   1423   void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
   1424 
   1425   void TracePathToObjectFrom(Object* target, Object* root);
   1426   void TracePathToObject(Object* target);
   1427   void TracePathToGlobal();
   1428 
   1429   void Print();
   1430   void PrintHandles();
   1431 
   1432   // Report heap statistics.
   1433   void ReportHeapStatistics(const char* title);
   1434   void ReportCodeStatistics(const char* title);
   1435 #endif
   1436 
   1437  private:
   1438   class PretenuringScope;
   1439 
   1440   // The external string table is where all external strings are
   1441   // registered.  We need to keep track of such strings to properly
   1442   // finalize them.
   1443   class ExternalStringTable {
   1444    public:
   1445     // Registers an external string.
   1446     inline void AddString(String* string);
   1447 
   1448     inline void Iterate(ObjectVisitor* v);
   1449 
   1450     // Restores internal invariant and gets rid of collected strings.
   1451     // Must be called after each Iterate() that modified the strings.
   1452     void CleanUp();
   1453 
   1454     // Destroys all allocated memory.
   1455     void TearDown();
   1456 
   1457    private:
   1458     explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
   1459 
   1460     inline void Verify();
   1461 
   1462     inline void AddOldString(String* string);
   1463 
   1464     // Notifies the table that only a prefix of the new list is valid.
   1465     inline void ShrinkNewStrings(int position);
   1466 
   1467   // To speed up scavenge collections, new space strings are kept
   1468     // separate from old space strings.
   1469     List<Object*> new_space_strings_;
   1470     List<Object*> old_space_strings_;
   1471 
   1472     Heap* heap_;
   1473 
   1474     friend class Heap;
   1475 
   1476     DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
   1477   };
   1478 
   1479   struct StrongRootsList;
   1480 
   1481   struct StringTypeTable {
   1482     InstanceType type;
   1483     int size;
   1484     RootListIndex index;
   1485   };
   1486 
   1487   struct ConstantStringTable {
   1488     const char* contents;
   1489     RootListIndex index;
   1490   };
   1491 
   1492   struct StructTable {
   1493     InstanceType type;
   1494     int size;
   1495     RootListIndex index;
   1496   };
   1497 
   1498   struct GCCallbackPair {
   1499     GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
   1500                    bool pass_isolate)
   1501         : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}
   1502 
   1503     bool operator==(const GCCallbackPair& other) const {
   1504       return other.callback == callback;
   1505     }
   1506 
   1507     v8::Isolate::GCCallback callback;
   1508     GCType gc_type;
   1509     bool pass_isolate;
   1510   };
   1511 
   1512   typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
   1513                                                         Object** pointer);
   1514 
   1515   static const int kInitialStringTableSize = 2048;
   1516   static const int kInitialEvalCacheSize = 64;
   1517   static const int kInitialNumberStringCacheSize = 256;
   1518 
   1519   static const int kRememberedUnmappedPages = 128;
   1520 
   1521   static const StringTypeTable string_type_table[];
   1522   static const ConstantStringTable constant_string_table[];
   1523   static const StructTable struct_table[];
   1524 
   1525   static const int kYoungSurvivalRateHighThreshold = 90;
   1526   static const int kYoungSurvivalRateAllowedDeviation = 15;
   1527   static const int kOldSurvivalRateLowThreshold = 10;
   1528 
   1529   static const int kMaxMarkCompactsInIdleRound = 7;
   1530   static const int kIdleScavengeThreshold = 5;
   1531 
   1532   static const int kInitialFeedbackCapacity = 256;
   1533 
   1534   Heap();
   1535 
   1536   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
   1537       Heap* heap, Object** pointer);
   1538 
   1539   // Selects the proper allocation space based on the pretenuring decision.
   1540   static AllocationSpace SelectSpace(PretenureFlag pretenure) {
   1541     return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
   1542   }
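          // E.g. (a sketch): AllocateRaw(size, SelectSpace(pretenure)) sends
          // TENURED requests to old space and everything else to new space.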
   1543 
   1544 #define ROOT_ACCESSOR(type, name, camel_name) \
   1545   inline void set_##name(type* value);
   1546   ROOT_LIST(ROOT_ACCESSOR)
   1547 #undef ROOT_ACCESSOR
   1548 
   1549   StoreBuffer* store_buffer() { return &store_buffer_; }
   1550 
   1551   void set_current_gc_flags(int flags) {
   1552     current_gc_flags_ = flags;
   1553     DCHECK(!ShouldFinalizeIncrementalMarking() ||
   1554            !ShouldAbortIncrementalMarking());
   1555   }
   1556 
   1557   inline bool ShouldReduceMemory() const {
   1558     return current_gc_flags_ & kReduceMemoryFootprintMask;
   1559   }
   1560 
   1561   inline bool ShouldAbortIncrementalMarking() const {
   1562     return current_gc_flags_ & kAbortIncrementalMarkingMask;
   1563   }
   1564 
   1565   inline bool ShouldFinalizeIncrementalMarking() const {
   1566     return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
   1567   }
   1568 
   1569   void PreprocessStackTraces();
   1570 
   1571   // Checks whether a global GC is necessary.
   1572   GarbageCollector SelectGarbageCollector(AllocationSpace space,
   1573                                           const char** reason);
   1574 
   1575   // Makes sure there is a filler value behind the top of the new space
   1576   // so that the GC does not confuse some uninitialized/stale memory
   1577   // with the allocation memento of the object at the top.
   1578   void EnsureFillerObjectAtTop();
   1579 
   1580   // Ensure that we have swept all spaces in such a way that we can iterate
   1581   // over all objects.  May cause a GC.
   1582   void MakeHeapIterable();
   1583 
   1584   // Performs a garbage collection operation.
   1585   // Returns whether there is a chance that another major GC could
   1586   // collect more garbage.
   1587   bool CollectGarbage(
   1588       GarbageCollector collector, const char* gc_reason,
   1589       const char* collector_reason,
   1590       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1591 
   1592   // Performs garbage collection.
   1593   // Returns whether there is a chance that another major GC could
   1594   // collect more garbage.
   1595   bool PerformGarbageCollection(
   1596       GarbageCollector collector,
   1597       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1598 
   1599   inline void UpdateOldSpaceLimits();
   1600 
   1601   // Initializes a JSObject based on its map.
   1602   void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
   1603                                  Map* map);
   1604 
   1605   // Initializes JSObject body starting at given offset.
   1606   void InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset);
   1607 
   1608   void InitializeAllocationMemento(AllocationMemento* memento,
   1609                                    AllocationSite* allocation_site);
   1610 
   1611   bool CreateInitialMaps();
   1612   void CreateInitialObjects();
   1613 
   1614   // These two Create*EntryStub functions are here and forced to not be inlined
   1615   // because of a gcc-4.4 bug that assigns wrong vtable entries.
   1616   NO_INLINE(void CreateJSEntryStub());
   1617   NO_INLINE(void CreateJSConstructEntryStub());
   1618 
   1619   void CreateFixedStubs();
   1620 
   1621   HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
   1622 
   1623   // Commits from space if it is uncommitted.
   1624   void EnsureFromSpaceIsCommitted();
   1625 
   1626   // Uncommits the unused semi-space.
   1627   bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
   1628 
   1629   // Fills from space with bogus values.
   1630   void ZapFromSpace();
   1631 
   1632   // Deopts all code that contains allocation instructions which are tenured or
   1633   // not tenured. Moreover, it clears the pretenuring allocation site statistics.
   1634   void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
   1635 
   1636   // Evaluates local pretenuring for the old space and calls
   1637   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
   1638   // the old space.
   1639   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
   1640 
   1641   // Record statistics before and after garbage collection.
   1642   void ReportStatisticsBeforeGC();
   1643   void ReportStatisticsAfterGC();
   1644 
   1645   // Returns the length of the full-sized number string cache.
   1646   int FullSizeNumberStringCacheLength();
   1647   // Flush the number to string cache.
   1648   void FlushNumberStringCache();
   1649 
   1650   // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
   1651   // Re-visit incremental marking heuristics.
   1652   bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
   1653 
   1654   void ConfigureInitialOldGenerationSize();
   1655 
   1656   bool HasLowYoungGenerationAllocationRate();
   1657   bool HasLowOldGenerationAllocationRate();
   1658   double YoungGenerationMutatorUtilization();
   1659   double OldGenerationMutatorUtilization();
   1660 
   1661   void ReduceNewSpaceSize();
   1662 
   1663   bool TryFinalizeIdleIncrementalMarking(
   1664       double idle_time_in_ms, size_t size_of_objects,
   1665       size_t mark_compact_speed_in_bytes_per_ms);
   1666 
   1667   GCIdleTimeHeapState ComputeHeapState();
   1668 
   1669   bool PerformIdleTimeAction(GCIdleTimeAction action,
   1670                              GCIdleTimeHeapState heap_state,
   1671                              double deadline_in_ms);
   1672 
   1673   void IdleNotificationEpilogue(GCIdleTimeAction action,
   1674                                 GCIdleTimeHeapState heap_state, double start_ms,
   1675                                 double deadline_in_ms);
   1676 
   1677   inline void UpdateAllocationsHash(HeapObject* object);
   1678   inline void UpdateAllocationsHash(uint32_t value);
   1679   void PrintAlloctionsHash();
   1680 
   1681   void AddToRingBuffer(const char* string);
   1682   void GetFromRingBuffer(char* buffer);
   1683 
   1684   void CompactRetainedMaps(ArrayList* retained_maps);
   1685 
   1686   void CollectGarbageOnMemoryPressure(const char* source);
   1687 
   1688   // Attempt to over-approximate the weak closure by marking object groups and
   1689   // implicit references from global handles, but don't atomically complete
   1690   // marking. If we continue to mark incrementally, we might have marked
   1691   // objects that die later.
   1692   void FinalizeIncrementalMarking(const char* gc_reason);
   1693 
   1694   // Returns the timer used for a given GC type.
   1695   // - GCScavenger: young generation GC
   1696   // - GCCompactor: full GC
   1697   // - GCFinalizeMC: finalization of incremental full GC
   1698   // - GCFinalizeMCReduceMemory: finalization of incremental full GC with
   1699   // memory reduction
   1700   HistogramTimer* GCTypeTimer(GarbageCollector collector);
   1701 
   1702   // ===========================================================================
   1703   // Pretenuring. ==============================================================
   1704   // ===========================================================================
   1705 
   1706   // Pretenuring decisions are made based on feedback collected during new space
   1707   // evacuation. Note that between feedback collection and calling this method
   1708   // objects in old space must not move.
   1709   void ProcessPretenuringFeedback();
   1710 
   1711   // ===========================================================================
   1712   // Actual GC. ================================================================
   1713   // ===========================================================================
   1714 
   1715   // Code that should be run before and after each GC.  Includes some
   1716   // reporting/verification activities when compiled with DEBUG set.
   1717   void GarbageCollectionPrologue();
   1718   void GarbageCollectionEpilogue();
   1719 
   1720   // Performs a major collection in the whole heap.
   1721   void MarkCompact();
   1722 
   1723   // Code to be run before and after mark-compact.
   1724   void MarkCompactPrologue();
   1725   void MarkCompactEpilogue();
   1726 
   1727   // Performs a minor collection in the new generation.
   1728   void Scavenge();
   1729 
   1730   Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front,
   1731                      PromotionMode promotion_mode);
   1732 
   1733   void UpdateNewSpaceReferencesInExternalStringTable(
   1734       ExternalStringTableUpdaterCallback updater_func);
   1735 
   1736   void UpdateReferencesInExternalStringTable(
   1737       ExternalStringTableUpdaterCallback updater_func);
   1738 
   1739   void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
   1740   void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
   1741   void ProcessNativeContexts(WeakObjectRetainer* retainer);
   1742   void ProcessAllocationSites(WeakObjectRetainer* retainer);
   1743   void ProcessWeakListRoots(WeakObjectRetainer* retainer);
   1744 
   1745   // ===========================================================================
   1746   // GC statistics. ============================================================
   1747   // ===========================================================================
   1748 
   1749   inline intptr_t OldGenerationSpaceAvailable() {
   1750     return old_generation_allocation_limit_ - PromotedTotalSize();
   1751   }
   1752 
   1753   // Returns the maximum GC pause.
   1754   double get_max_gc_pause() { return max_gc_pause_; }
   1755 
   1756   // Returns the maximum size of objects alive after GC.
   1757   intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
   1758 
   1759   // Returns the minimal interval between two subsequent collections.
   1760   double get_min_in_mutator() { return min_in_mutator_; }
   1761 
   1762   // Update GC statistics that are tracked on the Heap.
   1763   void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
   1764                                     double marking_time);
   1765 
   1766   bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
   1767 
   1768   // ===========================================================================
   1769   // Growing strategy. =========================================================
   1770   // ===========================================================================
   1771 
   1772   // Decrease the allocation limit if the new limit based on the given
   1773   // parameters is lower than the current limit.
   1774   void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
   1775                                           double gc_speed,
   1776                                           double mutator_speed);
   1777 
   1778 
   1779   // Calculates the allocation limit based on a given growing factor and a
   1780   // given old generation size.
   1781   intptr_t CalculateOldGenerationAllocationLimit(double factor,
   1782                                                  intptr_t old_gen_size);
   1783 
   1784   // Sets the allocation limit to trigger the next full garbage collection.
   1785   void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
   1786                                        double mutator_speed);
   1787 
   1788   // ===========================================================================
   1789   // Idle notification. ========================================================
   1790   // ===========================================================================
   1791 
   1792   bool RecentIdleNotificationHappened();
   1793   void ScheduleIdleScavengeIfNeeded(int bytes_allocated);
   1794 
   1795   // ===========================================================================
   1796   // HeapIterator helpers. =====================================================
   1797   // ===========================================================================
   1798 
   1799   void heap_iterator_start() { heap_iterator_depth_++; }
   1800 
   1801   void heap_iterator_end() { heap_iterator_depth_--; }
   1802 
   1803   bool in_heap_iterator() { return heap_iterator_depth_ > 0; }
   1804 
   1805   // ===========================================================================
   1806   // Allocation methods. =======================================================
   1807   // ===========================================================================
   1808 
   1809   // Returns a deep copy of the JavaScript object.
   1810   // Properties and elements are copied too.
   1811   // Optionally takes an AllocationSite to be appended in an AllocationMemento.
   1812   MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
   1813                                                 AllocationSite* site = NULL);
   1814 
   1815   // Allocates a JS Map in the heap.
   1816   MUST_USE_RESULT AllocationResult
   1817   AllocateMap(InstanceType instance_type, int instance_size,
   1818               ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
   1819 
   1820   // Allocates and initializes a new JavaScript object based on a
   1821   // constructor.
   1822   // If allocation_site is non-null, then a memento is emitted after the object
   1823   // that points to the site.
   1824   MUST_USE_RESULT AllocationResult AllocateJSObject(
   1825       JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
   1826       AllocationSite* allocation_site = NULL);
   1827 
   1828   // Allocates and initializes a new JavaScript object based on a map.
   1829   // Passing an allocation site means that a memento will be created that
   1830   // points to the site.
   1831   MUST_USE_RESULT AllocationResult
   1832   AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
   1833                           AllocationSite* allocation_site = NULL);
   1834 
   1835   // Allocates a HeapNumber from value.
   1836   MUST_USE_RESULT AllocationResult
   1837   AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
   1838                      PretenureFlag pretenure = NOT_TENURED);
   1839 
   1840 // Allocates SIMD values from the given lane values.
   1841 #define SIMD_ALLOCATE_DECLARATION(TYPE, Type, type, lane_count, lane_type) \
   1842   AllocationResult Allocate##Type(lane_type lanes[lane_count],             \
   1843                                   PretenureFlag pretenure = NOT_TENURED);
   1844   SIMD128_TYPES(SIMD_ALLOCATE_DECLARATION)
   1845 #undef SIMD_ALLOCATE_DECLARATION
   1846 
   1847   // Allocates a byte array of the specified length.
   1848   MUST_USE_RESULT AllocationResult
   1849   AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
   1850 
   1851   // Allocates a bytecode array with given contents.
   1852   MUST_USE_RESULT AllocationResult
   1853   AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
   1854                         int parameter_count, FixedArray* constant_pool);
   1855 
   1856   MUST_USE_RESULT AllocationResult CopyCode(Code* code);
   1857 
   1858   MUST_USE_RESULT AllocationResult
   1859   CopyBytecodeArray(BytecodeArray* bytecode_array);
   1860 
   1861   // Allocates a fixed array initialized with undefined values.
   1862   MUST_USE_RESULT AllocationResult
   1863   AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
   1864 
   1865   // Allocate an uninitialized object.  The memory is non-executable if the
   1866   // hardware and OS allow.  This is the single choke-point for allocations
   1867   // performed by the runtime and should not be bypassed (to extend this to
   1868   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   1869   MUST_USE_RESULT inline AllocationResult AllocateRaw(
   1870       int size_in_bytes, AllocationSpace space,
   1871       AllocationAlignment alignment = kWordAligned);
   1872 
   1873   // Allocates a heap object based on the map.
   1874   MUST_USE_RESULT AllocationResult
   1875       Allocate(Map* map, AllocationSpace space,
   1876                AllocationSite* allocation_site = NULL);
   1877 
   1878   // Allocates a partial map for bootstrapping.
   1879   MUST_USE_RESULT AllocationResult
   1880       AllocatePartialMap(InstanceType instance_type, int instance_size);
   1881 
   1882   // Allocate a block of memory in the given space (filled with a filler).
   1883   // Used as a fall-back for generated code when the space is full.
   1884   MUST_USE_RESULT AllocationResult
   1885       AllocateFillerObject(int size, bool double_align, AllocationSpace space);
   1886 
   1887   // Allocate an uninitialized fixed array.
   1888   MUST_USE_RESULT AllocationResult
   1889       AllocateRawFixedArray(int length, PretenureFlag pretenure);
   1890 
   1891   // Allocate an uninitialized fixed double array.
   1892   MUST_USE_RESULT AllocationResult
   1893       AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
   1894 
   1895   // Allocate an initialized fixed array with the given filler value.
   1896   MUST_USE_RESULT AllocationResult
   1897       AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
   1898                                    Object* filler);
   1899 
   1900   // Allocates and partially initializes a String.  There are two String
   1901   // encodings: one-byte and two-byte.  These functions allocate a string of
   1902   // the given length and set its map and length fields.  The characters of
   1903   // the string are uninitialized.
   1904   MUST_USE_RESULT AllocationResult
   1905       AllocateRawOneByteString(int length, PretenureFlag pretenure);
   1906   MUST_USE_RESULT AllocationResult
   1907       AllocateRawTwoByteString(int length, PretenureFlag pretenure);
   1908 
   1909   // Allocates an internalized string in old space based on the character
   1910   // stream.
   1911   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
   1912       Vector<const char> str, int chars, uint32_t hash_field);
   1913 
   1914   MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
   1915       Vector<const uint8_t> str, uint32_t hash_field);
   1916 
   1917   MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
   1918       Vector<const uc16> str, uint32_t hash_field);
   1919 
   1920   template <bool is_one_byte, typename T>
   1921   MUST_USE_RESULT AllocationResult
   1922       AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
   1923 
   1924   template <typename T>
   1925   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
   1926       T t, int chars, uint32_t hash_field);
   1927 
   1928   // Allocates an uninitialized fixed array. It must be filled by the caller.
   1929   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);
   1930 
   1931   // Make a copy of src and return it.
   1932   MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
   1933 
   1934   // Make a copy of src, also grow the copy, and return the copy.
   1935   MUST_USE_RESULT AllocationResult
   1936   CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);
   1937 
   1938   // Make a copy of the first new_len elements of src and return the copy.
   1939   MUST_USE_RESULT AllocationResult CopyFixedArrayUpTo(FixedArray* src,
   1940                                                       int new_len,
   1941                                                       PretenureFlag pretenure);
   1942 
   1943   // Make a copy of src, set the map, and return the copy.
   1944   MUST_USE_RESULT AllocationResult
   1945       CopyFixedArrayWithMap(FixedArray* src, Map* map);
   1946 
   1947   // Make a copy of src and return it.
   1948   MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
   1949       FixedDoubleArray* src);
   1950 
   1951   // Computes a single character string where the character has the given code.
   1952   // A cache is used for one-byte (Latin1) codes.
   1953   MUST_USE_RESULT AllocationResult
   1954       LookupSingleCharacterStringFromCode(uint16_t code);
   1955 
   1956   // Allocate a symbol in old space.
   1957   MUST_USE_RESULT AllocationResult AllocateSymbol();
   1958 
   1959   // Allocates an external array of the specified length and type.
   1960   MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
   1961       int length, ExternalArrayType array_type, void* external_pointer,
   1962       PretenureFlag pretenure);
   1963 
   1964   // Allocates a fixed typed array of the specified length and type.
   1965   MUST_USE_RESULT AllocationResult
   1966   AllocateFixedTypedArray(int length, ExternalArrayType array_type,
   1967                           bool initialize, PretenureFlag pretenure);
   1968 
   1969   // Make a copy of src and return it.
   1970   MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
   1971 
   1972   // Make a copy of src, set the map, and return the copy.
   1973   MUST_USE_RESULT AllocationResult
   1974       CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);
   1975 
   1976   // Allocates a fixed double array with uninitialized values.
   1977   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
   1978       int length, PretenureFlag pretenure = NOT_TENURED);
   1979 
   1980   // Allocate empty fixed array.
   1981   MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
   1982 
   1983   // Allocate empty fixed typed array of given type.
   1984   MUST_USE_RESULT AllocationResult
   1985       AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
   1986 
   1987   // Allocate a tenured simple cell.
   1988   MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
   1989 
   1990   // Allocate a tenured JS global property cell initialized with the hole.
   1991   MUST_USE_RESULT AllocationResult AllocatePropertyCell();
   1992 
   1993   MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
   1994 
   1995   MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);
   1996 
   1997   // Allocates a new utility object in the old generation.
   1998   MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
   1999 
   2000   // Allocates a new foreign object.
   2001   MUST_USE_RESULT AllocationResult
   2002       AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
   2003 
   2004   MUST_USE_RESULT AllocationResult
   2005       AllocateCode(int object_size, bool immovable);
   2006 
   2007   MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);
   2008 
   2009   MUST_USE_RESULT AllocationResult InternalizeString(String* str);
   2010 
   2011   // ===========================================================================
   2012 
   2013   void set_force_oom(bool value) { force_oom_ = value; }
   2014 
   2015   // The amount of external memory registered through the API.
   2016   int64_t external_memory_;
   2017 
   2018   // The limit at which to trigger memory pressure from the API.
   2019   int64_t external_memory_limit_;
   2020 
   2021   // Caches the amount of external memory registered at the last MC.
   2022   int64_t external_memory_at_last_mark_compact_;
   2023 
   2024   // The amount of memory that has been freed concurrently.
   2025   base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;
   2026 
   2027   // This can be calculated directly from a pointer to the heap; however, it is
   2028   // more expedient to get at the isolate directly from within Heap methods.
   2029   Isolate* isolate_;
   2030 
   2031   Object* roots_[kRootListLength];
   2032 
   2033   size_t code_range_size_;
   2034   int max_semi_space_size_;
   2035   int initial_semispace_size_;
   2036   intptr_t max_old_generation_size_;
   2037   intptr_t initial_old_generation_size_;
   2038   bool old_generation_size_configured_;
   2039   intptr_t max_executable_size_;
   2040   intptr_t maximum_committed_;
   2041 
   2042   // For keeping track of how much data has survived
   2043   // scavenge since last new space expansion.
   2044   intptr_t survived_since_last_expansion_;
   2045 
   2046   // ... and since the last scavenge.
   2047   intptr_t survived_last_scavenge_;
   2048 
   2049   // This is not the depth of nested AlwaysAllocateScope's but rather a single
   2050   // count, as scopes can be acquired from multiple tasks (read: threads).
   2051   base::AtomicNumber<size_t> always_allocate_scope_count_;
   2052 
   2053   // Stores the memory pressure level that is set by MemoryPressureNotification
   2054   // and reset by a mark-compact garbage collection.
   2055   base::AtomicValue<MemoryPressureLevel> memory_pressure_level_;
   2056 
   2057   // For keeping track of context disposals.
   2058   int contexts_disposed_;
   2059 
   2060   // The length of the retained_maps array at the time of context disposal.
   2061   // This separates maps in the retained_maps array that were created before
   2062   // and after context disposal.
   2063   int number_of_disposed_maps_;
   2064 
   2065   int global_ic_age_;
   2066 
   2067   NewSpace new_space_;
   2068   OldSpace* old_space_;
   2069   OldSpace* code_space_;
   2070   MapSpace* map_space_;
   2071   LargeObjectSpace* lo_space_;
   2072   HeapState gc_state_;
   2073   int gc_post_processing_depth_;
   2074   Address new_space_top_after_last_gc_;
   2075 
   2076   // Returns the amount of external memory registered since the last global GC.
   2077   int64_t PromotedExternalMemorySize();
   2078 
   2079   // How many "runtime allocations" happened.
   2080   uint32_t allocations_count_;
   2081 
   2082   // Running hash over allocations performed.
   2083   uint32_t raw_allocations_hash_;
   2084 
   2085   // How many mark-sweep collections happened.
   2086   unsigned int ms_count_;
   2087 
   2088   // How many GCs happened.
   2089   unsigned int gc_count_;
   2090 
   2091   // For post mortem debugging.
   2092   int remembered_unmapped_pages_index_;
   2093   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
   2094 
   2095 #ifdef DEBUG
   2096   // If the --gc-interval flag is set to a positive value, this
   2097   // variable holds the number of allocations that remain until the
   2098   // next failure and garbage collection.
   2099   int allocation_timeout_;
   2100 #endif  // DEBUG
   2101 
   2102   // Limit that triggers a global GC on the next (normally caused) GC.  This
   2103   // is checked when we have already decided to do a GC to help determine
   2104   // which collector to invoke, before expanding a paged space in the old
   2105   // generation and on every allocation in large object space.
   2106   intptr_t old_generation_allocation_limit_;
   2107 
   2108   // Indicates that an allocation has failed in the old generation since the
   2109   // last GC.
   2110   bool old_gen_exhausted_;
   2111 
   2112   // Indicates that memory usage is more important than latency.
   2113   // TODO(ulan): Merge it with memory reducer once chromium:490559 is fixed.
   2114   bool optimize_for_memory_usage_;
   2115 
   2116   // Indicates that inline bump-pointer allocation has been globally disabled
   2117   // for all spaces. This is used to disable allocations in generated code.
   2118   bool inline_allocation_disabled_;
   2119 
   2120   // Weak list heads, threaded through the objects.
   2121   // List heads are initialized lazily and contain the undefined_value at start.
   2122   Object* native_contexts_list_;
   2123   Object* allocation_sites_list_;
   2124 
   2125   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   2126   // marking. It is initialized during marking, destroyed after marking and
   2127   // contains Smi(0) while marking is not active.
   2128   Object* encountered_weak_collections_;
   2129 
   2130   Object* encountered_weak_cells_;
   2131 
   2132   Object* encountered_transition_arrays_;
   2133 
   2134   List<GCCallbackPair> gc_epilogue_callbacks_;
   2135   List<GCCallbackPair> gc_prologue_callbacks_;
   2136 
   2137   // Total RegExp code ever generated.
   2138   double total_regexp_code_generated_;
   2139 
   2140   int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
   2141 
   2142   GCTracer* tracer_;
   2143 
   2144   int high_survival_rate_period_length_;
   2145   intptr_t promoted_objects_size_;
   2146   double promotion_ratio_;
   2147   double promotion_rate_;
   2148   intptr_t semi_space_copied_object_size_;
   2149   intptr_t previous_semi_space_copied_object_size_;
   2150   double semi_space_copied_rate_;
   2151   int nodes_died_in_new_space_;
   2152   int nodes_copied_in_new_space_;
   2153   int nodes_promoted_;
   2154 
   2155   // This is the pretenuring trigger for allocation sites that are in the
   2156   // maybe-tenure state. When we have reached the maximum new space size we
   2157   // deoptimize the code that belongs to the allocation site and derive the
   2158   // lifetime of the allocation site.
   2159   unsigned int maximum_size_scavenges_;
   2160 
   2161   // Maximum GC pause.
   2162   double max_gc_pause_;
   2163 
   2164   // Total time spent in GC.
   2165   double total_gc_time_ms_;
   2166 
   2167   // Maximum size of objects alive after GC.
   2168   intptr_t max_alive_after_gc_;
   2169 
   2170   // Minimal interval between two subsequent collections.
   2171   double min_in_mutator_;
   2172 
   2173   // Cumulative GC time spent in marking.
   2174   double marking_time_;
   2175 
   2176   // Cumulative GC time spent in sweeping.
   2177   double sweeping_time_;
   2178 
   2179   // Last time an idle notification happened.
   2180   double last_idle_notification_time_;
   2181 
   2182   // Last time a garbage collection happened.
   2183   double last_gc_time_;
   2184 
   2185   Scavenger* scavenge_collector_;
   2186 
   2187   MarkCompactCollector* mark_compact_collector_;
   2188 
   2189   MemoryAllocator* memory_allocator_;
   2190 
   2191   StoreBuffer store_buffer_;
   2192 
   2193   IncrementalMarking* incremental_marking_;
   2194 
   2195   GCIdleTimeHandler* gc_idle_time_handler_;
   2196 
   2197   MemoryReducer* memory_reducer_;
   2198 
   2199   ObjectStats* object_stats_;
   2200 
   2201   ScavengeJob* scavenge_job_;
   2202 
   2203   AllocationObserver* idle_scavenge_observer_;
   2204 
   2205   // These two counters are monotonically increasing and never reset.
   2206   size_t full_codegen_bytes_generated_;
   2207   size_t crankshaft_codegen_bytes_generated_;
   2208 
   2209   // This counter is increased before each GC and never reset.
   2210   // To account for the bytes allocated since the last GC, use the
   2211   // NewSpaceAllocationCounter() function.
   2212   size_t new_space_allocation_counter_;
   2213 
   2214   // This counter is increased before each GC and never reset. To
   2215   // account for the bytes allocated since the last GC, use the
   2216   // OldGenerationAllocationCounter() function.
   2217   size_t old_generation_allocation_counter_;
   2218 
   2219   // The size of objects in old generation after the last MarkCompact GC.
   2220   size_t old_generation_size_at_last_gc_;
   2221 
   2222   // If the --deopt_every_n_garbage_collections flag is set to a positive value,
   2223   // this variable holds the number of garbage collections since the last
   2224   // deoptimization triggered by garbage collection.
   2225   int gcs_since_last_deopt_;
   2226 
   2227   // The feedback storage is used to store allocation sites (keys) and how often
   2228   // they have been visited (values) by finding a memento behind an object. The
   2229   // storage is only alive temporarily during a GC. The invariant is that all
   2230   // pointers in this map are already fixed, i.e., they do not point to
   2231   // forwarding pointers.
   2232   base::HashMap* global_pretenuring_feedback_;
   2233 
   2234   char trace_ring_buffer_[kTraceRingBufferSize];
   2235   // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
   2236   // full then the data is from ring_buffer_end_ to the end of the buffer and
   2237   // from 0 to ring_buffer_end_.
   2238   bool ring_buffer_full_;
   2239   size_t ring_buffer_end_;
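          // Read-back sketch of the wrap-around layout described above ("out" is
          // a hypothetical destination of at least kTraceRingBufferSize bytes):
          //
          //   size_t n = 0;
          //   if (ring_buffer_full_) {
          //     n = kTraceRingBufferSize - ring_buffer_end_;
          //     memcpy(out, trace_ring_buffer_ + ring_buffer_end_, n);
          //   }
          //   memcpy(out + n, trace_ring_buffer_, ring_buffer_end_);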
   2240 
   2241   // Shared state read by the scavenge collector and set by ScavengeObject.
   2242   PromotionQueue promotion_queue_;
   2243 
   2244   // Flag is set when the heap has been configured.  The heap can be repeatedly
   2245   // configured through the API until it is set up.
   2246   bool configured_;
   2247 
   2248   // Currently set GC flags that are respected by all GC components.
   2249   int current_gc_flags_;
   2250 
   2251   // Currently set GC callback flags that are used to pass information between
   2252   // the embedder and V8's GC.
   2253   GCCallbackFlags current_gc_callback_flags_;
   2254 
   2255   ExternalStringTable external_string_table_;
   2256 
   2257   base::Mutex relocation_mutex_;
   2258 
   2259   int gc_callbacks_depth_;
   2260 
   2261   bool deserialization_complete_;
   2262 
   2263   StrongRootsList* strong_roots_list_;
   2264 
   2265   // The depth of HeapIterator nestings.
   2266   int heap_iterator_depth_;
   2267 
   2268   // Used for testing purposes.
   2269   bool force_oom_;
   2270 
   2271   // Classes in "heap" can be friends.
   2272   friend class AlwaysAllocateScope;
   2273   friend class GCCallbacksScope;
   2274   friend class GCTracer;
   2275   friend class HeapIterator;
   2276   friend class IdleScavengeObserver;
   2277   friend class IncrementalMarking;
   2278   friend class IteratePromotedObjectsVisitor;
   2279   friend class MarkCompactCollector;
   2280   friend class MarkCompactMarkingVisitor;
   2281   friend class NewSpace;
   2282   friend class ObjectStatsCollector;
   2283   friend class Page;
   2284   friend class Scavenger;
   2285   friend class StoreBuffer;
   2286   friend class TestMemoryAllocatorScope;
   2287 
   2288   // The allocator interface.
   2289   friend class Factory;
   2290 
   2291   // The Isolate constructs us.
   2292   friend class Isolate;
   2293 
   2294   // Used in cctest.
   2295   friend class HeapTester;
   2296 
   2297   DISALLOW_COPY_AND_ASSIGN(Heap);
   2298 };
   2299 
   2300 
class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                       //  0
  int* new_space_size;                     //  1
  int* new_space_capacity;                 //  2
  intptr_t* old_space_size;                //  3
  intptr_t* old_space_capacity;            //  4
  intptr_t* code_space_size;               //  5
  intptr_t* code_space_capacity;           //  6
  intptr_t* map_space_size;                //  7
  intptr_t* map_space_capacity;            //  8
  intptr_t* lo_space_size;                 //  9
  int* global_handle_count;                // 10
  int* weak_global_handle_count;           // 11
  int* pending_global_handle_count;        // 12
  int* near_death_global_handle_count;     // 13
  int* free_global_handle_count;           // 14
  intptr_t* memory_allocator_size;         // 15
  intptr_t* memory_allocator_capacity;     // 16
  int* objects_per_type;                   // 17
  int* size_per_type;                      // 18
  int* os_error;                           // 19
  char* last_few_messages;                 // 20
  char* js_stacktrace;                     // 21
  int* end_marker;                         // 22
};
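
// Editor's note: every HeapStats field is an out-pointer that the heap fills
// in, bracketed by the start/end markers for corruption checks. A hedged
// sketch of wiring a few slots (the RecordStats entry point is assumed from
// its declaration earlier in this header):
#if 0  // Illustrative only.
void CaptureHeapStats(Heap* heap) {
  int start_marker = 0, end_marker = 0;                 // slots 0 and 22
  intptr_t old_space_size = 0, old_space_capacity = 0;  // slots 3 and 4
  HeapStats stats;
  stats.start_marker = &start_marker;
  stats.old_space_size = &old_space_size;
  stats.old_space_capacity = &old_space_capacity;
  stats.end_marker = &end_marker;
  // ... wire the remaining slots the same way ...
  heap->RecordStats(&stats, false /* take_snapshot */);
  CHECK_EQ(HeapStats::kStartMarker, start_marker);
}
#endif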


class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();

 private:
  Heap* heap_;
};
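
// Editor's note: a minimal usage sketch. The scope is a stack-allocated RAII
// guard; while it is live, allocations are expected to succeed rather than
// report failure (the exact retry semantics live in the inline definitions):
#if 0  // Illustrative only.
void Example(Isolate* isolate) {
  AlwaysAllocateScope always_allocate(isolate);
  // Allocation-heavy code that must not observe allocation failure.
}
#endif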


// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end) override;
};


// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end) override;
};


// Space iterator for iterating over all spaces of the heap.  Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();

 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all old spaces of the heap: Old space
// and code space.  Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  OldSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap: old
// space, code space and map space.  Returns each space in turn, and null
// when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  PagedSpace* next();

 private:
  Heap* heap_;
  int counter_;
};
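
// Editor's note: all three space iterators above share one protocol -- call
// next() until it returns null. A minimal sketch:
#if 0  // Illustrative only.
void VisitAllSpaces(Heap* heap) {
  AllSpaces spaces(heap);
  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
    // ... inspect *space; the same loop shape works for OldSpaces and
    // PagedSpaces ...
  }
}
#endif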


// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_;         // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
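
// Editor's note: a sketch of the has_next()/next() protocol. Each next() call
// hands back an object iterator for the next space; per the comment above, the
// SpaceIterator owns and deallocates those iterators:
#if 0  // Illustrative only.
void VisitAllObjectsBySpace(Heap* heap) {
  SpaceIterator it(heap);
  while (it.has_next()) {
    ObjectIterator* objects = it.next();  // Owned by `it`; do not delete.
    // ... walk the current space's objects through *objects ...
  }
}
#endif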


// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// each of those can only iterate over one space.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). Because the free-node
// filtering is implemented with GC marks, it can't be used during
// mark-sweep or mark-compact GC phases. Also, it is forbidden to
// interrupt iteration in this mode, as this would leave heap objects
// marked (and thus unusable).
class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);
  ~HeapIterator();

  HeapObject* next();

 private:
  struct MakeHeapIterableHelper {
    explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
  };

  HeapObject* NextObject();

  // The following two fields need to be declared in this order. Initialization
  // order guarantees that we first make the heap iterable (which may involve
  // allocations) and only then lock it down by not allowing further
  // allocations.
  MakeHeapIterableHelper make_heap_iterable_helper_;
  DisallowHeapAllocation no_heap_allocation_;

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
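
// Editor's note: the canonical whole-heap walk; next() returns null once the
// iteration is exhausted, and no allocation may happen inside the loop:
#if 0  // Illustrative only.
void VisitWholeHeap(Heap* heap) {
  HeapIterator iterator(heap);  // kNoFiltering by default.
  for (HeapObject* obj = iterator.next(); obj != NULL;
       obj = iterator.next()) {
    // ... inspect *obj ...
  }
}
#endif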


// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, kNotFound (-1) is
  // returned.
  int Lookup(Handle<Map> map, Handle<Name> name);

  // Update an element in the cache.
  void Update(Handle<Map> map, Handle<Name> name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kEntryLength = 2;
  static const int kMapIndex = 0;
  static const int kKeyIndex = 1;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Handle<Map> map, Handle<Name> name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  Address keys_address() { return reinterpret_cast<Address>(&keys_); }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
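
// Editor's note: a hedged sketch of a lookup consistent with the constants
// above (the real implementation lives in heap.cc): the hash is clipped to
// the table with kCapacityMask and bucket-aligned with kHashMask, then the
// kEntriesPerBucket consecutive slots are probed:
#if 0  // Illustrative only; this shape for Hash() and Lookup() is assumed.
int KeyedLookupCache::Lookup(Handle<Map> map, Handle<Name> name) {
  int index = Hash(map, name) & kCapacityMask & kHashMask;
  for (int i = 0; i < kEntriesPerBucket; i++) {
    Key& key = keys_[index + i];
    if (key.map == *map && key.name == *name) return field_offsets_[index + i];
  }
  return kNotFound;
}
#endif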


// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index equal to kNotFound means the property is absent.
// Cleared at startup and prior to any GC.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent from the cache, kAbsent is returned.
  inline int Lookup(Map* source, Name* name);

  // Update an element in the cache.
  inline void Update(Map* source, Name* name, int result);

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static inline int Hash(Object* source, Name* name);

  static const int kLength = 64;
  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
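
// Editor's note: a minimal sketch of the intended protocol -- consult the
// cache, fall back to the slow path on a miss, then publish the result (the
// slow-path helper here is hypothetical):
#if 0  // Illustrative only.
int FindDescriptorIndex(DescriptorLookupCache* cache, Map* map, Name* name) {
  int result = cache->Lookup(map, name);
  if (result == DescriptorLookupCache::kAbsent) {  // Cache miss.
    result = SlowDescriptorSearch(map, name);      // Hypothetical slow path.
    cache->Update(map, name, result);
  }
  return result;
}
#endif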


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Returns whether this object should be retained. If NULL is returned, the
  // object has no remaining references and is dropped. Otherwise the address
  // of the retained object should be returned, since in some GC situations
  // the object will have been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
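
// Editor's note: a minimal subclass sketch honoring the RetainAs() contract --
// return NULL to drop the object, or its (possibly moved) address to keep it:
#if 0  // Illustrative only.
class RetainEverythingRetainer final : public WeakObjectRetainer {
 public:
  Object* RetainAs(Object* object) override {
    return object;  // Keep every object at its current address.
  }
};
#endif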


#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObjects.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target, WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  void VisitPointers(Object** start, Object** end) override;

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no GC allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
#endif  // DEBUG
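
// Editor's note: a debug-only usage sketch; the root-iteration entry point and
// the VISIT_ALL/VISIT_ONLY_STRONG constants are assumed from elsewhere in this
// codebase:
#if 0  // Illustrative only (DEBUG builds).
void WhoPointsTo(Heap* heap, Object* target) {
  PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
  heap->IterateRoots(&tracer, VISIT_ONLY_STRONG);
  if (tracer.found()) {
    // A retaining path to `target` was traced from the roots.
  }
}
#endif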

// -----------------------------------------------------------------------------
// Allows observation of allocations.
class AllocationObserver {
 public:
  explicit AllocationObserver(intptr_t step_size)
      : step_size_(step_size), bytes_to_next_step_(step_size) {
    DCHECK(step_size >= kPointerSize);
  }
  virtual ~AllocationObserver() {}

  // Called each time the observed space does an allocation step. This may
  // happen more often than every step_size bytes (e.g. when there are
  // multiple observers, or when a page or space boundary is encountered).
  void AllocationStep(int bytes_allocated, Address soon_object, size_t size) {
    bytes_to_next_step_ -= bytes_allocated;
    if (bytes_to_next_step_ <= 0) {
      Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object,
           size);
      step_size_ = GetNextStepSize();
      bytes_to_next_step_ = step_size_;
    }
  }

 protected:
  intptr_t step_size() const { return step_size_; }
  intptr_t bytes_to_next_step() const { return bytes_to_next_step_; }

  // Pure virtual method implemented by subclasses that gets called when at
  // least step_size bytes have been allocated. soon_object is the address of
  // the object just allocated (but not yet initialized). size is the size of
  // the object as requested (i.e. without alignment fillers). Some
  // complexities to be aware of:
  // 1) soon_object will be nullptr in cases where we end up observing an
  //    allocation that happens to be a filler space (e.g. page boundaries).
  // 2) size is the requested size at the time of allocation. Right-trimming
  //    may change the object size dynamically.
  // 3) soon_object may actually be the first object in an allocation-folding
  //    group. In such a case size is the size of the group rather than the
  //    size of the first object.
  virtual void Step(int bytes_allocated, Address soon_object, size_t size) = 0;

  // Subclasses can override this method to make the step size dynamic.
  virtual intptr_t GetNextStepSize() { return step_size_; }

  intptr_t step_size_;
  intptr_t bytes_to_next_step_;

 private:
  friend class LargeObjectSpace;
  friend class NewSpace;
  friend class PagedSpace;
  DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
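
// Editor's note: a minimal observer sketch. Note the arithmetic in
// AllocationStep() above: Step() receives step_size_ - bytes_to_next_step_,
// i.e. the total bytes allocated since the previous step, which can exceed
// step_size_ when a single allocation overshoots the boundary:
#if 0  // Illustrative only.
class LoggingObserver final : public AllocationObserver {
 public:
  explicit LoggingObserver(intptr_t step_size)
      : AllocationObserver(step_size) {}

 protected:
  void Step(int bytes_allocated, Address soon_object, size_t size) override {
    // soon_object may be nullptr (e.g. a filler at a page boundary); see the
    // caveats documented on Step() above.
    PrintF("observed %d bytes\n", bytes_allocated);
    USE(soon_object);
    USE(size);
  }
};
#endif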

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_H_