// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include <cmath>
#include <map>

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "include/v8.h"
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/base/atomic-utils.h"
#include "src/debug/debug-interface.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/list.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

using v8::MemoryPressureLevel;

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  /* The first 32 entries are most often used in the startup snapshot and   */ \
  /* can use a shorter representation in the serialization format.          */ \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(String, empty_string, empty_string)                                        \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, one_byte_string_map, OneByteStringMap)                                \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, weak_cell_map, WeakCellMap)                                           \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, foreign_map, ForeignMap)                                              \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, transition_array_map, TransitionArrayMap)                             \
  V(Map, feedback_vector_map, FeedbackVectorMap)                               \
  V(ScopeInfo, empty_scope_info, EmptyScopeInfo)                               \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  /* Entries beyond the first 32                                            */ \
  /* The roots above this line should be boring from a GC point of view.    */ \
  /* This means they are never in new space and never on a page that is     */ \
  /* being compacted.                                                       */ \
  /* Oddballs */                                                               \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel)      \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  V(Oddball, exception, Exception)                                             \
  V(Oddball, termination_exception, TerminationException)                      \
  V(Oddball, optimized_out, OptimizedOut)                                      \
  V(Oddball, stale_register, StaleRegister)                                    \
  /* Context maps */                                                           \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, eval_context_map, EvalContextMap)                                     \
  V(Map, script_context_map, ScriptContextMap)                                 \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, debug_evaluate_context_map, DebugEvaluateContextMap)                  \
  V(Map, script_context_table_map, ScriptContextTableMap)                      \
  /* Maps */                                                                   \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap)                        \
  V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
  V(Map, unseeded_number_dictionary_map, UnseededNumberDictionaryMap)          \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, external_map, ExternalMap)                                            \
  V(Map, bytecode_array_map, BytecodeArrayMap)                                 \
  V(Map, module_info_map, ModuleInfoMap)                                       \
  V(Map, no_closures_cell_map, NoClosuresCellMap)                              \
  V(Map, one_closure_cell_map, OneClosureCellMap)                              \
  V(Map, many_closures_cell_map, ManyClosuresCellMap)                          \
  /* String maps */                                                            \
  V(Map, native_source_string_map, NativeSourceStringMap)                      \
  V(Map, string_map, StringMap)                                                \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap)                       \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, thin_one_byte_string_map, ThinOneByteStringMap)                       \
  V(Map, thin_string_map, ThinStringMap)                                       \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap)                   \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_one_byte_data_map,                               \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap)               \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map, short_external_string_with_one_byte_data_map,                         \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap)      \
  V(Map, external_internalized_string_with_one_byte_data_map,                  \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map, external_one_byte_internalized_string_map,                            \
    ExternalOneByteInternalizedStringMap)                                      \
  V(Map, short_external_internalized_string_map,                               \
    ShortExternalInternalizedStringMap)                                        \
  V(Map, short_external_internalized_string_with_one_byte_data_map,            \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map, short_external_one_byte_internalized_string_map,                      \
    ShortExternalOneByteInternalizedStringMap)                                 \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap)    \
  /* Array element maps */                                                     \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
  /* Canonical empty values */                                                 \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
    EmptyFixedUint8ClampedArray)                                               \
  V(Script, empty_script, EmptyScript)                                         \
  V(Cell, undefined_cell, UndefinedCell)                                       \
  V(FixedArray, empty_sloppy_arguments_elements, EmptySloppyArgumentsElements) \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
    EmptySlowElementDictionary)                                                \
  V(PropertyCell, empty_property_cell, EmptyPropertyCell)                      \
  V(WeakCell, empty_weak_cell, EmptyWeakCell)                                  \
  /* Protectors */                                                             \
  V(PropertyCell, array_protector, ArrayProtector)                             \
  V(Cell, is_concat_spreadable_protector, IsConcatSpreadableProtector)         \
  V(Cell, species_protector, SpeciesProtector)                                 \
  V(PropertyCell, string_length_protector, StringLengthProtector)              \
  V(Cell, fast_array_iteration_protector, FastArrayIterationProtector)         \
  V(PropertyCell, array_iterator_protector, ArrayIteratorProtector)            \
  V(PropertyCell, array_buffer_neutering_protector,                            \
    ArrayBufferNeuteringProtector)                                             \
  /* Special numbers */                                                        \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, hole_nan_value, HoleNanValue)                                  \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(HeapNumber, minus_infinity_value, MinusInfinityValue)                      \
  /* Caches */                                                                 \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(FixedArray, experimental_natives_source_cache,                             \
    ExperimentalNativesSourceCache)                                            \
  V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache)           \
  V(FixedArray, experimental_extra_natives_source_cache,                       \
    ExperimentalExtraNativesSourceCache)                                       \
  /* Lists and dictionaries */                                                 \
  V(NameDictionary, empty_properties_dictionary, EmptyPropertiesDictionary)    \
  V(NameDictionary, public_symbol_table, PublicSymbolTable)                    \
  V(NameDictionary, api_symbol_table, ApiSymbolTable)                          \
  V(NameDictionary, api_private_symbol_table, ApiPrivateSymbolTable)           \
  V(Object, script_list, ScriptList)                                           \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(FixedArray, materialized_objects, MaterializedObjects)                     \
  V(FixedArray, microtask_queue, MicrotaskQueue)                               \
  V(FixedArray, detached_contexts, DetachedContexts)                           \
  V(ArrayList, retained_maps, RetainedMaps)                                    \
  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)           \
  /* weak_new_space_object_to_code_list is an array of weak cells, where */    \
  /* slots with even indices refer to the weak object, and the subsequent */   \
  /* slots refer to the code with the reference to the weak object. */         \
  V(ArrayList, weak_new_space_object_to_code_list,                             \
    WeakNewSpaceObjectToCodeList)                                              \
  /* List to hold onto feedback vectors that we need for code coverage */      \
  V(Object, code_coverage_list, CodeCoverageList)                              \
  V(Object, weak_stack_trace_list, WeakStackTraceList)                         \
  V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos)       \
  V(FixedArray, serialized_templates, SerializedTemplates)                     \
  V(FixedArray, serialized_global_proxy_sizes, SerializedGlobalProxySizes)     \
  /* Configured values */                                                      \
  V(TemplateList, message_listeners, MessageListeners)                         \
  V(InterceptorInfo, noop_interceptor_info, NoOpInterceptorInfo)               \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  /* Oddball maps */                                                           \
  V(Map, undefined_map, UndefinedMap)                                          \
  V(Map, the_hole_map, TheHoleMap)                                             \
  V(Map, null_map, NullMap)                                                    \
  V(Map, boolean_map, BooleanMap)                                              \
  V(Map, uninitialized_map, UninitializedMap)                                  \
  V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap)   \
  V(Map, exception_map, ExceptionMap)                                          \
  V(Map, termination_exception_map, TerminationExceptionMap)                   \
  V(Map, optimized_out_map, OptimizedOutMap)                                   \
  V(Map, stale_register_map, StaleRegisterMap)                                 \
  /* per-Isolate map for JSPromiseCapability. */                               \
  /* TODO(caitp): Make this a Struct */                                        \
  V(Map, js_promise_capability_map, JSPromiseCapabilityMap)

// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V)                                                       \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(Smi, last_script_id, LastScriptId)                                         \
  V(Smi, hash_seed, HashSeed)                                                  \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(Smi, next_template_serial_number, NextTemplateSerialNumber)                \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
  V(Smi, construct_stub_create_deopt_pc_offset,                                \
    ConstructStubCreateDeoptPCOffset)                                          \
  V(Smi, construct_stub_invoke_deopt_pc_offset,                                \
    ConstructStubInvokeDeoptPCOffset)                                          \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
  V(Smi, interpreter_entry_return_pc_offset, InterpreterEntryReturnPCOffset)

#define ROOT_LIST(V)  \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V)    \
  V(StringTable, string_table, StringTable)

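// Example (illustrative): instantiating ROOT_LIST with an accessor-declaring
// macro produces one declaration per root, e.g.
//
//   #define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
//   ROOT_LIST(ROOT_ACCESSOR)
//   #undef ROOT_ACCESSOR
//
// declares `inline Map* free_space_map();`, `inline String* empty_string();`,
// and so on. Heap uses exactly this pattern for its root getters below.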

// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers. This list is not complete; some qualifying roots are
// omitted.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(ByteArrayMap)                       \
  V(BytecodeArrayMap)                   \
  V(FreeSpaceMap)                       \
  V(OnePointerFillerMap)                \
  V(TwoPointerFillerMap)                \
  V(UndefinedValue)                     \
  V(TheHoleValue)                       \
  V(NullValue)                          \
  V(TrueValue)                          \
  V(FalseValue)                         \
  V(UninitializedValue)                 \
  V(CellMap)                            \
  V(GlobalPropertyCellMap)              \
  V(SharedFunctionInfoMap)              \
  V(MetaMap)                            \
  V(HeapNumberMap)                      \
  V(MutableHeapNumberMap)               \
  V(NativeContextMap)                   \
  V(FixedArrayMap)                      \
  V(CodeMap)                            \
  V(ScopeInfoMap)                       \
  V(ModuleInfoMap)                      \
  V(FixedCOWArrayMap)                   \
  V(FixedDoubleArrayMap)                \
  V(WeakCellMap)                        \
  V(TransitionArrayMap)                 \
  V(NoInterceptorResultSentinel)        \
  V(HashTableMap)                       \
  V(OrderedHashTableMap)                \
  V(EmptyFixedArray)                    \
  V(EmptyByteArray)                     \
  V(EmptyDescriptorArray)               \
  V(ArgumentsMarker)                    \
  V(SymbolMap)                          \
  V(SloppyArgumentsElementsMap)         \
  V(FunctionContextMap)                 \
  V(CatchContextMap)                    \
  V(WithContextMap)                     \
  V(BlockContextMap)                    \
  V(ModuleContextMap)                   \
  V(EvalContextMap)                     \
  V(ScriptContextMap)                   \
  V(UndefinedMap)                       \
  V(TheHoleMap)                         \
  V(NullMap)                            \
  V(BooleanMap)                         \
  V(UninitializedMap)                   \
  V(ArgumentsMarkerMap)                 \
  V(JSMessageObjectMap)                 \
  V(ForeignMap)                         \
  V(NoClosuresCellMap)                  \
  V(OneClosureCellMap)                  \
  V(ManyClosuresCellMap)                \
  V(NanValue)                           \
  V(InfinityValue)                      \
  V(MinusZeroValue)                     \
  V(MinusInfinityValue)                 \
  V(EmptyWeakCell)                      \
  V(empty_string)                       \
  PRIVATE_SYMBOL_LIST(V)

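// A minimal sketch of how this list can drive a write-barrier-skipping
// predicate such as Heap::RootIsImmortalImmovable() (declared below);
// illustrative only:
//
//   bool Heap::RootIsImmortalImmovable(int root_index) {
//     switch (root_index) {
//   #define IMMORTAL_IMMOVABLE_ROOT(name) case Heap::k##name##RootIndex:
//       IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
//   #undef IMMORTAL_IMMOVABLE_ROOT
//         return true;
//       default:
//         return false;
//     }
//   }
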
// Forward declarations.
class AllocationObserver;
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapObjectsFilter;
class HeapStats;
class HistogramTimer;
class Isolate;
class LocalEmbedderHeapTracer;
class MemoryAllocator;
class MemoryReducer;
class ObjectIterator;
class ObjectStats;
class Page;
class PagedSpace;
class Scavenger;
class ScavengeJob;
class Space;
class StoreBuffer;
class TracePossibleWrapperReporter;
class WeakObjectRetainer;

typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};

enum class ClearRecordedSlots { kYes, kNo };

enum class GarbageCollectionReason {
  kUnknown = 0,
  kAllocationFailure = 1,
  kAllocationLimit = 2,
  kContextDisposal = 3,
  kCountersExtension = 4,
  kDebugger = 5,
  kDeserializer = 6,
  kExternalMemoryPressure = 7,
  kFinalizeMarkingViaStackGuard = 8,
  kFinalizeMarkingViaTask = 9,
  kFullHashtable = 10,
  kHeapProfiler = 11,
  kIdleTask = 12,
  kLastResort = 13,
  kLowMemoryNotification = 14,
  kMakeHeapIterable = 15,
  kMemoryPressure = 16,
  kMemoryReducer = 17,
  kRuntime = 18,
  kSamplingProfiler = 19,
  kSnapshotCreator = 20,
  kTesting = 21
  // If you add new items here, then update the incremental_marking_reason,
  // mark_compact_reason, and scavenge_reason counters in counters.h.
  // Also update src/tools/metrics/histograms/histograms.xml in chromium.
};

enum class YoungGenerationHandling {
  kRegularScavenge = 0,
  kFastPromotionDuringScavenge = 1,
  // Histogram::InspectConstructionArguments in chromium requires us to have at
  // least three buckets.
  kUnusedBucket = 2,
  // If you add new items here, then update the young_generation_handling in
  // counters.h.
  // Also update src/tools/metrics/histograms/histograms.xml in chromium.
};

// A queue of objects promoted during scavenge. Each object is accompanied by
// its size to avoid dereferencing a map pointer for scanning. The last page in
// to-space is used for the promotion queue. On conflict during scavenge, the
// promotion queue is allocated externally and all entries are copied to the
// external queue.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(nullptr),
        rear_(nullptr),
        limit_(nullptr),
        emergency_stack_(nullptr),
        heap_(heap) {}

  void Initialize();
  void Destroy();

  inline void SetNewLimit(Address limit);
  inline bool IsBelowPromotionQueue(Address to_space_top);

  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
  inline void remove(HeapObject** target, int32_t* size,
                     bool* was_marked_black);

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == nullptr || emergency_stack_->length() == 0);
  }

 private:
  struct Entry {
    Entry(HeapObject* obj, int32_t size, bool was_marked_black)
        : obj_(obj), size_(size), was_marked_black_(was_marked_black) {}

    HeapObject* obj_;
    int32_t size_ : 31;
    bool was_marked_black_ : 1;
  };

  inline Page* GetHeadPage();

  void RelocateQueueHead();

  // The front of the queue is higher in the memory page chain than the rear.
  struct Entry* front_;
  struct Entry* rear_;
  struct Entry* limit_;

  List<Entry>* emergency_stack_;
  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
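// Usage sketch (illustrative, not part of the interface): the scavenger
// pushes a promoted object together with its size and marking state, and
// later pops all three in one call:
//
//   heap->promotion_queue()->insert(target, size, was_marked_black);
//   ...
//   HeapObject* target;
//   int32_t size;
//   bool was_marked_black;
//   heap->promotion_queue()->remove(&target, &size, &was_marked_black);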

class AllocationResult {
 public:
  static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
    return AllocationResult(space);
  }

  // Implicit constructor from Object*.
  AllocationResult(Object* object)  // NOLINT
      : object_(object) {
    // AllocationResults can't return Smis, which are used to represent
    // failure and the space to retry in.
    CHECK(!object->IsSmi());
  }

  AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}

  inline bool IsRetry() { return object_->IsSmi(); }
  inline HeapObject* ToObjectChecked();
  inline AllocationSpace RetrySpace();

  template <typename T>
  bool To(T** obj) {
    if (IsRetry()) return false;
    *obj = T::cast(object_);
    return true;
  }

 private:
  explicit AllocationResult(AllocationSpace space)
      : object_(Smi::FromInt(static_cast<int>(space))) {}

  Object* object_;
};

STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
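
// Usage sketch (illustrative; AllocateRawFixedArray stands in for any of the
// internal allocators): callers unpack an AllocationResult and treat a retry
// as an allocation failure in the indicated space:
//
//   AllocationResult allocation = AllocateRawFixedArray(length, pretenure);
//   HeapObject* obj = nullptr;
//   if (!allocation.To(&obj)) {
//     // Failed; allocation.RetrySpace() names the space to GC before retry.
//   }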

#ifdef DEBUG
struct CommentStatistic {
  const char* comment;
  int size;
  int count;
  void Clear() {
    comment = NULL;
    size = 0;
    count = 0;
  }
  // Must be small, since an iteration is used for lookup.
  static const int kMaxComments = 64;
};
#endif

class NumberAndSizeInfo BASE_EMBEDDED {
 public:
  NumberAndSizeInfo() : number_(0), bytes_(0) {}

  int number() const { return number_; }
  void increment_number(int num) { number_ += num; }

  int bytes() const { return bytes_; }
  void increment_bytes(int size) { bytes_ += size; }

  void clear() {
    number_ = 0;
    bytes_ = 0;
  }

 private:
  int number_;
  int bytes_;
};

// HistogramInfo class for recording a single "bar" of a histogram.  This
// class is used for collecting statistics to print to the log file.
class HistogramInfo : public NumberAndSizeInfo {
 public:
  HistogramInfo() : NumberAndSizeInfo(), name_(nullptr) {}

  const char* name() { return name_; }
  void set_name(const char* name) { name_ = name; }

 private:
  const char* name_;
};

class Heap {
 public:
  // Declare all the root indices.  This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
        INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
            PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
                PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
                    WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
                        STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
                            kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
        kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };

  enum FindMementoMode { kForRuntime, kForGC };

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

  enum UpdateAllocationSiteMode { kGlobal, kCached };

  // Taking this lock prevents the GC from entering a phase that relocates
  // object references.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };
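  // Usage sketch (illustrative): the lock is scoped, e.g.
  //
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // Raw object addresses read here cannot be invalidated by a
  //     // relocating GC phase.
  //   }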

  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  struct Chunk {
    uint32_t size;
    Address start;
    Address end;
  };
  typedef List<Chunk> Reservation;

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  // Don't apply pointer multiplier on Android since it has no swap space and
  // should instead adapt its heap size based on available physical memory.
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
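  // Worked example: on a 64-bit target kPointerMultiplier is 8 / 4 = 2, so
  // kMaxSemiSpaceSizeHighMemoryDevice comes to 16 MB; on 32-bit targets and
  // on Android it stays at 8 MB.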

  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 1024 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  V8_EXPORT_PRIVATE static const double kMinHeapGrowingFactor;
  V8_EXPORT_PRIVATE static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kConservativeHeapGrowingFactor;
  static const double kTargetMutatorUtilization;

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  static const int kMinPromotedPercentForFastPromotionMode = 90;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Calculates the maximum amount of filler that could be required by the
  // given alignment.
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  // Calculates the actual amount of filler required for a given address at the
  // given alignment.
  static int GetFillToAlign(Address address, AllocationAlignment alignment);
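  // Worked example (illustrative): on a 32-bit target, a kDoubleAligned
  // request at an address that is 4 mod 8 needs kPointerSize (4) bytes of
  // filler, while an 8-byte-aligned address needs none. Accordingly,
  // GetMaximumFillToAlign reports kDoubleSize - kPointerSize for double
  // alignment, which is 0 on 64-bit targets where doubles are always
  // pointer-aligned.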

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  static void FatalProcessOutOfMemory(const char* location,
                                      bool is_heap_oom = false);

  static bool RootIsImmortalImmovable(int root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // Generated code can embed direct references to non-writable roots only if
  // they are not in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);

  static bool IsUnmodifiedHeapObject(Object** p);

  // Zapping is needed for heap verification and is always done in debug
  // builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
    return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
  }

  static inline GarbageCollector YoungGenerationCollector() {
    return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
  }

  static inline const char* CollectorName(GarbageCollector collector) {
    switch (collector) {
      case SCAVENGER:
        return "Scavenger";
      case MARK_COMPACTOR:
        return "Mark-Compact";
      case MINOR_MARK_COMPACTOR:
        return "Minor Mark-Compact";
    }
    return "Unknown collector";
  }

  V8_EXPORT_PRIVATE static double HeapGrowingFactor(double gc_speed,
                                                    double mutator_speed);

  // Copy a block of memory from src to dst. The block size must be a multiple
  // of the pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Determines a static visitor id based on the given {map} that can then be
  // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
  static int GetStaticVisitorIdForMap(Map* map);

  // Notifies the heap that it is ok to start marking or other activities that
  // should not happen during deserialization.
  void NotifyDeserializationComplete();

  inline Address* NewSpaceAllocationTopAddress();
  inline Address* NewSpaceAllocationLimitAddress();
  inline Address* OldSpaceAllocationTopAddress();
  inline Address* OldSpaceAllocationLimitAddress();

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // FreeSpace objects have a null map after deserialization. Update the map.
  void RepairFreeListsAfterDeserialization();

  // Move len elements within the given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages. If slots could have been recorded in
  // the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
  // pass ClearRecordedSlots::kNo.
  HeapObject* CreateFillerObjectAt(Address addr, int size,
                                   ClearRecordedSlots mode);
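  // Usage sketch (illustrative, with hypothetical old_size/new_size values):
  // when trimming an object's tail, the freed gap is plugged with a filler so
  // heap iteration still works:
  //
  //   CreateFillerObjectAt(object->address() + new_size, old_size - new_size,
  //                        ClearRecordedSlots::kYes);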

  bool CanMoveObjectStart(HeapObject* object);

  static bool IsImmovable(HeapObject* object);

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(HeapObject* object, int by);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Oddball* ToBoolean(bool condition);

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed(bool dependent_context);

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }
  void VisitEncounteredWeakCollections(ObjectVisitor* visitor) {
    visitor->VisitPointer(&encountered_weak_collections_);
  }

  void set_encountered_weak_cells(Object* weak_cell) {
    encountered_weak_cells_ = weak_cell;
  }
  Object* encountered_weak_cells() const { return encountered_weak_cells_; }

  void set_encountered_transition_arrays(Object* transition_array) {
    encountered_transition_arrays_ = transition_array;
  }
  Object* encountered_transition_arrays() const {
    return encountered_transition_arrays_;
  }

  // Number of mark-sweeps.
  int ms_count() const { return ms_count_; }

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  void CheckHandleCount();

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  inline HeapState gc_state() { return gc_state_; }
  void SetGCState(HeapState state);

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  // If an object has an AllocationMemento trailing it, return it, otherwise
  // return NULL.
  template <FindMementoMode mode>
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);
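  // Usage sketch (illustrative):
  //
  //   AllocationMemento* memento =
  //       FindAllocationMemento<Heap::kForRuntime>(object);
  //   if (memento != nullptr) { /* consult memento->GetAllocationSite() */ }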

  // Returns false if not able to reserve.
  bool ReserveSpace(Reservation* reservations, List<Address>* maps);

  //
  // Support for the API.
  //

  bool CreateApiObjects();

  // Implements the corresponding V8 API function.
  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  void MemoryPressureNotification(MemoryPressureLevel level,
                                  bool is_isolate_locked);
  void CheckMemoryPressure();

  void SetOutOfMemoryCallback(v8::debug::OutOfMemoryCallback callback,
                              void* data);

  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Check the new space expansion criteria and expand the semispaces if they
  // were hit.
  void CheckNewSpaceExpansionCriteria();

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if it has survived a scavenge operation.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearNormalizedMapCaches();

  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  inline uint32_t HashSeed();

  inline int NextScriptId();

  inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
  inline void SetConstructStubCreateDeoptPCOffset(int pc_offset);
  inline void SetConstructStubInvokeDeoptPCOffset(int pc_offset);
  inline void SetGetterStubDeoptPCOffset(int pc_offset);
  inline void SetSetterStubDeoptPCOffset(int pc_offset);
  inline void SetInterpreterEntryReturnPCOffset(int pc_offset);
  inline int GetNextTemplateSerialNumber();

  inline void SetSerializedTemplates(FixedArray* templates);
  inline void SetSerializedGlobalProxySizes(FixedArray* sizes);

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }

  int64_t external_memory() { return external_memory_; }
  void update_external_memory(int64_t delta) { external_memory_ += delta; }

  void update_external_memory_concurrently_freed(intptr_t freed) {
    external_memory_concurrently_freed_.Increment(freed);
  }

  void account_external_memory_concurrently_freed() {
    external_memory_ -= external_memory_concurrently_freed_.Value();
    external_memory_concurrently_freed_.SetValue(0);
  }
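  // A sketch of the intended protocol (illustrative): concurrent sweeper
  // threads accumulate freed external memory through the atomic counter, and
  // the main thread folds the total into external_memory_ at a safe point:
  //
  //   // On a concurrent thread:
  //   heap->update_external_memory_concurrently_freed(bytes_freed);
  //   // Later, on the main thread:
  //   heap->account_external_memory_concurrently_freed();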

  void DeoptMarkedAllocationSites();

  inline bool DeoptMaybeTenuredAllocationSites();

  void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
                                             Handle<WeakCell> code);

  void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

  void CompactWeakFixedArrays();

  void AddRetainedMap(Handle<Map> map);

  // This event is triggered after successful allocation of a new object made
  // by the runtime. Allocations of target space for object evacuation do not
  // trigger the event. In order to track ALL allocations one must turn off
  // FLAG_inline_new and FLAG_use_allocation_folding.
  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  // This event is triggered after an object is moved to a new place.
  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);

  bool deserialization_complete() const { return deserialization_complete_; }

  bool HasLowAllocationRate();
  bool HasHighFragmentation();
  bool HasHighFragmentation(size_t used, size_t committed);

  void ActivateMemoryReducerIfNeeded();

  bool ShouldOptimizeForMemoryUsage();

  bool IsLowMemoryDevice() {
    return max_old_generation_size_ <= kMaxOldSpaceSizeLowMemoryDevice;
  }

  bool IsMemoryConstrainedDevice() {
    return max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice;
  }

  bool HighMemoryPressure() {
    return memory_pressure_level_.Value() != MemoryPressureLevel::kNone;
  }

  size_t HeapLimitForDebugging() {
    const size_t kDebugHeapSizeFactor = 4;
    size_t max_limit = std::numeric_limits<size_t>::max() / 4;
    return Min(max_limit,
               initial_max_old_generation_size_ * kDebugHeapSizeFactor);
  }

  void IncreaseHeapLimitForDebugging() {
    max_old_generation_size_ =
        Max(max_old_generation_size_, HeapLimitForDebugging());
  }

  void RestoreOriginalHeapLimit() {
    // Do not set the limit lower than the live size + some slack.
    size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
    max_old_generation_size_ =
        Min(max_old_generation_size_,
            Max(initial_max_old_generation_size_, min_limit));
  }

  bool IsHeapLimitIncreasedForDebugging() {
    return max_old_generation_size_ == HeapLimitForDebugging();
  }
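  // Worked example (illustrative): with an initial max old generation size of
  // 256 MB, HeapLimitForDebugging() yields 1 GB (a factor of 4, clamped to
  // SIZE_MAX / 4), and RestoreOriginalHeapLimit() later lowers the limit
  // again, but never below the live size plus 25% slack.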

  // ===========================================================================
  // Initialization. ===========================================================
  // ===========================================================================

  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
                     size_t max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Create ObjectStats if live_object_stats_ or dead_object_stats_ are nullptr.
  V8_INLINE void CreateObjectStats();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // ===========================================================================
  // Getters for spaces. =======================================================
  // ===========================================================================

  inline Address NewSpaceTop();

  NewSpace* new_space() { return new_space_; }
  OldSpace* old_space() { return old_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  inline PagedSpace* paged_space(int idx);
  inline Space* space(int idx);

  // Returns the name of the space.
  const char* GetSpaceName(int idx);

  // ===========================================================================
  // Getters to other components. ==============================================
  // ===========================================================================

  GCTracer* tracer() { return tracer_; }

  MemoryAllocator* memory_allocator() { return memory_allocator_; }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  // ===========================================================================
  // Root set access. ==========================================================
  // ===========================================================================

  // Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) inline String* name();
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) inline Symbol* name();
  PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
  PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
  WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);
  }
  template <typename T>
  bool IsRootHandle(Handle<T> handle, RootListIndex* index) const {
    Object** const handle_location = bit_cast<Object**>(handle.address());
    if (handle_location >= &roots_[kRootListLength]) return false;
    if (handle_location < &roots_[0]) return false;
    *index = static_cast<RootListIndex>(handle_location - &roots_[0]);
    return true;
  }
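  // Usage sketch (illustrative):
  //
  //   Heap::RootListIndex index;
  //   if (heap->IsRootHandle(handle, &index)) {
  //     // `handle` points directly into the roots_ array at `index`.
  //   }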

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the code_stubs root (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
  }

  void SetRootNoScriptSharedFunctionInfos(Object* value) {
    roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
  }

  void SetMessageListeners(TemplateList* value) {
    roots_[kMessageListenersRootIndex] = value;
  }

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // The stack limit is thread-dependent. To be able to reproduce the same
  // snapshot blob, we need to reset it before serializing.
  void ClearStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs a garbage collection operation.
   1154   // Returns whether there is a chance that another major GC could
   1155   // collect more garbage.
   1156   inline bool CollectGarbage(
   1157       AllocationSpace space, GarbageCollectionReason gc_reason,
   1158       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1159 
   1160   // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
   1161   // non-zero, then the slower precise sweeper is used, which leaves the heap
   1162   // in a state where we can iterate over the heap visiting all objects.
   1163   void CollectAllGarbage(
   1164       int flags, GarbageCollectionReason gc_reason,
   1165       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
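
  // Illustrative use (a sketch, assuming a testing context where
  // GarbageCollectionReason::kTesting is appropriate): force a full GC that
  // leaves the heap iterable before walking all objects:
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
  //                           GarbageCollectionReason::kTesting);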
   1166 
   1167   // Last hope GC, should try to squeeze as much as possible.
   1168   void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);
   1169 
  // Reports an external memory pressure event; either performs a major GC or
   1171   // completes incremental marking in order to free external resources.
   1172   void ReportExternalMemoryPressure();
   1173 
   1174   // Invoked when GC was requested via the stack guard.
   1175   void HandleGCRequest();
   1176 
   1177   // ===========================================================================
   1178   // Iterators. ================================================================
   1179   // ===========================================================================
   1180 
   1181   // Iterates over all roots in the heap.
   1182   void IterateRoots(ObjectVisitor* v, VisitMode mode);
   1183   // Iterates over all strong roots in the heap.
   1184   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
   1185   // Iterates over entries in the smi roots list.  Only interesting to the
   1186   // serializer/deserializer, since GC does not care about smis.
   1187   void IterateSmiRoots(ObjectVisitor* v);
   1188   // Iterates over all the other roots in the heap.
   1189   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
   1190 
   1191   // Iterate pointers of promoted objects.
   1192   void IterateAndScavengePromotedObject(HeapObject* target, int size,
   1193                                         bool was_marked_black);
   1194 
   1195   // ===========================================================================
   1196   // Store buffer API. =========================================================
   1197   // ===========================================================================
   1198 
   1199   // Write barrier support for object[offset] = o;
   1200   inline void RecordWrite(Object* object, int offset, Object* o);
   1201   inline void RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* target);
   1202   void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* target);
   1203   void RecordWritesIntoCode(Code* code);
   1204   inline void RecordFixedArrayElements(FixedArray* array, int offset,
   1205                                        int length);
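
  // Illustrative sketch: after a raw tagged store into {object} at {offset}
  // (the store itself is elided here), the write barrier keeps old-to-new
  // pointers visible to the GC:
  //
  //   heap->RecordWrite(object, offset, value);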
   1206 
   1207   inline Address* store_buffer_top_address();
   1208 
   1209   void ClearRecordedSlot(HeapObject* object, Object** slot);
   1210   void ClearRecordedSlotRange(Address start, Address end);
   1211 
   1212   bool HasRecordedSlot(HeapObject* object, Object** slot);
   1213 
   1214   // ===========================================================================
   1215   // Incremental marking API. ==================================================
   1216   // ===========================================================================
   1217 
  // Starts incremental marking and ensures that the idle time handler can
  // perform incremental steps.
   1220   void StartIdleIncrementalMarking(GarbageCollectionReason gc_reason);
   1221 
   1222   // Starts incremental marking assuming incremental marking is currently
   1223   // stopped.
   1224   void StartIncrementalMarking(
   1225       int gc_flags, GarbageCollectionReason gc_reason,
   1226       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
   1227 
   1228   void StartIncrementalMarkingIfAllocationLimitIsReached(
   1229       int gc_flags,
   1230       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
   1231 
   1232   void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);
   1233 
   1234   bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms,
   1235                                          GarbageCollectionReason gc_reason);
   1236 
   1237   void RegisterReservationsForBlackAllocation(Reservation* reservations);
   1238 
   1239   IncrementalMarking* incremental_marking() { return incremental_marking_; }
   1240 
  // The runtime uses this function to notify the GC of potentially unsafe
  // object layout changes that require special synchronization with the
  // concurrent marker. A layout change is unsafe if
  // - it removes a tagged in-object field, or
  // - it replaces a tagged in-object field with an untagged in-object field.
   1246   void NotifyObjectLayoutChange(HeapObject* object,
   1247                                 const DisallowHeapAllocation&);
   1248 #ifdef VERIFY_HEAP
   1249   // This function checks that either
   1250   // - the map transition is safe,
   1251   // - or it was communicated to GC using NotifyObjectLayoutChange.
   1252   void VerifyObjectLayoutChange(HeapObject* object, Map* new_map);
   1253 #endif
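
  // Illustrative use (a sketch; synchronized_set_map stands in for whatever
  // actually performs the map transition):
  //
  //   DisallowHeapAllocation no_allocation;
  //   heap->NotifyObjectLayoutChange(object, no_allocation);
  //   object->synchronized_set_map(new_map);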
   1254 
   1255   // ===========================================================================
   1256   // Embedder heap tracer support. =============================================
   1257   // ===========================================================================
   1258 
   1259   LocalEmbedderHeapTracer* local_embedder_heap_tracer() {
   1260     return local_embedder_heap_tracer_;
   1261   }
   1262   void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
   1263   void TracePossibleWrapper(JSObject* js_object);
   1264   void RegisterExternallyReferencedObject(Object** object);
   1265 
   1266   // ===========================================================================
   1267   // External string table API. ================================================
   1268   // ===========================================================================
   1269 
   1270   // Registers an external string.
   1271   inline void RegisterExternalString(String* string);
   1272 
   1273   // Finalizes an external string by deleting the associated external
   1274   // data and clearing the resource pointer.
   1275   inline void FinalizeExternalString(String* string);
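
  // Illustrative lifecycle sketch: the heap learns about an external string
  // when it is created, and FinalizeExternalString releases its resource when
  // the string dies:
  //
  //   heap->RegisterExternalString(string);
  //   ...
  //   heap->FinalizeExternalString(string);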
   1276 
   1277   // ===========================================================================
   1278   // Methods checking/returning the space of a given object/address. ===========
   1279   // ===========================================================================
   1280 
   1281   // Returns whether the object resides in new space.
   1282   inline bool InNewSpace(Object* object);
   1283   inline bool InFromSpace(Object* object);
   1284   inline bool InToSpace(Object* object);
   1285 
   1286   // Returns whether the object resides in old space.
   1287   inline bool InOldSpace(Object* object);
   1288 
  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
   1291   bool Contains(HeapObject* value);
   1292 
  // Checks whether an address/object is in a space.
   1294   // Currently used by tests, serialization and heap verification only.
   1295   bool InSpace(HeapObject* value, AllocationSpace space);
   1296 
   1297   // Slow methods that can be used for verification as they can also be used
   1298   // with off-heap Addresses.
   1299   bool ContainsSlow(Address addr);
   1300   bool InSpaceSlow(Address addr, AllocationSpace space);
   1301   inline bool InNewSpaceSlow(Address address);
   1302   inline bool InOldSpaceSlow(Address address);
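
  // Illustrative sketch: the generational write barrier only has to record
  // old-to-new pointers, so recording can be skipped when the host object is
  // itself in new space:
  //
  //   if (!heap->InNewSpace(host) && heap->InNewSpace(value)) {
  //     // Record the slot in the store buffer.
  //   }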
   1303 
   1304   // ===========================================================================
   1305   // Object statistics tracking. ===============================================
   1306   // ===========================================================================
   1307 
   1308   // Returns the number of buckets used by object statistics tracking during a
  // major GC. Note that the following methods fail gracefully when the given
  // bounds are exceeded.
   1311   size_t NumberOfTrackedHeapObjectTypes();
   1312 
   1313   // Returns object statistics about count and size at the last major GC.
  // Objects are grouped into buckets that roughly resemble existing
   1315   // instance types.
   1316   size_t ObjectCountAtLastGC(size_t index);
   1317   size_t ObjectSizeAtLastGC(size_t index);
   1318 
   1319   // Retrieves names of buckets used by object statistics tracking.
   1320   bool GetObjectTypeName(size_t index, const char** object_type,
   1321                          const char** object_sub_type);
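
  // Illustrative sketch: dumping the statistics collected at the last major
  // GC (PrintF is used here only for logging):
  //
  //   for (size_t i = 0; i < heap->NumberOfTrackedHeapObjectTypes(); i++) {
  //     const char* type;
  //     const char* sub_type;
  //     if (heap->GetObjectTypeName(i, &type, &sub_type)) {
  //       PrintF("%s/%s: %zu objects, %zu bytes\n", type, sub_type,
  //              heap->ObjectCountAtLastGC(i), heap->ObjectSizeAtLastGC(i));
  //     }
  //   }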
   1322 
   1323   // ===========================================================================
   1324   // Code statistics. ==========================================================
   1325   // ===========================================================================
   1326 
   1327   // Collect code (Code and BytecodeArray objects) statistics.
   1328   void CollectCodeStatistics();
   1329 
   1330   // ===========================================================================
   1331   // GC statistics. ============================================================
   1332   // ===========================================================================
   1333 
   1334   // Returns the maximum amount of memory reserved for the heap.
   1335   size_t MaxReserved() {
   1336     return 2 * max_semi_space_size_ + max_old_generation_size_;
   1337   }
   1338   size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
   1339   size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
   1340   size_t MaxOldGenerationSize() { return max_old_generation_size_; }
   1341   size_t MaxExecutableSize() { return max_executable_size_; }
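
  // For example, with a 16 MB maximum semi-space and a 256 MB maximum old
  // generation, MaxReserved() yields 2 * 16 MB + 256 MB = 288 MB.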
   1342 
  // Returns the capacity of the heap in bytes w/o growing. The heap grows when
  // more space is needed until it reaches the limit.
   1345   size_t Capacity();
   1346 
   1347   // Returns the capacity of the old generation.
   1348   size_t OldGenerationCapacity();
   1349 
   1350   // Returns the amount of memory currently committed for the heap.
   1351   size_t CommittedMemory();
   1352 
   1353   // Returns the amount of memory currently committed for the old space.
   1354   size_t CommittedOldGenerationMemory();
   1355 
   1356   // Returns the amount of executable memory currently committed for the heap.
   1357   size_t CommittedMemoryExecutable();
   1358 
  // Returns the amount of physical memory currently committed for the heap.
   1360   size_t CommittedPhysicalMemory();
   1361 
   1362   // Returns the maximum amount of memory ever committed for the heap.
   1363   size_t MaximumCommittedMemory() { return maximum_committed_; }
   1364 
   1365   // Updates the maximum committed memory for the heap. Should be called
   1366   // whenever a space grows.
   1367   void UpdateMaximumCommitted();
   1368 
  // Returns the bytes available across all spaces w/o growing.
  // The heap doesn't guarantee that it can allocate an object that requires
   1371   // all available bytes. Check MaxHeapObjectSize() instead.
   1372   size_t Available();
   1373 
  // Returns the size of all objects residing in the heap.
   1375   size_t SizeOfObjects();
   1376 
   1377   void UpdateSurvivalStatistics(int start_new_space_size);
   1378 
   1379   inline void IncrementPromotedObjectsSize(size_t object_size) {
   1380     promoted_objects_size_ += object_size;
   1381   }
   1382   inline size_t promoted_objects_size() { return promoted_objects_size_; }
   1383 
   1384   inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) {
   1385     semi_space_copied_object_size_ += object_size;
   1386   }
   1387   inline size_t semi_space_copied_object_size() {
   1388     return semi_space_copied_object_size_;
   1389   }
   1390 
   1391   inline size_t SurvivedNewSpaceObjectSize() {
   1392     return promoted_objects_size_ + semi_space_copied_object_size_;
   1393   }
   1394 
   1395   inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
   1396 
   1397   inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
   1398 
   1399   inline void IncrementNodesPromoted() { nodes_promoted_++; }
   1400 
   1401   inline void IncrementYoungSurvivorsCounter(size_t survived) {
   1402     survived_last_scavenge_ = survived;
   1403     survived_since_last_expansion_ += survived;
   1404   }
   1405 
   1406   inline uint64_t PromotedTotalSize() {
   1407     return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
   1408   }
   1409 
   1410   inline void UpdateNewSpaceAllocationCounter();
   1411 
   1412   inline size_t NewSpaceAllocationCounter();
   1413 
   1414   // This should be used only for testing.
   1415   void set_new_space_allocation_counter(size_t new_value) {
   1416     new_space_allocation_counter_ = new_value;
   1417   }
   1418 
   1419   void UpdateOldGenerationAllocationCounter() {
   1420     old_generation_allocation_counter_at_last_gc_ =
   1421         OldGenerationAllocationCounter();
   1422   }
   1423 
   1424   size_t OldGenerationAllocationCounter() {
   1425     return old_generation_allocation_counter_at_last_gc_ +
   1426            PromotedSinceLastGC();
   1427   }
   1428 
   1429   // This should be used only for testing.
   1430   void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
   1431     old_generation_allocation_counter_at_last_gc_ = new_value;
   1432   }
   1433 
   1434   size_t PromotedSinceLastGC() {
   1435     return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
   1436   }
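
  // For example, if the old generation allocation counter stood at 100 MB at
  // the last GC and 10 MB of objects have been promoted since, then
  // OldGenerationAllocationCounter() returns 110 MB.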
   1437 
   1438   int gc_count() const { return gc_count_; }
   1439 
  // Returns the size of objects residing in spaces other than new space.
   1441   size_t PromotedSpaceSizeOfObjects();
   1442 
  // ===========================================================================
  // Prologue/epilogue callback methods. =======================================
  // ===========================================================================
   1446 
   1447   void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
   1448                              GCType gc_type_filter, bool pass_isolate = true);
   1449   void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);
   1450 
   1451   void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
   1452                              GCType gc_type_filter, bool pass_isolate = true);
   1453   void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);
   1454 
   1455   void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
   1456   void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
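
  // Illustrative use (a sketch; OnMarkCompact is a hypothetical function with
  // the v8::Isolate::GCCallback signature): fire only for mark-compact GCs:
  //
  //   heap->AddGCPrologueCallback(OnMarkCompact, kGCTypeMarkSweepCompact);
  //   ...
  //   heap->RemoveGCPrologueCallback(OnMarkCompact);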
   1457 
   1458   // ===========================================================================
   1459   // Allocation methods. =======================================================
   1460   // ===========================================================================
   1461 
   1462   // Creates a filler object and returns a heap object immediately after it.
   1463   MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
   1464                                                 int filler_size);
   1465 
   1466   // Creates a filler object if needed for alignment and returns a heap object
   1467   // immediately after it. If any space is left after the returned object,
   1468   // another filler object is created so the over allocated memory is iterable.
   1469   MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
   1470                                               int object_size,
   1471                                               int allocation_size,
   1472                                               AllocationAlignment alignment);
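
  // Illustrative sketch: to double-align an object, a caller over-allocates
  // by kPointerSize and lets AlignWithFiller place the filler(s):
  //
  //   int allocation_size = object_size + kPointerSize;
  //   // ... allocate allocation_size bytes, yielding HeapObject* raw ...
  //   HeapObject* result =
  //       AlignWithFiller(raw, object_size, allocation_size, kDoubleAligned);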
   1473 
   1474   // ===========================================================================
   1475   // ArrayBuffer tracking. =====================================================
   1476   // ===========================================================================
   1477 
   1478   // TODO(gc): API usability: encapsulate mutation of JSArrayBuffer::is_external
   1479   // in the registration/unregistration APIs. Consider dropping the "New" from
   1480   // "RegisterNewArrayBuffer" because one can re-register a previously
   1481   // unregistered buffer, too, and the name is confusing.
   1482   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
   1483   void UnregisterArrayBuffer(JSArrayBuffer* buffer);
   1484 
   1485   // ===========================================================================
   1486   // Allocation site tracking. =================================================
   1487   // ===========================================================================
   1488 
  // Updates the AllocationSite of a given {object}. If the global pretenuring
  // storage is passed as {pretenuring_feedback} the memento found count on
  // the corresponding allocation site is immediately updated and an entry
  // in the hash map is created. Otherwise the entry (including the count
  // value) is cached on the local pretenuring feedback.
   1494   template <UpdateAllocationSiteMode mode>
   1495   inline void UpdateAllocationSite(HeapObject* object,
   1496                                    base::HashMap* pretenuring_feedback);
   1497 
   1498   // Removes an entry from the global pretenuring storage.
   1499   inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
   1500 
   1501   // Merges local pretenuring feedback into the global one. Note that this
   1502   // method needs to be called after evacuation, as allocation sites may be
   1503   // evacuated and this method resolves forward pointers accordingly.
   1504   void MergeAllocationSitePretenuringFeedback(
   1505       const base::HashMap& local_pretenuring_feedback);
   1506 
   1507 // =============================================================================
   1508 
   1509 #ifdef VERIFY_HEAP
   1510   // Verify the heap is in its normal state before or after a GC.
   1511   void Verify();
   1512 #endif
   1513 
   1514 #ifdef DEBUG
   1515   void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
   1516 
   1517   void Print();
   1518   void PrintHandles();
   1519 
   1520   // Report heap statistics.
   1521   void ReportHeapStatistics(const char* title);
   1522   void ReportCodeStatistics(const char* title);
   1523 #endif
   1524 
   1525   static const char* GarbageCollectionReasonToString(
   1526       GarbageCollectionReason gc_reason);
   1527 
   1528  private:
   1529   class SkipStoreBufferScope;
   1530   class PretenuringScope;
   1531 
   1532   // External strings table is a place where all external strings are
   1533   // registered.  We need to keep track of such strings to properly
   1534   // finalize them.
   1535   class ExternalStringTable {
   1536    public:
   1537     // Registers an external string.
   1538     inline void AddString(String* string);
   1539 
   1540     inline void IterateAll(ObjectVisitor* v);
   1541     inline void IterateNewSpaceStrings(ObjectVisitor* v);
   1542     inline void PromoteAllNewSpaceStrings();
   1543 
   1544     // Restores internal invariant and gets rid of collected strings. Must be
    // Restores the internal invariant and gets rid of collected strings. Must
    // be called after each Iterate*() call that modified the strings.
   1547     void CleanUpNewSpaceStrings();
   1548 
   1549     // Destroys all allocated memory.
   1550     void TearDown();
   1551 
   1552    private:
   1553     explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
   1554 
   1555     inline void Verify();
   1556 
   1557     inline void AddOldString(String* string);
   1558 
   1559     // Notifies the table that only a prefix of the new list is valid.
   1560     inline void ShrinkNewStrings(int position);
   1561 
    // To speed up scavenge collections, new space strings are kept separate
    // from old space strings.
   1564     List<Object*> new_space_strings_;
   1565     List<Object*> old_space_strings_;
   1566 
   1567     Heap* heap_;
   1568 
   1569     friend class Heap;
   1570 
   1571     DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
   1572   };
   1573 
   1574   struct StrongRootsList;
   1575 
   1576   struct StringTypeTable {
   1577     InstanceType type;
   1578     int size;
   1579     RootListIndex index;
   1580   };
   1581 
   1582   struct ConstantStringTable {
   1583     const char* contents;
   1584     RootListIndex index;
   1585   };
   1586 
   1587   struct StructTable {
   1588     InstanceType type;
   1589     int size;
   1590     RootListIndex index;
   1591   };
   1592 
   1593   struct GCCallbackPair {
   1594     GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
   1595                    bool pass_isolate)
   1596         : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}
   1597 
   1598     bool operator==(const GCCallbackPair& other) const {
   1599       return other.callback == callback;
   1600     }
   1601 
   1602     v8::Isolate::GCCallback callback;
   1603     GCType gc_type;
   1604     bool pass_isolate;
   1605   };
   1606 
   1607   typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
   1608                                                         Object** pointer);
   1609 
   1610   static const int kInitialStringTableSize = 2048;
   1611   static const int kInitialEvalCacheSize = 64;
   1612   static const int kInitialNumberStringCacheSize = 256;
   1613 
   1614   static const int kRememberedUnmappedPages = 128;
   1615 
   1616   static const StringTypeTable string_type_table[];
   1617   static const ConstantStringTable constant_string_table[];
   1618   static const StructTable struct_table[];
   1619 
   1620   static const int kYoungSurvivalRateHighThreshold = 90;
   1621   static const int kYoungSurvivalRateAllowedDeviation = 15;
   1622   static const int kOldSurvivalRateLowThreshold = 10;
   1623 
   1624   static const int kMaxMarkCompactsInIdleRound = 7;
   1625   static const int kIdleScavengeThreshold = 5;
   1626 
   1627   static const int kInitialFeedbackCapacity = 256;
   1628 
   1629   Heap();
   1630 
   1631   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
   1632       Heap* heap, Object** pointer);
   1633 
   1634   // Selects the proper allocation space based on the pretenuring decision.
   1635   static AllocationSpace SelectSpace(PretenureFlag pretenure) {
   1636     return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
   1637   }
   1638 
   1639 #define ROOT_ACCESSOR(type, name, camel_name) \
   1640   inline void set_##name(type* value);
   1641   ROOT_LIST(ROOT_ACCESSOR)
   1642 #undef ROOT_ACCESSOR
   1643 
   1644   StoreBuffer* store_buffer() { return store_buffer_; }
   1645 
   1646   void set_current_gc_flags(int flags) {
   1647     current_gc_flags_ = flags;
   1648     DCHECK(!ShouldFinalizeIncrementalMarking() ||
   1649            !ShouldAbortIncrementalMarking());
   1650   }
   1651 
   1652   inline bool ShouldReduceMemory() const {
   1653     return current_gc_flags_ & kReduceMemoryFootprintMask;
   1654   }
   1655 
   1656   inline bool ShouldAbortIncrementalMarking() const {
   1657     return current_gc_flags_ & kAbortIncrementalMarkingMask;
   1658   }
   1659 
   1660   inline bool ShouldFinalizeIncrementalMarking() const {
   1661     return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
   1662   }
   1663 
   1664   void PreprocessStackTraces();
   1665 
  // Checks whether a global GC is necessary.
   1667   GarbageCollector SelectGarbageCollector(AllocationSpace space,
   1668                                           const char** reason);
   1669 
  // Makes sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top.
   1673   void EnsureFillerObjectAtTop();
   1674 
   1675   // Ensure that we have swept all spaces in such a way that we can iterate
   1676   // over all objects.  May cause a GC.
   1677   void MakeHeapIterable();
   1678 
  // Performs a garbage collection operation.
   1680   // Returns whether there is a chance that another major GC could
   1681   // collect more garbage.
   1682   bool CollectGarbage(
   1683       GarbageCollector collector, GarbageCollectionReason gc_reason,
   1684       const char* collector_reason,
   1685       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1686 
  // Performs a garbage collection.
  // Returns whether there is a chance that another major GC could
   1689   // collect more garbage.
   1690   bool PerformGarbageCollection(
   1691       GarbageCollector collector,
   1692       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
   1693 
   1694   inline void UpdateOldSpaceLimits();
   1695 
   1696   // Initializes a JSObject based on its map.
   1697   void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
   1698                                  Map* map);
   1699 
   1700   // Initializes JSObject body starting at given offset.
   1701   void InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset);
   1702 
   1703   void InitializeAllocationMemento(AllocationMemento* memento,
   1704                                    AllocationSite* allocation_site);
   1705 
   1706   bool CreateInitialMaps();
   1707   void CreateInitialObjects();
   1708 
  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
   1711   NO_INLINE(void CreateJSEntryStub());
   1712   NO_INLINE(void CreateJSConstructEntryStub());
   1713 
   1714   void CreateFixedStubs();
   1715 
   1716   HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
   1717 
   1718   // Commits from space if it is uncommitted.
   1719   void EnsureFromSpaceIsCommitted();
   1720 
   1721   // Uncommit unused semi space.
   1722   bool UncommitFromSpace();
   1723 
  // Fills in bogus values in from space.
   1725   void ZapFromSpace();
   1726 
  // Deopts all code that contains allocation instructions which are tenured or
  // not tenured. Moreover, it clears the pretenuring allocation site
  // statistics.
   1729   void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
   1730 
   1731   // Evaluates local pretenuring for the old space and calls
   1732   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
   1733   // the old space.
   1734   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
   1735 
   1736   // Record statistics before and after garbage collection.
   1737   void ReportStatisticsBeforeGC();
   1738   void ReportStatisticsAfterGC();
   1739 
  // Returns the length used for the full-sized number string cache.
   1741   int FullSizeNumberStringCacheLength();
   1742   // Flush the number to string cache.
   1743   void FlushNumberStringCache();
   1744 
   1745   void ConfigureInitialOldGenerationSize();
   1746 
   1747   bool HasLowYoungGenerationAllocationRate();
   1748   bool HasLowOldGenerationAllocationRate();
   1749   double YoungGenerationMutatorUtilization();
   1750   double OldGenerationMutatorUtilization();
   1751 
   1752   void ReduceNewSpaceSize();
   1753 
   1754   GCIdleTimeHeapState ComputeHeapState();
   1755 
   1756   bool PerformIdleTimeAction(GCIdleTimeAction action,
   1757                              GCIdleTimeHeapState heap_state,
   1758                              double deadline_in_ms);
   1759 
   1760   void IdleNotificationEpilogue(GCIdleTimeAction action,
   1761                                 GCIdleTimeHeapState heap_state, double start_ms,
   1762                                 double deadline_in_ms);
   1763 
   1764   inline void UpdateAllocationsHash(HeapObject* object);
   1765   inline void UpdateAllocationsHash(uint32_t value);
   1766   void PrintAlloctionsHash();
   1767 
   1768   void AddToRingBuffer(const char* string);
   1769   void GetFromRingBuffer(char* buffer);
   1770 
   1771   void CompactRetainedMaps(ArrayList* retained_maps);
   1772 
   1773   void CollectGarbageOnMemoryPressure();
   1774 
   1775   void InvokeOutOfMemoryCallback();
   1776 
   1777   void ComputeFastPromotionMode(double survival_rate);
   1778 
   1779   // Attempt to over-approximate the weak closure by marking object groups and
   1780   // implicit references from global handles, but don't atomically complete
   1781   // marking. If we continue to mark incrementally, we might have marked
   1782   // objects that die later.
   1783   void FinalizeIncrementalMarking(GarbageCollectionReason gc_reason);
   1784 
   1785   // Returns the timer used for a given GC type.
   1786   // - GCScavenger: young generation GC
   1787   // - GCCompactor: full GC
  // - GCFinalizeMC: finalization of incremental full GC
  // - GCFinalizeMCReduceMemory: finalization of incremental full GC with
  //   memory reduction
   1791   HistogramTimer* GCTypeTimer(GarbageCollector collector);
   1792 
   1793   // ===========================================================================
   1794   // Pretenuring. ==============================================================
   1795   // ===========================================================================
   1796 
  // Pretenuring decisions are made based on feedback collected during new
  // space evacuation. Note that between feedback collection and calling this
  // method objects in old space must not move.
   1800   void ProcessPretenuringFeedback();
   1801 
   1802   // ===========================================================================
   1803   // Actual GC. ================================================================
   1804   // ===========================================================================
   1805 
   1806   // Code that should be run before and after each GC.  Includes some
   1807   // reporting/verification activities when compiled with DEBUG set.
   1808   void GarbageCollectionPrologue();
   1809   void GarbageCollectionEpilogue();
   1810 
   1811   // Performs a major collection in the whole heap.
   1812   void MarkCompact();
   1813   // Performs a minor collection of just the young generation.
   1814   void MinorMarkCompact();
   1815 
   1816   // Code to be run before and after mark-compact.
   1817   void MarkCompactPrologue();
   1818   void MarkCompactEpilogue();
   1819 
   1820   // Performs a minor collection in new generation.
   1821   void Scavenge();
   1822   void EvacuateYoungGeneration();
   1823 
   1824   Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
   1825 
   1826   void UpdateNewSpaceReferencesInExternalStringTable(
   1827       ExternalStringTableUpdaterCallback updater_func);
   1828 
   1829   void UpdateReferencesInExternalStringTable(
   1830       ExternalStringTableUpdaterCallback updater_func);
   1831 
   1832   void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
   1833   void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
   1834   void ProcessNativeContexts(WeakObjectRetainer* retainer);
   1835   void ProcessAllocationSites(WeakObjectRetainer* retainer);
   1836   void ProcessWeakListRoots(WeakObjectRetainer* retainer);
   1837 
   1838   // ===========================================================================
   1839   // GC statistics. ============================================================
   1840   // ===========================================================================
   1841 
   1842   inline size_t OldGenerationSpaceAvailable() {
   1843     if (old_generation_allocation_limit_ <= PromotedTotalSize()) return 0;
   1844     return old_generation_allocation_limit_ -
   1845            static_cast<size_t>(PromotedTotalSize());
   1846   }
   1847 
   1848   // We allow incremental marking to overshoot the allocation limit for
  // performance reasons. If the overshoot is too large then we are more
   1850   // eager to finalize incremental marking.
   1851   inline bool AllocationLimitOvershotByLargeMargin() {
   1852     // This guards against too eager finalization in small heaps.
   1853     // The number is chosen based on v8.browsing_mobile on Nexus 7v2.
   1854     size_t kMarginForSmallHeaps = 32u * MB;
   1855     if (old_generation_allocation_limit_ >= PromotedTotalSize()) return false;
   1856     uint64_t overshoot = PromotedTotalSize() - old_generation_allocation_limit_;
   1857     // Overshoot margin is 50% of allocation limit or half-way to the max heap
   1858     // with special handling of small heaps.
   1859     uint64_t margin =
   1860         Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
   1861             (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
   1862     return overshoot >= margin;
   1863   }
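
  // For example, with a 64 MB allocation limit and a 256 MB maximum old
  // generation, the margin is Min(Max(32 MB, 32 MB), 96 MB) = 32 MB, so an
  // overshoot of at least 32 MB counts as large.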
   1864 
   1865   void UpdateTotalGCTime(double duration);
   1866 
   1867   bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
   1868 
   1869   // ===========================================================================
   1870   // Growing strategy. =========================================================
   1871   // ===========================================================================
   1872 
   1873   // For some webpages RAIL mode does not switch from PERFORMANCE_LOAD.
   1874   // This constant limits the effect of load RAIL mode on GC.
   1875   // The value is arbitrary and chosen as the largest load time observed in
   1876   // v8 browsing benchmarks.
   1877   static const int kMaxLoadTimeMs = 7000;
   1878 
   1879   bool ShouldOptimizeForLoadTime();
   1880 
   1881   // Decrease the allocation limit if the new limit based on the given
   1882   // parameters is lower than the current limit.
   1883   void DampenOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
   1884                                           double mutator_speed);
   1885 
   1886   // Calculates the allocation limit based on a given growing factor and a
   1887   // given old generation size.
   1888   size_t CalculateOldGenerationAllocationLimit(double factor,
   1889                                                size_t old_gen_size);
   1890 
   1891   // Sets the allocation limit to trigger the next full garbage collection.
   1892   void SetOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
   1893                                        double mutator_speed);
   1894 
   1895   size_t MinimumAllocationLimitGrowingStep();
   1896 
   1897   size_t old_generation_allocation_limit() const {
   1898     return old_generation_allocation_limit_;
   1899   }
   1900 
   1901   bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
   1902 
   1903   bool CanExpandOldGeneration(size_t size) {
   1904     if (force_oom_) return false;
   1905     return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
   1906   }
   1907 
   1908   bool IsCloseToOutOfMemory(size_t slack) {
   1909     return OldGenerationCapacity() + slack >= MaxOldGenerationSize();
   1910   }
   1911 
   1912   bool ShouldExpandOldGenerationOnSlowAllocation();
   1913 
   1914   enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
   1915   IncrementalMarkingLimit IncrementalMarkingLimitReached();
   1916 
   1917   // ===========================================================================
   1918   // Idle notification. ========================================================
   1919   // ===========================================================================
   1920 
   1921   bool RecentIdleNotificationHappened();
   1922   void ScheduleIdleScavengeIfNeeded(int bytes_allocated);
   1923 
   1924   // ===========================================================================
   1925   // HeapIterator helpers. =====================================================
   1926   // ===========================================================================
   1927 
   1928   void heap_iterator_start() { heap_iterator_depth_++; }
   1929 
   1930   void heap_iterator_end() { heap_iterator_depth_--; }
   1931 
   1932   bool in_heap_iterator() { return heap_iterator_depth_ > 0; }
   1933 
   1934   // ===========================================================================
   1935   // Allocation methods. =======================================================
   1936   // ===========================================================================
   1937 
   1938   // Returns a deep copy of the JavaScript object.
   1939   // Properties and elements are copied too.
   1940   // Optionally takes an AllocationSite to be appended in an AllocationMemento.
   1941   MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
   1942                                                 AllocationSite* site = NULL);
   1943 
   1944   // Allocates a JS Map in the heap.
   1945   MUST_USE_RESULT AllocationResult
   1946   AllocateMap(InstanceType instance_type, int instance_size,
   1947               ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
   1948 
   1949   // Allocates and initializes a new JavaScript object based on a
   1950   // constructor.
   1951   // If allocation_site is non-null, then a memento is emitted after the object
   1952   // that points to the site.
   1953   MUST_USE_RESULT AllocationResult AllocateJSObject(
   1954       JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
   1955       AllocationSite* allocation_site = NULL);
   1956 
   1957   // Allocates and initializes a new JavaScript object based on a map.
   1958   // Passing an allocation site means that a memento will be created that
   1959   // points to the site.
   1960   MUST_USE_RESULT AllocationResult
   1961   AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
   1962                           AllocationSite* allocation_site = NULL);
   1963 
   1964   // Allocates a HeapNumber from value.
   1965   MUST_USE_RESULT AllocationResult AllocateHeapNumber(
   1966       MutableMode mode = IMMUTABLE, PretenureFlag pretenure = NOT_TENURED);
   1967 
   1968   // Allocates a byte array of the specified length
   1969   MUST_USE_RESULT AllocationResult
   1970   AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
   1971 
   1972   // Allocates a bytecode array with given contents.
   1973   MUST_USE_RESULT AllocationResult
   1974   AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
   1975                         int parameter_count, FixedArray* constant_pool);
   1976 
   1977   MUST_USE_RESULT AllocationResult CopyCode(Code* code);
   1978 
   1979   MUST_USE_RESULT AllocationResult
   1980   CopyBytecodeArray(BytecodeArray* bytecode_array);
   1981 
   1982   // Allocates a fixed array initialized with undefined values
   1983   MUST_USE_RESULT AllocationResult
   1984   AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
   1985 
   1986   // Allocate an uninitialized object.  The memory is non-executable if the
   1987   // hardware and OS allow.  This is the single choke-point for allocations
   1988   // performed by the runtime and should not be bypassed (to extend this to
   1989   // inlined allocations, use the Heap::DisableInlineAllocation() support).
   1990   MUST_USE_RESULT inline AllocationResult AllocateRaw(
   1991       int size_in_bytes, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);
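
  // Illustrative use inside the runtime (a sketch; callers typically retry
  // after a GC when the result signals a failed allocation):
  //
  //   AllocationResult result = AllocateRaw(size_in_bytes, OLD_SPACE);
  //   HeapObject* obj = nullptr;
  //   if (!result.To(&obj)) {
  //     // Trigger a GC and retry, or propagate the failure.
  //   }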
   1993 
   1994   // Allocates a heap object based on the map.
   1995   MUST_USE_RESULT AllocationResult
   1996       Allocate(Map* map, AllocationSpace space,
   1997                AllocationSite* allocation_site = NULL);
   1998 
   1999   // Allocates a partial map for bootstrapping.
   2000   MUST_USE_RESULT AllocationResult
   2001       AllocatePartialMap(InstanceType instance_type, int instance_size);
   2002 
   2003   // Allocate a block of memory in the given space (filled with a filler).
   2004   // Used as a fall-back for generated code when the space is full.
   2005   MUST_USE_RESULT AllocationResult
   2006       AllocateFillerObject(int size, bool double_align, AllocationSpace space);
   2007 
   2008   // Allocate an uninitialized fixed array.
   2009   MUST_USE_RESULT AllocationResult
   2010       AllocateRawFixedArray(int length, PretenureFlag pretenure);
   2011 
   2012   // Allocate an uninitialized fixed double array.
   2013   MUST_USE_RESULT AllocationResult
   2014       AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
   2015 
   2016   // Allocate an initialized fixed array with the given filler value.
   2017   MUST_USE_RESULT AllocationResult
   2018       AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
   2019                                    Object* filler);
   2020 
  // Allocates and partially initializes a String.  There are two String
   2022   // encodings: one-byte and two-byte.  These functions allocate a string of
   2023   // the given length and set its map and length fields.  The characters of
   2024   // the string are uninitialized.
   2025   MUST_USE_RESULT AllocationResult
   2026       AllocateRawOneByteString(int length, PretenureFlag pretenure);
   2027   MUST_USE_RESULT AllocationResult
   2028       AllocateRawTwoByteString(int length, PretenureFlag pretenure);
   2029 
   2030   // Allocates an internalized string in old space based on the character
   2031   // stream.
   2032   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
   2033       Vector<const char> str, int chars, uint32_t hash_field);
   2034 
   2035   MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
   2036       Vector<const uint8_t> str, uint32_t hash_field);
   2037 
   2038   MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
   2039       Vector<const uc16> str, uint32_t hash_field);
   2040 
   2041   template <bool is_one_byte, typename T>
   2042   MUST_USE_RESULT AllocationResult
   2043       AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
   2044 
   2045   template <typename T>
   2046   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
   2047       T t, int chars, uint32_t hash_field);
   2048 
   2049   // Allocates an uninitialized fixed array. It must be filled by the caller.
   2050   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);
   2051 
   2052   // Make a copy of src and return it.
   2053   MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
   2054 
   2055   // Make a copy of src, also grow the copy, and return the copy.
   2056   MUST_USE_RESULT AllocationResult
   2057   CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);
   2058 
  // Make a copy of the first new_len elements of src and return the copy.
   2060   MUST_USE_RESULT AllocationResult CopyFixedArrayUpTo(FixedArray* src,
   2061                                                       int new_len,
   2062                                                       PretenureFlag pretenure);
   2063 
   2064   // Make a copy of src, set the map, and return the copy.
   2065   MUST_USE_RESULT AllocationResult
   2066       CopyFixedArrayWithMap(FixedArray* src, Map* map);
   2067 
   2068   // Make a copy of src and return it.
   2069   MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
   2070       FixedDoubleArray* src);
   2071 
  // Computes a single character string where the character has the given code.
   2073   // A cache is used for one-byte (Latin1) codes.
   2074   MUST_USE_RESULT AllocationResult
   2075       LookupSingleCharacterStringFromCode(uint16_t code);
   2076 
   2077   // Allocate a symbol in old space.
   2078   MUST_USE_RESULT AllocationResult AllocateSymbol();
   2079 
   2080   // Allocates an external array of the specified length and type.
   2081   MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
   2082       int length, ExternalArrayType array_type, void* external_pointer,
   2083       PretenureFlag pretenure);
   2084 
   2085   // Allocates a fixed typed array of the specified length and type.
   2086   MUST_USE_RESULT AllocationResult
   2087   AllocateFixedTypedArray(int length, ExternalArrayType array_type,
   2088                           bool initialize, PretenureFlag pretenure);
   2089 
   2090   // Make a copy of src and return it.
   2091   MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
   2092 
   2093   // Make a copy of src, set the map, and return the copy.
   2094   MUST_USE_RESULT AllocationResult
   2095       CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);
   2096 
  // Allocates a fixed double array with uninitialized values.
   2098   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
   2099       int length, PretenureFlag pretenure = NOT_TENURED);
   2100 
   2101   // Allocate empty fixed array.
   2102   MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
   2103 
   2104   // Allocate empty scope info.
   2105   MUST_USE_RESULT AllocationResult AllocateEmptyScopeInfo();
   2106 
   2107   // Allocate empty fixed typed array of given type.
   2108   MUST_USE_RESULT AllocationResult
   2109       AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
   2110 
   2111   // Allocate a tenured simple cell.
   2112   MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
   2113 
   2114   // Allocate a tenured JS global property cell initialized with the hole.
   2115   MUST_USE_RESULT AllocationResult AllocatePropertyCell();
   2116 
   2117   MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
   2118 
   2119   MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);
   2120 
   2121   // Allocates a new utility object in the old generation.
   2122   MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
   2123 
   2124   // Allocates a new foreign object.
   2125   MUST_USE_RESULT AllocationResult
   2126       AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
   2127 
   2128   MUST_USE_RESULT AllocationResult
   2129       AllocateCode(int object_size, bool immovable);
   2130 
   2131   // ===========================================================================
   2132 
   2133   void set_force_oom(bool value) { force_oom_ = value; }
   2134 
   2135   // The amount of external memory registered through the API.
   2136   int64_t external_memory_;
   2137 
  // The limit at which to trigger memory pressure from the API.
   2139   int64_t external_memory_limit_;
   2140 
   2141   // Caches the amount of external memory registered at the last MC.
   2142   int64_t external_memory_at_last_mark_compact_;
   2143 
   2144   // The amount of memory that has been freed concurrently.
   2145   base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;
   2146 
   2147   // This can be calculated directly from a pointer to the heap; however, it is
   2148   // more expedient to get at the isolate directly from within Heap methods.
   2149   Isolate* isolate_;
   2150 
   2151   Object* roots_[kRootListLength];
   2152 
   2153   size_t code_range_size_;
   2154   size_t max_semi_space_size_;
   2155   size_t initial_semispace_size_;
   2156   size_t max_old_generation_size_;
   2157   size_t initial_max_old_generation_size_;
   2158   size_t initial_old_generation_size_;
   2159   bool old_generation_size_configured_;
   2160   size_t max_executable_size_;
   2161   size_t maximum_committed_;
   2162 
   2163   // For keeping track of how much data has survived
  // scavenge since the last new space expansion.
   2165   size_t survived_since_last_expansion_;
   2166 
   2167   // ... and since the last scavenge.
   2168   size_t survived_last_scavenge_;
   2169 
   2170   // This is not the depth of nested AlwaysAllocateScope's but rather a single
   2171   // count, as scopes can be acquired from multiple tasks (read: threads).
   2172   base::AtomicNumber<size_t> always_allocate_scope_count_;
   2173 
  // Stores the memory pressure level that is set by MemoryPressureNotification
   2175   // and reset by a mark-compact garbage collection.
   2176   base::AtomicValue<MemoryPressureLevel> memory_pressure_level_;
   2177 
   2178   v8::debug::OutOfMemoryCallback out_of_memory_callback_;
   2179   void* out_of_memory_callback_data_;
   2180 
   2181   // For keeping track of context disposals.
   2182   int contexts_disposed_;
   2183 
   2184   // The length of the retained_maps array at the time of context disposal.
   2185   // This separates maps in the retained_maps array that were created before
   2186   // and after context disposal.
   2187   int number_of_disposed_maps_;
   2188 
   2189   int global_ic_age_;
   2190 
   2191   NewSpace* new_space_;
   2192   OldSpace* old_space_;
   2193   OldSpace* code_space_;
   2194   MapSpace* map_space_;
   2195   LargeObjectSpace* lo_space_;
   2196   // Map from the space id to the space.
   2197   Space* space_[LAST_SPACE + 1];
   2198   HeapState gc_state_;
   2199   int gc_post_processing_depth_;
   2200   Address new_space_top_after_last_gc_;
   2201 
  // Returns the amount of external memory registered since the last global GC.
   2203   uint64_t PromotedExternalMemorySize();
   2204 
   2205   // How many "runtime allocations" happened.
   2206   uint32_t allocations_count_;
   2207 
   2208   // Running hash over allocations performed.
   2209   uint32_t raw_allocations_hash_;
   2210 
   2211   // How many mark-sweep collections happened.
   2212   unsigned int ms_count_;
   2213 
  // How many GCs happened.
   2215   unsigned int gc_count_;
   2216 
   2217   // For post mortem debugging.
   2218   int remembered_unmapped_pages_index_;
   2219   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
   2220 
   2221 #ifdef DEBUG
   2222   // If the --gc-interval flag is set to a positive value, this
  // variable holds the number of allocations that
  // remain until the next failure and garbage collection.
   2225   int allocation_timeout_;
   2226 #endif  // DEBUG
   2227 
   2228   // Limit that triggers a global GC on the next (normally caused) GC.  This
   2229   // is checked when we have already decided to do a GC to help determine
   2230   // which collector to invoke, before expanding a paged space in the old
   2231   // generation and on every allocation in large object space.
   2232   size_t old_generation_allocation_limit_;
   2233 
   2234   // Indicates that inline bump-pointer allocation has been globally disabled
   2235   // for all spaces. This is used to disable allocations in generated code.
   2236   bool inline_allocation_disabled_;
   2237 
   2238   // Weak list heads, threaded through the objects.
   2239   // List heads are initialized lazily and contain the undefined_value at start.
   2240   Object* native_contexts_list_;
   2241   Object* allocation_sites_list_;
   2242 
   2243   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   2244   // marking. It is initialized during marking, destroyed after marking and
   2245   // contains Smi(0) while marking is not active.
   2246   Object* encountered_weak_collections_;
   2247 
   2248   Object* encountered_weak_cells_;
   2249 
   2250   Object* encountered_transition_arrays_;
   2251 
   2252   List<GCCallbackPair> gc_epilogue_callbacks_;
   2253   List<GCCallbackPair> gc_prologue_callbacks_;
   2254 
   2255   int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
   2256 
   2257   GCTracer* tracer_;
   2258 
   2259   size_t promoted_objects_size_;
   2260   double promotion_ratio_;
   2261   double promotion_rate_;
   2262   size_t semi_space_copied_object_size_;
   2263   size_t previous_semi_space_copied_object_size_;
   2264   double semi_space_copied_rate_;
   2265   int nodes_died_in_new_space_;
   2266   int nodes_copied_in_new_space_;
   2267   int nodes_promoted_;
   2268 
  // This is the pretenuring trigger for allocation sites that are in the
  // maybe-tenure state. When we switch to the maximum new space size we
  // deoptimize the code that belongs to the allocation site and derive the
  // lifetime of the allocation site.
   2273   unsigned int maximum_size_scavenges_;
   2274 
   2275   // Total time spent in GC.
   2276   double total_gc_time_ms_;
   2277 
   2278   // Last time an idle notification happened.
   2279   double last_idle_notification_time_;
   2280 
   2281   // Last time a garbage collection happened.
   2282   double last_gc_time_;
   2283 
   2284   Scavenger* scavenge_collector_;
   2285 
   2286   MarkCompactCollector* mark_compact_collector_;
   2287 
   2288   MemoryAllocator* memory_allocator_;
   2289 
   2290   StoreBuffer* store_buffer_;
   2291 
   2292   IncrementalMarking* incremental_marking_;
   2293 
   2294   GCIdleTimeHandler* gc_idle_time_handler_;
   2295 
   2296   MemoryReducer* memory_reducer_;
   2297 
   2298   ObjectStats* live_object_stats_;
   2299   ObjectStats* dead_object_stats_;
   2300 
   2301   ScavengeJob* scavenge_job_;
   2302 
   2303   AllocationObserver* idle_scavenge_observer_;
   2304 
  // This counter is increased before each GC and never reset.
  // To account for the bytes allocated since the last GC, use the
  // NewSpaceAllocationCounter() function.
  size_t new_space_allocation_counter_;

  // This counter is increased before each GC and never reset. To
  // account for the bytes allocated since the last GC, use the
  // OldGenerationAllocationCounter() function.
  size_t old_generation_allocation_counter_at_last_gc_;

  // The size of objects in old generation after the last MarkCompact GC.
  size_t old_generation_size_at_last_gc_;

  // If the --deopt_every_n_garbage_collections flag is set to a positive
  // value, this variable holds the number of garbage collections since the
  // last deoptimization triggered by garbage collection.
  int gcs_since_last_deopt_;

  // The feedback storage is used to store allocation sites (keys) and how
  // often they have been visited (values) by finding a memento behind an
  // object. The storage is only alive temporarily during a GC. The invariant
  // is that all pointers in this map are already fixed, i.e., they do not
  // point to forwarding pointers.
  base::HashMap* global_pretenuring_feedback_;

  char trace_ring_buffer_[kTraceRingBufferSize];
  // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
  // full then the data is from ring_buffer_end_ to the end of the buffer and
  // from 0 to ring_buffer_end_.
  bool ring_buffer_full_;
  size_t ring_buffer_end_;
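
  // Illustrative sketch (not part of this header): reconstructing the trace
  // in chronological order from the ring buffer, per the invariant above.
  //
  //   std::string trace;
  //   if (ring_buffer_full_) {
  //     // Oldest data first: from ring_buffer_end_ to the end of the buffer.
  //     trace.append(trace_ring_buffer_ + ring_buffer_end_,
  //                  kTraceRingBufferSize - ring_buffer_end_);
  //   }
  //   // Then the newer data: from 0 to ring_buffer_end_.
  //   trace.append(trace_ring_buffer_, ring_buffer_end_);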

  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag is set when the heap has been configured.  The heap can be
  // repeatedly configured through the API until it is set up.
  bool configured_;

  // Currently set GC flags that are respected by all GC components.
  int current_gc_flags_;

  // Currently set GC callback flags that are used to pass information between
  // the embedder and V8's GC.
  GCCallbackFlags current_gc_callback_flags_;

  ExternalStringTable external_string_table_;

  base::Mutex relocation_mutex_;

  int gc_callbacks_depth_;

  bool deserialization_complete_;

  StrongRootsList* strong_roots_list_;

  // The depth of HeapIterator nestings.
  int heap_iterator_depth_;

  LocalEmbedderHeapTracer* local_embedder_heap_tracer_;

  bool fast_promotion_mode_;

  // Used for testing purposes.
  bool force_oom_;
  bool delay_sweeper_tasks_for_testing_;

  HeapObject* pending_layout_change_object_;

  // Classes in "heap" can be friends.
  friend class AlwaysAllocateScope;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class HeapIterator;
  friend class IdleScavengeObserver;
  friend class IncrementalMarking;
  friend class IncrementalMarkingJob;
  friend class LargeObjectSpace;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class NewSpace;
  friend class ObjectStatsCollector;
  friend class Page;
  friend class PagedSpace;
  friend class Scavenger;
  friend class StoreBuffer;
  friend class TestMemoryAllocatorScope;

  // The allocator interface.
  friend class Factory;

  // The Isolate constructs us.
  friend class Isolate;

  // Used in cctest.
  friend class HeapTester;

  DISALLOW_COPY_AND_ASSIGN(Heap);
};


class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  intptr_t* start_marker;                  //  0
  size_t* new_space_size;                  //  1
  size_t* new_space_capacity;              //  2
  size_t* old_space_size;                  //  3
  size_t* old_space_capacity;              //  4
  size_t* code_space_size;                 //  5
  size_t* code_space_capacity;             //  6
  size_t* map_space_size;                  //  7
  size_t* map_space_capacity;              //  8
  size_t* lo_space_size;                   //  9
  size_t* global_handle_count;             // 10
  size_t* weak_global_handle_count;        // 11
  size_t* pending_global_handle_count;     // 12
  size_t* near_death_global_handle_count;  // 13
  size_t* free_global_handle_count;        // 14
  size_t* memory_allocator_size;           // 15
  size_t* memory_allocator_capacity;       // 16
  size_t* malloced_memory;                 // 17
  size_t* malloced_peak_memory;            // 18
  size_t* objects_per_type;                // 19
  size_t* size_per_type;                   // 20
  int* os_error;                           // 21
  char* last_few_messages;                 // 22
  char* js_stacktrace;                     // 23
  intptr_t* end_marker;                    // 24
};
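
// Illustrative sketch (an assumption, not part of this header): because the
// record is bracketed by kStartMarker and kEndMarker, a consumer of a
// HeapStats record can sanity-check it before reading the other fields.
//
//   bool LooksLikeValidHeapStats(const HeapStats& stats) {
//     return *stats.start_marker == HeapStats::kStartMarker &&
//            *stats.end_marker == HeapStats::kEndMarker;
//   }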


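// As the name suggests, allocations made while such a scope is active are
// expected to always succeed (a best-effort description based on the name;
// the inline definitions live outside this header). Illustrative usage:
//
//   {
//     AlwaysAllocateScope scope(isolate);
//     // Heap allocations here should not fail.
//   }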
class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();

 private:
  Heap* heap_;
};


// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end) override;
};


// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end) override;
};


// Space iterator for iterating over all spaces of the heap.  Returns each
// space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();

 private:
  Heap* heap_;
  int counter_;
};
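
// The space iterators share one usage pattern; a minimal sketch (assuming a
// valid Heap* named |heap|), which applies equally to the OldSpaces and
// PagedSpaces iterators below:
//
//   AllSpaces spaces(heap);
//   while (Space* space = spaces.next()) {
//     // Inspect |space|; next() returns null once all spaces are seen.
//   }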


// Space iterator for iterating over all old spaces of the heap: Old space
// and code space.  Returns each space in turn, and null when it is done.
class V8_EXPORT_PRIVATE OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  OldSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old space, code space and cell space.  Returns each space in turn,
// and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  PagedSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  virtual ~SpaceIterator();

  bool has_next();
  Space* next();

 private:
  Heap* heap_;
  int current_space_;  // from enum AllocationSpace.
};
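
// SpaceIterator differs from the iterators above in exposing has_next();
// a minimal sketch (assuming a valid Heap* named |heap|):
//
//   SpaceIterator it(heap);
//   while (it.has_next()) {
//     Space* space = it.next();
//     // Inspect |space| here.
//   }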


// A HeapIterator provides iteration over the whole heap. It aggregates the
// specific iterators for the different spaces, as each of those can iterate
// over only one space.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). Because the free-node filtering
// implementation uses GC marks, it cannot be used during mark-sweep or
// mark-compact GC phases. Also, it is forbidden to interrupt iteration in
// this mode, as this would leave heap objects marked (and thus, unusable).
class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);
  ~HeapIterator();

  HeapObject* next();

 private:
  HeapObject* NextObject();

  DisallowHeapAllocation no_heap_allocation_;

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  std::unique_ptr<ObjectIterator> object_iterator_;
};
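
// Whole-heap walk, a minimal sketch (assuming a valid Heap* named |heap|):
//
//   HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
//   for (HeapObject* obj = iterator.next(); obj != nullptr;
//        obj = iterator.next()) {
//     // Visit |obj|; no allocation may happen while the iterator is alive.
//   }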

// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Returns whether this object should be retained. If NULL is returned, the
  // object has no remaining references. Otherwise the address of the retained
  // object should be returned, since in some GC situations the object may
  // have been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
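
// Minimal retainer sketch (illustrative; the class name is an assumption):
// returning |object| unchanged retains it at its current address, whereas
// returning NULL would drop it from the weak list.
//
//   class KeepAllRetainer : public WeakObjectRetainer {
//    public:
//     Object* RetainAs(Object* object) override { return object; }
//   };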


#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObjects.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target, WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  void VisitPointers(Object** start, Object** end) override;

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
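
// Minimal usage sketch (illustrative; |root| and |target| are assumptions):
//
//   Object* root = ...;  // some root slot to start tracing from
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root);
//   bool reachable = tracer.found();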
#endif  // DEBUG

// -----------------------------------------------------------------------------
// Allows observation of allocations.
class AllocationObserver {
 public:
  explicit AllocationObserver(intptr_t step_size)
      : step_size_(step_size), bytes_to_next_step_(step_size) {
    DCHECK(step_size >= kPointerSize);
  }
  virtual ~AllocationObserver() {}

  // Called each time the observed space does an allocation step. This may
  // happen more frequently than the step_size we are monitoring (e.g. when
  // there are multiple observers, or when a page or space boundary is
  // encountered).
  void AllocationStep(int bytes_allocated, Address soon_object, size_t size) {
    bytes_to_next_step_ -= bytes_allocated;
    if (bytes_to_next_step_ <= 0) {
      Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object,
           size);
      step_size_ = GetNextStepSize();
      bytes_to_next_step_ = step_size_;
    }
  }
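
  // Worked example (illustrative): with step_size_ == 512 and a fresh
  // bytes_to_next_step_ of 512, AllocationStep(700, ...) drives
  // bytes_to_next_step_ to -188, so Step() is invoked with
  // 512 - (-188) == 700, i.e. the total bytes allocated since the last
  // step, before both counters are reset from GetNextStepSize().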

 protected:
  intptr_t step_size() const { return step_size_; }
  intptr_t bytes_to_next_step() const { return bytes_to_next_step_; }

  // Pure virtual method, implemented by subclasses, that gets called when at
  // least step_size bytes have been allocated. soon_object is the address
  // just allocated (but not yet initialized); size is the size of the object
  // as requested (i.e. without alignment fillers). Some complexities to be
  // aware of:
  // 1) soon_object will be nullptr in cases where we end up observing an
  //    allocation that happens to be a filler space (e.g. page boundaries).
  // 2) size is the requested size at the time of allocation. Right-trimming
  //    may change the object size dynamically.
  // 3) soon_object may actually be the first object in an allocation-folding
  //    group. In such a case size is the size of the group rather than the
  //    first object.
  virtual void Step(int bytes_allocated, Address soon_object, size_t size) = 0;

  // Subclasses can override this method to make step size dynamic.
  virtual intptr_t GetNextStepSize() { return step_size_; }

  intptr_t step_size_;
  intptr_t bytes_to_next_step_;

 private:
  friend class LargeObjectSpace;
  friend class NewSpace;
  friend class PagedSpace;
  DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
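
// Minimal observer sketch (illustrative; the class name and step size are
// assumptions, not part of this header):
//
//   class SampleObserver : public AllocationObserver {
//    public:
//     SampleObserver() : AllocationObserver(64 * KB) {}
//     void Step(int bytes_allocated, Address soon_object,
//               size_t size) override {
//       // Runs roughly every 64 KB of allocation in the observed space.
//     }
//   };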

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_H_