// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <cmath>

#include "allocation.h"
#include "assert-scope.h"
#include "globals.h"
#include "incremental-marking.h"
#include "list.h"
#include "mark-compact.h"
#include "objects-visiting.h"
#include "spaces.h"
#include "splay-tree-inl.h"
#include "store-buffer.h"
#include "v8-counters.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Smi, store_buffer_top, StoreBufferTop)                                     \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  /* The first 32 roots above this line should be boring from a GC point of */ \
  /* view.  This means they are never in new space and never on a page that */ \
  /* is being compacted.                                                    */ \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Object, termination_exception, TerminationException)                       \
  V(Smi, hash_seed, HashSeed)                                                  \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map,                                                                       \
    external_string_with_one_byte_data_map,                                    \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map,                                                                       \
    short_external_string_with_one_byte_data_map,                              \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap)            \
  V(Map, cons_internalized_string_map, ConsInternalizedStringMap)              \
  V(Map, cons_ascii_internalized_string_map, ConsAsciiInternalizedStringMap)   \
  V(Map,                                                                       \
    external_internalized_string_map,                                          \
    ExternalInternalizedStringMap)                                             \
  V(Map,                                                                       \
    external_internalized_string_with_one_byte_data_map,                       \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map,                                                                       \
    external_ascii_internalized_string_map,                                    \
    ExternalAsciiInternalizedStringMap)                                        \
  V(Map,                                                                       \
    short_external_internalized_string_map,                                    \
    ShortExternalInternalizedStringMap)                                        \
  V(Map,                                                                       \
    short_external_internalized_string_with_one_byte_data_map,                 \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map,                                                                       \
    short_external_ascii_internalized_string_map,                              \
    ShortExternalAsciiInternalizedStringMap)                                   \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap)         \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, external_double_array_map, ExternalDoubleArrayMap)                    \
  V(Map, external_pixel_array_map, ExternalPixelArrayMap)                      \
  V(ExternalArray, empty_external_byte_array,                                  \
      EmptyExternalByteArray)                                                  \
  V(ExternalArray, empty_external_unsigned_byte_array,                         \
      EmptyExternalUnsignedByteArray)                                          \
  V(ExternalArray, empty_external_short_array, EmptyExternalShortArray)        \
  V(ExternalArray, empty_external_unsigned_short_array,                        \
      EmptyExternalUnsignedShortArray)                                         \
  V(ExternalArray, empty_external_int_array, EmptyExternalIntArray)            \
  V(ExternalArray, empty_external_unsigned_int_array,                          \
      EmptyExternalUnsignedIntArray)                                           \
  V(ExternalArray, empty_external_float_array, EmptyExternalFloatArray)        \
  V(ExternalArray, empty_external_double_array, EmptyExternalDoubleArray)      \
  V(ExternalArray, empty_external_pixel_array,                                 \
      EmptyExternalPixelArray)                                                 \
  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap)     \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, foreign_map, ForeignMap)                                              \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Smi, last_script_id, LastScriptId)                                         \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
  V(JSObject, observation_state, ObservationState)                             \
  V(Map, external_map, ExternalMap)                                            \
  V(Symbol, frozen_symbol, FrozenSymbol)                                       \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol)              \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
      EmptySlowElementDictionary)                                              \
  V(Symbol, observed_symbol, ObservedSymbol)

#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(StringTable, string_table, StringTable)
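
// These lists take an X-macro: a client defines its own V(...) and expands
// the list to stamp out one declaration per entry. A minimal sketch of a
// per-root accessor (illustrative only; the real accessor definitions live
// elsewhere in this file and in heap-inl.h):
//
//   #define ROOT_ACCESSOR(type, name, camel_name)                            \
//     type* name() { return type::cast(roots_[k##camel_name##RootIndex]); }
//   ROOT_LIST(ROOT_ACCESSOR)
//   #undef ROOT_ACCESSOR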

#define INTERNALIZED_STRING_LIST(V)                                      \
  V(Array_string, "Array")                                               \
  V(Object_string, "Object")                                             \
  V(proto_string, "__proto__")                                           \
  V(StringImpl_string, "StringImpl")                                     \
  V(arguments_string, "arguments")                                       \
  V(Arguments_string, "Arguments")                                       \
  V(call_string, "call")                                                 \
  V(apply_string, "apply")                                               \
  V(caller_string, "caller")                                             \
  V(boolean_string, "boolean")                                           \
  V(Boolean_string, "Boolean")                                           \
  V(callee_string, "callee")                                             \
  V(constructor_string, "constructor")                                   \
  V(code_string, ".code")                                                \
  V(result_string, ".result")                                            \
  V(dot_for_string, ".for.")                                             \
  V(catch_var_string, ".catch-var")                                      \
  V(empty_string, "")                                                    \
  V(eval_string, "eval")                                                 \
  V(function_string, "function")                                         \
  V(length_string, "length")                                             \
  V(module_string, "module")                                             \
  V(name_string, "name")                                                 \
  V(native_string, "native")                                             \
  V(null_string, "null")                                                 \
  V(number_string, "number")                                             \
  V(Number_string, "Number")                                             \
  V(nan_string, "NaN")                                                   \
  V(RegExp_string, "RegExp")                                             \
  V(source_string, "source")                                             \
  V(global_string, "global")                                             \
  V(ignore_case_string, "ignoreCase")                                    \
  V(multiline_string, "multiline")                                       \
  V(input_string, "input")                                               \
  V(index_string, "index")                                               \
  V(last_index_string, "lastIndex")                                      \
  V(object_string, "object")                                             \
  V(payload_string, "payload")                                           \
  V(literals_string, "literals")                                         \
  V(prototype_string, "prototype")                                       \
  V(string_string, "string")                                             \
  V(String_string, "String")                                             \
  V(unknown_field_string, "unknownField")                                \
  V(symbol_string, "symbol")                                             \
  V(Symbol_string, "Symbol")                                             \
  V(Date_string, "Date")                                                 \
  V(this_string, "this")                                                 \
  V(to_string_string, "toString")                                        \
  V(char_at_string, "CharAt")                                            \
  V(undefined_string, "undefined")                                       \
  V(value_of_string, "valueOf")                                          \
  V(stack_string, "stack")                                               \
  V(toJSON_string, "toJSON")                                             \
  V(InitializeVarGlobal_string, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_string, "InitializeConstGlobal")               \
  V(KeyedLoadElementMonomorphic_string,                                  \
    "KeyedLoadElementMonomorphic")                                       \
  V(KeyedStoreElementMonomorphic_string,                                 \
    "KeyedStoreElementMonomorphic")                                      \
  V(stack_overflow_string, "kStackOverflowBoilerplate")                  \
  V(illegal_access_string, "illegal access")                             \
  V(out_of_memory_string, "out-of-memory")                               \
  V(illegal_execution_state_string, "illegal execution state")           \
  V(get_string, "get")                                                   \
  V(set_string, "set")                                                   \
  V(map_field_string, "%map")                                            \
  V(elements_field_string, "%elements")                                  \
  V(length_field_string, "%length")                                      \
  V(cell_value_string, "%cell_value")                                    \
  V(function_class_string, "Function")                                   \
  V(properties_field_symbol, "%properties")                              \
  V(payload_field_symbol, "%payload")                                    \
  V(illegal_argument_string, "illegal argument")                         \
  V(MakeReferenceError_string, "MakeReferenceError")                     \
  V(MakeSyntaxError_string, "MakeSyntaxError")                           \
  V(MakeTypeError_string, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_string, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_string, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_string, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_string, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_string, "illegal_return")                             \
  V(illegal_break_string, "illegal_break")                               \
  V(illegal_continue_string, "illegal_continue")                         \
  V(unknown_label_string, "unknown_label")                               \
  V(redeclaration_string, "redeclaration")                               \
  V(failure_string, "<failure>")                                         \
  V(space_string, " ")                                                   \
  V(exec_string, "exec")                                                 \
  V(zero_string, "0")                                                    \
  V(global_eval_string, "GlobalEval")                                    \
  V(identity_hash_string, "v8::IdentityHash")                            \
  V(closure_string, "(closure)")                                         \
  V(use_strict_string, "use strict")                                     \
  V(dot_string, ".")                                                     \
  V(anonymous_function_string, "(anonymous function)")                   \
  V(compare_ic_string, "==")                                             \
  V(strict_compare_ic_string, "===")                                     \
  V(infinity_string, "Infinity")                                         \
  V(minus_infinity_string, "-Infinity")                                  \
  V(hidden_stack_trace_string, "v8::hidden_stack_trace")                 \
  V(query_colon_string, "(?:)")                                          \
  V(Generator_string, "Generator")                                       \
  V(throw_string, "throw")                                               \
  V(done_string, "done")                                                 \
  V(value_string, "value")                                               \
  V(next_string, "next")

// Forward declarations.
class GCTracer;
class HeapStats;
class Isolate;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);

class StoreBufferRebuilder {
 public:
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
  }

  void Callback(MemoryChunk* page, StoreBufferEvent event);

 private:
  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space.  If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;
};



// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) { }

  void Initialize();

  void Destroy() {
    ASSERT(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  inline void ActivateGuardIfOnTheSamePage();

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    if (!guard_) {
      return;
    }

    ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool is_empty() {
    return (front_ == rear_) &&
        (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    ASSERT(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      ASSERT(!front_page->prev_page()->is_anchor());
      front_ =
          reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  bool guard_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
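
// Usage sketch (illustrative only; `heap` and `object` are assumed to be in
// scope). Each entry occupies two words, the object pointer and its size, so
// remove() pops both; overflow entries spill to emergency_stack_.
//
//   PromotionQueue queue(heap);
//   queue.Initialize();
//   queue.insert(object, object->Size());
//   HeapObject* target;
//   int size;
//   while (!queue.is_empty()) queue.remove(&target, &size);
//   queue.Destroy();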


typedef void (*ScavengingCallback)(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object);


// External strings table is a place where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  ExternalStringTable() { }

  friend class Heap;

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections, new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
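
// Iteration protocol sketch (illustrative; `table` and `visitor` are
// assumptions): a visitor may update entries in place, after which CleanUp()
// must run to restore the table's invariant.
//
//   table->Iterate(&visitor);  // may modify the registered strings
//   table->CleanUp();          // drops strings collected in the meantime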


enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Configure heap size before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semispace_size,
                     intptr_t max_old_gen_size,
                     intptr_t max_executable_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
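  // For example (illustrative numbers only): with reserved_semispace_size_ of
  // 8 MB and max_old_generation_size_ of 700 MB, MaxReserved() returns
  // 4 * 8 MB + 700 MB = 732 MB.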
  int MaxSemiSpaceSize() { return max_semispace_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }
  int MaxRegularSpaceAllocationSize() { return InitialSemiSpaceSize() * 3/4; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space.  And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  PropertyCellSpace* property_cell_space() {
    return property_cell_space_;
  }
  LargeObjectSpace* lo_space() { return lo_space_; }
  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_POINTER_SPACE:
        return old_pointer_space();
      case OLD_DATA_SPACE:
        return old_data_space();
      case MAP_SPACE:
        return map_space();
      case CELL_SPACE:
        return cell_space();
      case PROPERTY_CELL_SPACE:
        return property_cell_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldPointerSpaceAllocationTopAddress() {
    return old_pointer_space_->allocation_top_address();
  }
  Address* OldPointerSpaceAllocationLimitAddress() {
    return old_pointer_space_->allocation_limit_address();
  }

  Address* OldDataSpaceAllocationTopAddress() {
    return old_data_space_->allocation_top_address();
  }
  Address* OldDataSpaceAllocationLimitAddress() {
    return old_data_space_->allocation_limit_address();
  }

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObject(
      JSFunction* constructor,
      PretenureFlag pretenure = NOT_TENURED);
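
  // Callers typically unwrap the result like this (sketch; `heap` and
  // `constructor` are assumed to be in scope):
  //
  //   Object* result;
  //   { MaybeObject* maybe = heap->AllocateJSObject(constructor);
  //     if (!maybe->ToObject(&result)) return maybe;  // propagate failure
  //   }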

  MUST_USE_RESULT MaybeObject* AllocateJSObjectWithAllocationSite(
      JSFunction* constructor,
      Handle<AllocationSite> allocation_site);

  MUST_USE_RESULT MaybeObject* AllocateJSGeneratorObject(
      JSFunction* function);

  MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
                                                ScopeInfo* scope_info);

  // Allocate a JSArray with no elements
  MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateJSArrayAndStorage(elements_kind, 0, 0,
                                     DONT_INITIALIZE_ARRAY_ELEMENTS,
                                     pretenure);
  }

  inline MUST_USE_RESULT MaybeObject* AllocateEmptyJSArrayWithAllocationSite(
      ElementsKind elements_kind,
      Handle<AllocationSite> allocation_site);

  // Allocate a JSArray with a specified length but elements that are left
  // uninitialized.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
      ElementsKind elements_kind,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorageWithAllocationSite(
      ElementsKind elements_kind,
      int length,
      int capacity,
      Handle<AllocationSite> allocation_site,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);

  MUST_USE_RESULT MaybeObject* AllocateJSArrayStorage(
      JSArray* array,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);

  // Allocates a JSArray with the given elements.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
      FixedArrayBase* array_base,
      ElementsKind elements_kind,
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates and initializes a new global object based on a constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Returns failure if allocation failed.
  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);

  MUST_USE_RESULT MaybeObject* CopyJSObjectWithAllocationSite(
      JSObject* source, AllocationSite* site);

  // Allocates the function prototype.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);

  // Allocates a JS ArrayBuffer object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayBuffer();

  // Allocates a Harmony proxy or function proxy.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
                                               Object* prototype);

  MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
                                                       Object* call_trap,
                                                       Object* construct_trap,
                                                       Object* prototype);

  // Reinitialize a JSReceiver into an (empty) JS object of respective type and
  // size, but keeping the original prototype.  The receiver must have at least
  // the size of the new object.  The object is reinitialized and behaves as an
  // object that has been freshly allocated.
  // Returns failure if an error occurred, otherwise the object.
  MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
                                                      InstanceType type,
                                                      int size);

  // Reinitialize a JSGlobalProxy based on a constructor.  The object
  // must have the same size as objects allocated using the
  // constructor.  The object is reinitialized and behaves as an
  // object that has been freshly allocated using the constructor.
  MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
      JSFunction* constructor, JSGlobalProxy* global);

  // Allocates and initializes a new JavaScript object based on a map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
      Map* map, PretenureFlag pretenure = NOT_TENURED, bool alloc_props = true);

  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMapWithAllocationSite(
      Map* map, Handle<AllocationSite> allocation_site);

  // Allocates a heap object based on the map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);

  MUST_USE_RESULT MaybeObject* AllocateWithAllocationSite(Map* map,
      AllocationSpace space, Handle<AllocationSite> allocation_site);

  // Allocates a JS Map in the heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateMap(
      InstanceType instance_type,
      int instance_size,
      ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                  int instance_size);

  // Allocate a map for the specified function
  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);

  // Allocates an empty code cache.
  MUST_USE_RESULT MaybeObject* AllocateCodeCache();

  // Allocates a serialized scope info.
  MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);

  // Allocates an External object for v8's external API.
  MUST_USE_RESULT MaybeObject* AllocateExternal(void* value);

  // Allocates an empty PolymorphicCodeCache.
  MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();

  // Allocates a pre-tenured empty AccessorPair.
  MUST_USE_RESULT MaybeObject* AllocateAccessorPair();

  // Allocates an empty TypeFeedbackInfo.
  MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();

  // Allocates an AliasedArgumentsEntry.
  MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // For use during bootup.
  void RepairFreeListsAfterBoot();

  // Allocates and fully initializes a String.  There are two String
  // encodings: ASCII and two byte. One should choose between the three string
  // allocation functions based on the encoding of the string buffer used to
  // initialize the string.
  //   - ...FromAscii initializes the string from a buffer that is ASCII
  //     encoded (it does not check that the buffer is ASCII encoded) and the
  //     result will be ASCII encoded.
  //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be ASCII encoded, otherwise it will be converted to
  //     two-byte.
  //   - ...FromTwoByte initializes the string from a buffer that is two-byte
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be converted to ASCII, otherwise it will be left as
  //     two-byte.  (A usage sketch follows the declarations below.)
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateStringFromOneByte(
      Vector<const uint8_t> str,
      PretenureFlag pretenure = NOT_TENURED);
  // TODO(dcarney): remove this function.
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromOneByte(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateStringFromOneByte(Vector<const uint8_t>::cast(str),
                                     pretenure);
  }
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
      Vector<const char> str,
      int non_ascii_start,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
      Vector<const uc16> str,
      PretenureFlag pretenure = NOT_TENURED);
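
  // Choosing an allocator by buffer encoding (sketch; the vector variables
  // are assumptions):
  //
  //   heap->AllocateStringFromOneByte(one_byte_vec);  // buffer already ASCII
  //   heap->AllocateStringFromUtf8(utf8_vec);         // narrows if possible
  //   heap->AllocateStringFromTwoByte(two_byte_vec);  // narrows if possible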

  // Allocates an internalized string in old space based on the character
  // stream. Returns Failure::RetryAfterGC(requested_bytes, space) if the
  // allocation failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringFromUtf8(
      Vector<const char> str,
      int chars,
      uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateOneByteInternalizedString(
        Vector<const uint8_t> str,
        uint32_t hash_field);

  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteInternalizedString(
        Vector<const uc16> str,
        uint32_t hash_field);

  template<typename T>
  static inline bool IsOneByte(T t, int chars);

  template<typename T>
  MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  template<bool is_one_byte, typename T>
  MUST_USE_RESULT MaybeObject* AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  // Allocates and partially initializes a String.  There are two String
  // encodings: ASCII and two byte.  These functions allocate a string of the
  // given length and set its map and length fields.  The characters of the
  // string are uninitialized.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateRawOneByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Computes a single character string for the given character code.
  // A cache is used for ASCII codes.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
      uint16_t code);

  // Allocate a byte array of the specified length
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
                                                 PretenureFlag pretenure);

  // Allocate a non-tenured byte array of the specified length
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);

  // Allocates an external array of the specified length and type.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalArray(
      int length,
      ExternalArrayType array_type,
      void* external_pointer,
      PretenureFlag pretenure);

  // Allocate a symbol in old space.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSymbol();

  // Allocate a tenured simple cell.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateCell(Object* value);

  // Allocate a tenured JS global property cell.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocatePropertyCell(Object* value);

  // Allocate Box.
  MUST_USE_RESULT MaybeObject* AllocateBox(Object* value,
                                           PretenureFlag pretenure);

  // Allocates a tenured AllocationSite.  Its payload is null.
  MUST_USE_RESULT MaybeObject* AllocateAllocationSite();

  // Allocates a fixed array initialized with undefined values
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
                                                  PretenureFlag pretenure);
  // Allocates a fixed array initialized with undefined values
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  //
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);

  // Moves len elements within a given array from src_index to dst_index.
    950   void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
    951 
    952   // Make a copy of src and return it. Returns
    953   // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
      FixedDoubleArray* src, Map* map);

  // Allocates a fixed array initialized with the hole values.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray(
      int length,
      PretenureFlag pretenure);

  // Allocates a fixed double array with uninitialized values. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates a fixed double array with hole values. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // AllocateHashTable is identical to AllocateFixedArray except
  // that the resulting object has hash_table_map as map.
  MUST_USE_RESULT MaybeObject* AllocateHashTable(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate a native (but otherwise uninitialized) context.
  MUST_USE_RESULT MaybeObject* AllocateNativeContext();

  // Allocate a global context.
  MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
                                                     ScopeInfo* scope_info);

  // Allocate a module context.
  MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);

  // Allocate a function context.
  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
                                                       JSFunction* function);

  // Allocate a catch context.
  MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
                                                    Context* previous,
                                                    String* name,
                                                    Object* thrown_object);

  // Allocate a 'with' context.
  MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
                                                   Context* previous,
                                                   JSReceiver* extension);

  // Allocate a block context.
  MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
                                                    Context* previous,
                                                    ScopeInfo* info);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);

  // Allocates a function initialized with a shared part.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunction(
      Map* function_map,
      SharedFunctionInfo* shared,
      Object* prototype,
      PretenureFlag pretenure = TENURED);

  // Arguments object size.
  static const int kArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // A strict mode arguments object has no callee, so it is smaller.
  static const int kArgumentsObjectSizeStrict =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in non-strict mode.
  static const int kArgumentsCalleeIndex = 1;

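  // Illustrative sketch (not part of the API): the constants above describe
  // an arguments object whose in-object property slots hold length and, in
  // non-strict mode, callee. A hypothetical byte offset for the length slot
  // would be
  //
  //   int length_offset =
  //       JSObject::kHeaderSize + Heap::kArgumentsLengthIndex * kPointerSize;
  //
  // which also explains why the strict-mode object is one pointer smaller.
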
  // Allocates an arguments object - optionally with an elements array.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
      Object* callee, int length);

  // Same as NewNumberFromDouble, but may return a preallocated/immutable
  // number object (e.g., minus_zero_value_, nan_value_).
  MUST_USE_RESULT MaybeObject* NumberFromDouble(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
      double value,
      PretenureFlag pretenure);
  // pretenure = NOT_TENURED
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
      int32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
      uint32_t value, PretenureFlag pretenure = NOT_TENURED);

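  // Usage sketch (illustrative): values that fit in a Smi need no
  // allocation, while out-of-range values allocate a HeapNumber and may
  // therefore fail, so callers must check the MaybeObject result.
  //
  //   MaybeObject* a = heap->NumberFromInt32(42);            // always a Smi
  //   MaybeObject* b = heap->NumberFromUint32(0xFFFFFFFFu);  // exceeds Smi
  //                                                          // -> HeapNumber
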
  // Allocates a new foreign object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateForeign(
      Address address, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new SharedFunctionInfo object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);

  // Allocates a new JSMessageObject object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note that this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
      String* type,
      JSArray* arguments,
      int start_position,
      int end_position,
      Object* script,
      Object* stack_trace,
      Object* stack_frames);

  // Allocates a new cons string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
                                                  String* second);

  // Allocates a new substring object which is a substring of an underlying
  // string buffer stretching from the index start (inclusive) to the index
  // end (exclusive).
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSubString(
      String* buffer,
      int start,
      int end,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocate a new external string object, which is backed by a string
  // resource that resides outside the V8 heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
      const ExternalAsciiString::Resource* resource);
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
      const ExternalTwoByteString::Resource* resource);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Allocates an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
                                                  AllocationSpace space,
                                                  AllocationSpace retry_space);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  void CreateFillerObjectAt(Address addr, int size);

  // Makes a new native code object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. On success, the pointer to the Code object is stored in the
  // self_reference. This allows generated code to reference its own Code
  // object by containing this pointer.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
                                          Code::Flags flags,
                                          Handle<Object> self_reference,
                                          bool immovable = false,
                                          bool crankshafted = false);

  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);

  // Finds the internalized copy for string in the string table.
  // If not found, a new string is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* InternalizeUtf8String(Vector<const char> str);
  MUST_USE_RESULT MaybeObject* InternalizeUtf8String(const char* str) {
    return InternalizeUtf8String(CStrVector(str));
  }
  MUST_USE_RESULT MaybeObject* InternalizeOneByteString(
      Vector<const uint8_t> str);
  MUST_USE_RESULT MaybeObject* InternalizeTwoByteString(Vector<const uc16> str);
  MUST_USE_RESULT MaybeObject* InternalizeString(String* str);
  MUST_USE_RESULT MaybeObject* InternalizeOneByteString(
      Handle<SeqOneByteString> string, int from, int length);

  bool InternalizeStringIfExists(String* str, String** result);
  bool InternalizeTwoCharsStringIfExists(String* str, String** result);

  // Compute the matching internalized string map for a string if possible.
  // NULL is returned if the string is in new space or not flattened.
  Map* InternalizedStringMapForString(String* str);

  // Tries to flatten a string before a compare operation.
  //
  // Returns a failure if flattening was deemed necessary but failed.
  // Note that if flattening is not necessary, the string might stay
  // non-flat even when no failure is returned.
  //
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);

  // Converts the given boolean condition to a JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(AllocationSpace space,
                      GarbageCollector collector,
                      const char* gc_reason,
                      const char* collector_reason);

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(AllocationSpace space,
                             const char* gc_reason = NULL);

  static const int kNoGCFlags = 0;
  static const int kSweepPreciselyMask = 1;
  static const int kReduceMemoryFootprintMask = 2;
  static const int kAbortIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to sweep precisely and abort any
  // incremental marking as well.
  static const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(int flags, const char* gc_reason = NULL);

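  // Usage sketch (illustrative): the flags form a bitmask, so they compose
  // with bitwise OR. Forcing an iterable heap while also trying to shrink
  // memory could look like:
  //
  //   heap->CollectAllGarbage(
  //       Heap::kMakeHeapIterableMask | Heap::kReduceMemoryFootprintMask,
  //       "testing");
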
  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects.  May cause a GC.
  void EnsureHeapIsIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed();

  // Utility to invoke the scavenger. This is needed in test code to
  // ensure correct callback for weak global handles.
  void PerformScavenge();

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  void GarbageCollectionGreedyCheck();
#endif

  void AddGCPrologueCallback(
      GCPrologueCallback callback, GCType gc_type_filter);
  void RemoveGCPrologueCallback(GCPrologueCallback callback);

  void AddGCEpilogueCallback(
      GCEpilogueCallback callback, GCType gc_type_filter);
  void RemoveGCEpilogueCallback(GCEpilogueCallback callback);

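  // Registration sketch (illustrative; OnMarkSweep is a hypothetical
  // callback, and kGCTypeMarkSweepCompact is assumed from the public GCType
  // enum):
  //
  //   static void OnMarkSweep(GCType type, GCCallbackFlags flags) {
  //     // Runs before every mark-sweep/compact collection; callbacks
  //     // must not allocate on the heap.
  //   }
  //   heap->AddGCPrologueCallback(OnMarkSweep, kGCTypeMarkSweepCompact);
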
  void SetGlobalGCPrologueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
    global_gc_prologue_callback_ = callback;
  }
  void SetGlobalGCEpilogueCallback(GCCallback callback) {
    ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
    global_gc_epilogue_callback_ = callback;
  }

  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
  // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name)                                  \
  type* name() {                                                               \
    return type::cast(roots_[k##camel_name##RootIndex]);                       \
  }                                                                            \
  type* raw_unchecked_##name() {                                               \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

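  // For example (illustrative), a root list entry
  // V(Oddball, undefined_value, UndefinedValue) expands to:
  //
  //   Oddball* undefined_value() {
  //     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   Oddball* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
  //   }
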
// Utility type maps
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
    Map* name##_map() {                                                        \
      return Map::cast(roots_[k##Name##MapRootIndex]);                         \
    }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) String* name() {                            \
    return String::cast(roots_[k##name##RootIndex]);                           \
  }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

  // The hidden_string is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_string() { return hidden_string_; }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() { return native_contexts_list_; }

  void set_array_buffers_list(Object* object) {
    array_buffers_list_ = object;
  }
  Object* array_buffers_list() { return array_buffers_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  // Number of mark-sweeps.
  unsigned int ms_count() { return ms_count_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates over pointers into the from-semispace of new space found in the
  // memory interval from start to end.
  void IterateAndMarkPointersToFromSpace(Address start,
                                         Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old pointer space.
  inline bool InOldPointerSpace(Address address);
  inline bool InOldPointerSpace(Object* object);

  // Returns whether the object resides in old data space.
  inline bool InOldDataSpace(Address address);
  inline bool InOldDataSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  void public_set_store_buffer_top(Address* top) {
    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }

  // Get address of native contexts list for serialization support.
  Object** native_contexts_list_address() {
    return &native_contexts_list_;
  }

#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();

  bool weak_embedded_maps_verification_enabled() {
    return no_weak_embedded_maps_verification_scope_depth_ == 0;
  }
#endif

#ifdef DEBUG
  void Print();
  void PrintHandles();

  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

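  // Zapping overwrites freed memory with a recognizable pattern so that
  // stale pointers fail fast. A sweeper might guard its zapping like this
  // (illustrative sketch; assumes the kZapValue constant and the Memory
  // accessor helpers):
  //
  //   if (Heap::ShouldZapGarbage()) {
  //     Memory::Address_at(free_start) = kZapValue;
  //   }
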
  // Fill in bogus values in from space
  void ZapFromSpace();

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  // Makes a new internalized string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateInternalizedString(
      const char* str, int length, int hash);
  MUST_USE_RESULT MaybeObject* CreateInternalizedString(String* str);

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start : start + len) = o.
  INLINE(void RecordWrites(Address address, int start, int len));

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

#ifdef DEBUG
  void set_allocation_timeout(int timeout) {
    allocation_timeout_ = timeout;
  }

  bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();
#endif

  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary, the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static inline void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  void ReserveSpace(int* sizes, Address* addresses);

  //
  // Support for the API.
  //

  bool CreateApiObjects();

  // Attempt to find the number in a small cache.  If we find it, return
  // the string representation of the number.  Otherwise return undefined.
  Object* GetNumberStringCache(Object* number);

  // Update the cache with a new number-string pair.
  void SetNumberStringCache(Object* number, String* str);

  // Adjusts the amount of registered external memory.
  // Returns the adjusted value.
  inline intptr_t AdjustAmountOfExternalAllocatedMemory(
      intptr_t change_in_bytes);

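  // Usage sketch (illustrative): an embedder wrapping a 1 MB external buffer
  // reports it on creation and un-reports it on release, keeping the GC's
  // view of external memory pressure accurate:
  //
  //   heap->AdjustAmountOfExternalAllocatedMemory(static_cast<intptr_t>(MB));
  //   ...
  //   heap->AdjustAmountOfExternalAllocatedMemory(-static_cast<intptr_t>(MB));
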
  // Allocate uninitialized fixed array.
  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
                                                     PretenureFlag pretenure);

  // This is only needed for testing high promotion mode.
  void SetNewSpaceHighPromotionModeActive(bool mode) {
    new_space_high_promotion_mode_active_ = mode;
  }

  // Returns the allocation mode (pre-tenuring) based on observed promotion
  // rates of previous collections.
  inline PretenureFlag GetPretenureMode() {
    return FLAG_pretenuring && new_space_high_promotion_mode_active_
        ? TENURED : NOT_TENURED;
  }

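  // Usage sketch (illustrative): a caller that wants to follow the observed
  // promotion behavior passes the current mode instead of a fixed flag:
  //
  //   MaybeObject* maybe =
  //       heap->AllocateFixedArrayWithHoles(16, heap->GetPretenureMode());
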
  inline Address* NewSpaceHighPromotionModeActiveAddress() {
    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
  }

  inline intptr_t PromotedTotalSize() {
    return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
  }

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  inline intptr_t OldGenerationCapacityAvailable() {
    return max_old_generation_size_ - PromotedTotalSize();
  }

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
    const int divisor = FLAG_stress_compaction ? 10 :
        new_space_high_promotion_mode_active_ ? 1 : 3;
    intptr_t limit =
        Max(old_gen_size + old_gen_size / divisor,
            kMinimumOldGenerationAllocationLimit);
    limit += new_space_.Capacity();
    // TODO(hpayer): Can be removed when pretenuring is supported for all
    // allocation sites.
    if (IsHighSurvivalRate() && IsStableOrIncreasingSurvivalTrend()) {
      limit *= 2;
    }
    intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
    return Min(limit, halfway_to_the_max);
  }

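  // Worked example (illustrative numbers): with old_gen_size = 60 MB, the
  // default divisor 3, and a 16 MB new space capacity, the base limit is
  // max(60 + 60/3, kMinimum) + 16 = 96 MB. A high, stable survival rate
  // doubles that to 192 MB, and the result is finally capped halfway to
  // max_old_generation_size_ (e.g. (60 + 700) / 2 = 380 MB, so 192 MB wins).
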
  // Implements the corresponding V8 API function.
  bool IdleNotification(int hint);

  // Declare all the root indices.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

    // Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

    kStringTableRootIndex,
    kStrongRootListLength = kStringTableRootIndex,
    kRootListLength
  };

  STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
  STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_CHECK(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  MUST_USE_RESULT MaybeObject* NumberToString(
      Object* number, bool check_number_string_cache = true,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* Uint32ToString(
      uint32_t value, bool check_number_string_cache = true);

  Map* MapForExternalArrayType(ExternalArrayType array_type);
  RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
  ExternalArray* EmptyExternalArrayForMap(Map* map);

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Check new space expansion criteria and expand semispaces if it was hit.
  void CheckNewSpaceExpansionCriteria();

  inline void IncrementYoungSurvivorsCounter(int survived) {
    ASSERT(survived >= 0);
    young_survivors_after_last_gc_ = survived;
    survived_since_last_expansion_ += survived;
  }

  inline bool NextGCIsLikelyToBeFull() {
    if (FLAG_gc_global) return true;

    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit =
        old_generation_allocation_limit_ - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    return false;
  }

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessWeakReferences(WeakObjectRetainer* retainer);

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearJSFunctionResultCaches();

  void ClearNormalizedMapCaches();

  GCTracer* tracer() { return tracer_; }

  // Returns the size of objects residing in non new spaces.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  // TODO(hpayer): remove, should be handled by GCTracer
  void AddMarkingTime(double marking_time) {
    marking_time_ += marking_time;
  }

  double marking_time() const {
    return marking_time_;
  }

  // TODO(hpayer): remove, should be handled by GCTracer
  void AddSweepingTime(double sweeping_time) {
    sweeping_time_ += sweeping_time;
  }

  double sweeping_time() const {
    return sweeping_time_;
  }

  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }

  StoreBuffer* store_buffer() {
    return &store_buffer_;
  }

  Marking* marking() {
    return &marking_;
  }

  IncrementalMarking* incremental_marking() {
    return &incremental_marking_;
  }

  bool IsSweepingComplete() {
    return !mark_compact_collector()->IsConcurrentSweepingInProgress() &&
           old_data_space()->IsLazySweepingComplete() &&
           old_pointer_space()->IsLazySweepingComplete();
  }

  bool AdvanceSweepers(int step_size) {
    ASSERT(!FLAG_parallel_sweeping && !FLAG_concurrent_sweeping);
    bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
    sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
    return sweeping_complete;
  }

  bool EnsureSweepersProgressed(int step_size) {
    bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size);
    sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size);
    return sweeping_complete;
  }

  ExternalStringTable* external_string_table() {
    return &external_string_table_;
  }

  // Returns the current sweep generation.
  int sweep_generation() {
    return sweep_generation_;
  }

  inline Isolate* isolate();

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type);

  inline bool OldGenerationAllocationLimitReached();

  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
  }

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FreeQueuedChunks();

  int gc_count() const { return gc_count_; }

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  uint32_t HashSeed() {
    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
    ASSERT(FLAG_randomize_hashes || seed == 0);
    return seed;
  }

  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
    ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetConstructStubDeoptPCOffset(int pc_offset) {
    ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetGetterStubDeoptPCOffset(int pc_offset) {
    ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetSetterStubDeoptPCOffset(int pc_offset) {
    ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() {
    return global_ic_age_;
  }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }

  intptr_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;
  }

  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
  // stored in a contiguous linear buffer. Stats groups are stored one after
  // another.
  enum {
    FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
    FIRST_FIXED_ARRAY_SUB_TYPE =
        FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
    OBJECT_STATS_COUNT =
        FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1
  };

  void RecordObjectStats(InstanceType type, int sub_type, size_t size) {
    ASSERT(type <= LAST_TYPE);
    if (sub_type < 0) {
      object_counts_[type]++;
      object_sizes_[type] += size;
    } else {
      if (type == CODE_TYPE) {
        ASSERT(sub_type < Code::NUMBER_OF_KINDS);
        object_counts_[FIRST_CODE_KIND_SUB_TYPE + sub_type]++;
        object_sizes_[FIRST_CODE_KIND_SUB_TYPE + sub_type] += size;
      } else if (type == FIXED_ARRAY_TYPE) {
        ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
        object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++;
        object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size;
      }
    }
  }

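  // Layout sketch (illustrative): the stats buffers are segmented as
  //
  //   [0 .. LAST_TYPE]                  per-InstanceType totals
  //   [FIRST_CODE_KIND_SUB_TYPE ..]     one slot per Code kind
  //   [FIRST_FIXED_ARRAY_SUB_TYPE ..]   one slot per fixed-array sub type
  //
  // so recording a CODE_TYPE object of kind k bumps
  // object_counts_[FIRST_CODE_KIND_SUB_TYPE + k].
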
  void CheckpointObjectStats();

  // We don't use a ScopedLock here since we want to lock the heap
  // only when FLAG_parallel_recompilation is true.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap);

    ~RelocationLock() {
      if (FLAG_parallel_recompilation) {
#ifdef DEBUG
        heap_->relocation_mutex_locked_by_optimizer_thread_ = false;
#endif  // DEBUG
        heap_->relocation_mutex_->Unlock();
      }
    }

#ifdef DEBUG
    static bool IsLockedByOptimizerThread(Heap* heap) {
      return heap->relocation_mutex_locked_by_optimizer_thread_;
    }
#endif  // DEBUG

   private:
    Heap* heap_;
  };

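  // Usage sketch (illustrative): the lock is scoped, so a thread that must
  // not race with relocation simply holds one for the duration of the
  // critical section:
  //
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // ... read object addresses safely ...
  //   }  // unlocked in the destructor when parallel recompilation is on
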
 private:
  Heap();

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  intptr_t code_range_size_;
  int reserved_semispace_size_;
  int max_semispace_size_;
  int initial_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t max_executable_size_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // For keeping track of when to flush RegExp code.
  int sweep_generation_;

  int always_allocate_scope_depth_;
  int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  int global_ic_age_;

  bool flush_monomorphic_ics_;

  int scan_on_scavenge_pages_;

  NewSpace new_space_;
  OldSpace* old_pointer_space_;
  OldSpace* old_data_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  CellSpace* cell_space_;
  PropertyCellSpace* property_cell_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;

  // Returns the amount of external memory registered since the last global GC.
  intptr_t PromotedExternalMemorySize();

  unsigned int ms_count_;  // how many mark-sweep collections happened
  unsigned int gc_count_;  // how many GCs happened

  // For post mortem debugging.
  static const int kRememberedUnmappedPages = 128;
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  // Total length of the strings we failed to flatten since the last GC.
  int unflattened_strings_length_;

#define ROOT_ACCESSOR(type, name, camel_name)                                  \
  inline void set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */   \
    /* never in new space and never on a page that is being compacted.    */   \
    ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value));  \
    roots_[k##camel_name##RootIndex] = value;                                  \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  bool disallow_allocation_failure_;
#endif  // DEBUG

  // Indicates that the new space should be kept small due to high promotion
  // rates caused by the mutator allocating a lot of long-lived objects.
  // TODO(hpayer): change to bool if no longer accessed from generated code
  intptr_t new_space_high_promotion_mode_active_;

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Used to adjust the limits that control the timing of the next GC.
  intptr_t size_of_old_gen_at_last_old_space_gc_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs. If reached, a global GC is forced.
  intptr_t external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles.
  intptr_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global GC.
  intptr_t amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Weak list heads, threaded through the objects.
  Object* native_contexts_list_;
  Object* array_buffers_list_;
  Object* allocation_sites_list_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  // The special hidden string which is an empty string, but does not match
  // any string when looked up in properties.
  String* hidden_string_;

  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };
  List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    GCEpilogueCallback callback;
    GCType gc_type;
  };
  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  GCCallback global_gc_prologue_callback_;
  GCCallback global_gc_epilogue_callback_;

  // Support for computing object sizes during GC.
  HeapObjectCallback gc_safe_size_of_old_object_;
  static int GcSafeSizeOfOldObject(HeapObject* object);

  // Update the GC state. Called from the mark-compact collector.
  void MarkMapPointersAsEncoded(bool encoded) {
    ASSERT(!encoded);
    gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
  }

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Performs a garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(GarbageCollector collector,
                                GCTracer* tracer);

  inline void UpdateOldSpaceLimits();

  // Allocate an uninitialized object in map space.  The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
  // (since both AllocateRaw and AllocateRawMap are inlined).
  MUST_USE_RESULT inline MaybeObject* AllocateRawMap();

  // Allocate an uninitialized object in the simple cell space.
  MUST_USE_RESULT inline MaybeObject* AllocateRawCell();

  // Allocate an uninitialized object in the global property cell space.
  MUST_USE_RESULT inline MaybeObject* AllocateRawPropertyCell();

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj,
                                 FixedArray* properties,
                                 Map* map);

  bool CreateInitialMaps();
  bool CreateInitialObjects();

  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string,
                                             Object* to_number,
                                             byte kind);

  // Allocate a JSArray with no elements.
  MUST_USE_RESULT MaybeObject* AllocateJSArray(
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateJSArrayWithAllocationSite(
      ElementsKind elements_kind,
      Handle<AllocationSite> allocation_site);

  // Allocate an empty fixed array.
  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();

  // Allocate an empty external array of the given type.
  MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray(
      ExternalArrayType array_type);

  // Allocate an empty fixed double array.
  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();

  // Performs a minor collection in the new generation.
  void Scavenge();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap,
      Object** pointer);

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
  static void ScavengeStoreBufferCallback(Heap* heap,
                                          MemoryChunk* page,
                                          StoreBufferEvent event);

  // Performs a major collection in the whole heap.
  void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();

  void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
  void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);

  // Called on heap tear-down.
  void TearDownArrayBuffers();

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it. Specifically, a function that creates
  // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
  // Please note this does not perform a garbage collection.
  inline void InitializeFunction(
      JSFunction* function,
      SharedFunctionInfo* shared,
      Object* prototype);

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  GCTracer* tracer_;

  // Allocates a small number to string cache.
  MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
  // Creates and installs the full-sized number string cache.
  void AllocateFullSizeNumberStringCache();
  // Get the length of the number to string cache based on the max semispace
  // size.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  void UpdateSurvivalRateTrend(int start_new_space_size);

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateLowThreshold = 10;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  int young_survivors_after_last_gc_;
  int high_survival_rate_period_length_;
  int low_survival_rate_period_length_;
  double survival_rate_;
  SurvivalRateTrend previous_survival_rate_trend_;
  SurvivalRateTrend survival_rate_trend_;

  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
    ASSERT(survival_rate_trend != FLUCTUATING);
    previous_survival_rate_trend_ = survival_rate_trend_;
    survival_rate_trend_ = survival_rate_trend;
  }

  SurvivalRateTrend survival_rate_trend() {
    if (survival_rate_trend_ == STABLE) {
      return STABLE;
    } else if (previous_survival_rate_trend_ == STABLE) {
      return survival_rate_trend_;
    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
      return FLUCTUATING;
    } else {
      return survival_rate_trend_;
    }
  }

  bool IsStableOrIncreasingSurvivalTrend() {
    switch (survival_rate_trend()) {
      case STABLE:
      case INCREASING:
        return true;
      default:
        return false;
    }
  }

  bool IsStableOrDecreasingSurvivalTrend() {
    switch (survival_rate_trend()) {
      case STABLE:
      case DECREASING:
        return true;
      default:
        return false;
    }
  }

  bool IsIncreasingSurvivalTrend() {
    return survival_rate_trend() == INCREASING;
  }

  bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }

  bool IsLowSurvivalRate() {
    return low_survival_rate_period_length_ > 0;
  }

  void SelectScavengingVisitorsTable();

  void StartIdleRound() {
    mark_sweeps_since_idle_round_started_ = 0;
  }

  void FinishIdleRound() {
    mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
    scavenges_since_last_idle_round_ = 0;
  }

  bool EnoughGarbageSinceLastIdleRound() {
    return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
  }

  // Estimates how many milliseconds a Mark-Sweep would take to complete.
  // In the idle notification handler we assume that this function will return:
  // - a number less than 10 for small heaps, which are less than 8 MB.
  // - a number greater than 10 for large heaps, which are greater than 32 MB.
  int TimeMarkSweepWouldTakeInMs() {
    // Rough estimate of how many megabytes of heap can be processed in 1 ms.
    static const int kMbPerMs = 2;

    int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
    return heap_size_mb / kMbPerMs;
  }

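  // Worked example (illustrative): at 2 MB per ms, an 8 MB heap yields
  // 8 / 2 = 4 ms, below the 10 ms small-heap bound, while a 64 MB heap
  // yields 64 / 2 = 32 ms, comfortably above it.
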
   2291   // Returns true if no more GC work is left.
   2292   bool IdleGlobalGC();
   2293 
   2294   void AdvanceIdleIncrementalMarking(intptr_t step_size);
   2295 
   2296   void ClearObjectStats(bool clear_last_time_stats = false);
   2297 
   2298   static const int kInitialStringTableSize = 2048;
   2299   static const int kInitialEvalCacheSize = 64;
   2300   static const int kInitialNumberStringCacheSize = 256;
   2301 
   2302   // Object counts and used memory by InstanceType
   2303   size_t object_counts_[OBJECT_STATS_COUNT];
   2304   size_t object_counts_last_time_[OBJECT_STATS_COUNT];
   2305   size_t object_sizes_[OBJECT_STATS_COUNT];
   2306   size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
   2307 
   2308   // Maximum GC pause.
   2309   double max_gc_pause_;
   2310 
   2311   // Total time spent in GC.
   2312   double total_gc_time_ms_;
   2313 
   2314   // Maximum size of objects alive after GC.
   2315   intptr_t max_alive_after_gc_;
   2316 
   2317   // Minimal interval between two subsequent collections.
   2318   double min_in_mutator_;
   2319 
   2320   // Size of objects alive after last GC.
   2321   intptr_t alive_after_last_gc_;
   2322 
   2323   double last_gc_end_timestamp_;
   2324 
   2325   // Cumulative GC time spent in marking
   2326   double marking_time_;
   2327 
   2328   // Cumulative GC time spent in sweeping
   2329   double sweeping_time_;
   2330 
   2331   MarkCompactCollector mark_compact_collector_;
   2332 
   2333   StoreBuffer store_buffer_;
   2334 
   2335   Marking marking_;
   2336 
   2337   IncrementalMarking incremental_marking_;
   2338 
   2339   int number_idle_notifications_;
   2340   unsigned int last_idle_notification_gc_count_;
   2341   bool last_idle_notification_gc_count_init_;
   2342 
   2343   int mark_sweeps_since_idle_round_started_;
   2344   unsigned int gc_count_at_last_idle_gc_;
   2345   int scavenges_since_last_idle_round_;
   2346 
   2347   // If the --deopt_every_n_garbage_collections flag is set to a positive value,
   2348   // this variable holds the number of garbage collections since the last
   2349   // deoptimization triggered by garbage collection.
   2350   int gcs_since_last_deopt_;
   2351 
   2352 #ifdef VERIFY_HEAP
   2353   int no_weak_embedded_maps_verification_scope_depth_;
   2354 #endif
   2355 
   2356   static const int kMaxMarkSweepsInIdleRound = 7;
   2357   static const int kIdleScavengeThreshold = 5;
   2358 
   2359   // Shared state read by the scavenge collector and set by ScavengeObject.
   2360   PromotionQueue promotion_queue_;
   2361 
   2362   // Flag is set when the heap has been configured.  The heap can be repeatedly
   2363   // configured through the API until it is set up.
   2364   bool configured_;
   2365 
   2366   ExternalStringTable external_string_table_;
   2367 
   2368   VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
   2369 
   2370   MemoryChunk* chunks_queued_for_free_;
   2371 
   2372   Mutex* relocation_mutex_;
   2373 #ifdef DEBUG
   2374   bool relocation_mutex_locked_by_optimizer_thread_;
#endif  // DEBUG

  friend class Factory;
  friend class GCTracer;
  friend class DisallowAllocationFailure;
  friend class AlwaysAllocateScope;
  friend class Page;
  friend class Isolate;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class MapCompact;
#ifdef VERIFY_HEAP
  friend class NoWeakEmbeddedMapsVerificationScope;
#endif

  DISALLOW_COPY_AND_ASSIGN(Heap);
};


class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                       //  0
  int* new_space_size;                     //  1
  int* new_space_capacity;                 //  2
  intptr_t* old_pointer_space_size;        //  3
  intptr_t* old_pointer_space_capacity;    //  4
  intptr_t* old_data_space_size;           //  5
  intptr_t* old_data_space_capacity;       //  6
  intptr_t* code_space_size;               //  7
  intptr_t* code_space_capacity;           //  8
  intptr_t* map_space_size;                //  9
  intptr_t* map_space_capacity;            // 10
  intptr_t* cell_space_size;               // 11
  intptr_t* cell_space_capacity;           // 12
  intptr_t* lo_space_size;                 // 13
  int* global_handle_count;                // 14
  int* weak_global_handle_count;           // 15
  int* pending_global_handle_count;        // 16
  int* near_death_global_handle_count;     // 17
  int* free_global_handle_count;           // 18
  intptr_t* memory_allocator_size;         // 19
  intptr_t* memory_allocator_capacity;     // 20
  int* objects_per_type;                   // 21
  int* size_per_type;                      // 22
  int* os_error;                           // 23
  int* end_marker;                         // 24
  intptr_t* property_cell_space_size;      // 25
  intptr_t* property_cell_space_capacity;  // 26
};


class DisallowAllocationFailure {
 public:
  inline DisallowAllocationFailure();
  inline ~DisallowAllocationFailure();

#ifdef DEBUG
 private:
  bool old_state_;
#endif
};


class AlwaysAllocateScope {
 public:
  inline AlwaysAllocateScope();
  inline ~AlwaysAllocateScope();

 private:
  // Implicitly disable artificial allocation failures.
  DisallowAllocationFailure disallow_allocation_failure_;
};

#ifdef VERIFY_HEAP
class NoWeakEmbeddedMapsVerificationScope {
 public:
  inline NoWeakEmbeddedMapsVerificationScope();
  inline ~NoWeakEmbeddedMapsVerificationScope();
};
#endif


// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};


// Space iterator for iterating over all spaces of the heap.  Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();
 private:
  Heap* heap_;
  int counter_;
};
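
// Illustrative use of the iteration protocol (a sketch; `heap` stands for
// any live Heap*):
//
//   AllSpaces spaces(heap);
//   for (Space* space = spaces.next(); space != NULL;
//        space = spaces.next()) {
//     // ... inspect *space ...
//   }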


// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.  Returns each space in turn, and null
// when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  OldSpace* next();
 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old pointer space, old data space, code space and cell space.  Returns
// each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  PagedSpace* next();
 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  SpaceIterator(Heap* heap, HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};
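
// Illustrative use (a sketch): the has_next()/next() protocol yields one
// object iterator per space, and the SpaceIterator retains ownership of the
// iterators it hands out:
//
//   SpaceIterator it(heap);
//   while (it.has_next()) {
//     ObjectIterator* objects = it.next();
//     // ... walk the objects of the current space; do not delete
//     // `objects`, the SpaceIterator deallocates it ...
//   }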


// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// these can only iterate over one space.
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As the implementation of
// free-node filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapObjectsFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering {
    kNoFiltering,
    kFilterUnreachable
  };

  explicit HeapIterator(Heap* heap);
  HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
  ~HeapIterator();

  HeapObject* next();
  void reset();

 private:
  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  HeapObject* NextObject();

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
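
// Illustrative use (a sketch): next() returns NULL once the heap is
// exhausted, so a whole-heap walk is a simple loop. With kFilterUnreachable
// the iteration must run to completion (see the comment above):
//
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // ... obj is the current heap object ...
//   }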


// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  int Lookup(Map* map, Name* name);

  // Update an element in the cache.
  void Update(Map* map, Name* name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Map* map, Name* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
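
// Illustrative read-through pattern (a sketch; `map`, `name` and
// `field_offset` are placeholders, and the keyed_lookup_cache() accessor on
// Isolate is assumed from the usual plumbing):
//
//   KeyedLookupCache* cache = isolate->keyed_lookup_cache();
//   int offset = cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     // ... compute the field offset the slow way, then:
//     cache->Update(map, name, field_offset);
//   }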


// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index equal to kAbsent means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  int Lookup(Map* source, Name* name) {
    if (!name->IsUniqueName()) return kAbsent;
    int index = Hash(source, name);
    Key& key = keys_[index];
    if ((key.source == source) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  void Update(Map* source, Name* name, int result) {
    ASSERT(result != kAbsent);
    if (name->IsUniqueName()) {
      int index = Hash(source, name);
      Key& key = keys_[index];
      key.source = source;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(Object* source, Name* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
            >> kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
            >> kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};


// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_EVACUATE_PAGES,
      MC_UPDATE_NEW_TO_NEW_POINTERS,
      MC_UPDATE_ROOT_TO_NEW_POINTERS,
      MC_UPDATE_OLD_TO_NEW_POINTERS,
      MC_UPDATE_POINTERS_TO_EVACUATED,
      MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
      MC_UPDATE_MISC_POINTERS,
      MC_WEAKCOLLECTION_PROCESS,
      MC_WEAKCOLLECTION_CLEAR,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
  };
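  // Illustrative use of Scope (a sketch; `tracer` is any live GCTracer*):
  // a stack-allocated Scope times one GC phase, and its destructor adds the
  // elapsed milliseconds to scopes_[scope]:
  //
  //   { GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_MARK);
  //     // ... marking work ...
  //   }  // elapsed time recorded here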

  explicit GCTracer(Heap* heap,
                    const char* gc_reason,
                    const char* collector_reason);
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(unsigned int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  void increment_nodes_died_in_new_space() {
    nodes_died_in_new_space_++;
  }

  void increment_nodes_copied_in_new_space() {
    nodes_copied_in_new_space_++;
  }

  void increment_nodes_promoted() {
    nodes_promoted_++;
  }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns the size of objects in the heap (in MB).
  inline double SizeOfHeapObjects();

  // Timestamp set in the constructor.
  double start_time_;

  // Size of objects in heap set in constructor.
  intptr_t start_object_size_;

  // Size of memory allocated from OS set in constructor.
  intptr_t start_memory_size_;

  // Type of collector.
  GarbageCollector collector_;

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  unsigned int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of free lists
  // before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in the mutator, i.e. the time elapsed between the
  // end of the previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  intptr_t promoted_objects_size_;

  // Number of nodes that died in the new space.
  int nodes_died_in_new_space_;

  // Number of nodes copied to the new space.
  int nodes_copied_in_new_space_;

  // Number of nodes promoted to the old space.
  int nodes_promoted_;

  // Incremental marking steps counters.
  int steps_count_;
  double steps_took_;
  double longest_step_;
  int steps_count_since_last_gc_;
  double steps_took_since_last_gc_;

  Heap* heap_;

  const char* gc_reason_;
  const char* collector_reason_;
};


class RegExpResultsCache {
 public:
  enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };

  // Attempt to retrieve a cached result.  On failure, 0 is returned as a Smi.
  // On success, the returned result is guaranteed to be a COW-array.
  static Object* Lookup(Heap* heap,
                        String* key_string,
                        Object* key_pattern,
                        ResultsCacheType type);
  // Attempt to add value_array to the cache specified by type.  On success,
  // value_array is turned into a COW-array.
  static void Enter(Heap* heap,
                    String* key_string,
                    Object* key_pattern,
                    FixedArray* value_array,
                    ResultsCacheType type);
  static void Clear(FixedArray* cache);
  static const int kRegExpResultsCacheSize = 0x100;

 private:
  static const int kArrayEntriesPerCacheEntry = 4;
  static const int kStringOffset = 0;
  static const int kPatternOffset = 1;
  static const int kArrayOffset = 2;
};
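
// Illustrative miss/fill pattern (a sketch; `subject`, `pattern` and
// `value_array` are placeholders): a miss comes back as Smi 0, and a
// successful Enter turns value_array into a COW-array:
//
//   Object* cached = RegExpResultsCache::Lookup(
//       heap, subject, pattern, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   if (cached == Smi::FromInt(0)) {
//     // ... compute results into a FixedArray* value_array, then:
//     RegExpResultsCache::Enter(
//         heap, subject, pattern, value_array,
//         RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   }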


class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
  static const int kTranscendentalTypeBits = 3;
  STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);

  // The cache contains raw Object pointers.  This method disposes of
  // them before a garbage collection.
  void Clear();

 private:
  class SubCache {
    static const int kCacheSize = 512;

    explicit SubCache(Type t);

    MUST_USE_RESULT inline MaybeObject* Get(double input);

    inline double Calculate(double input);

    struct Element {
      uint32_t in[2];
      Object* output;
    };

    union Converter {
      double dbl;
      uint32_t integers[2];
    };

    inline static int Hash(const Converter& c) {
      uint32_t hash = (c.integers[0] ^ c.integers[1]);
      hash ^= static_cast<int32_t>(hash) >> 16;
      hash ^= static_cast<int32_t>(hash) >> 8;
      return (hash & (kCacheSize - 1));
    }

    Element elements_[kCacheSize];
    Type type_;
    Isolate* isolate_;

    // Allow access to the caches_ array as an ExternalReference.
    friend class ExternalReference;
    // Inline implementation of the cache.
    friend class TranscendentalCacheStub;
    // For evaluating value.
    friend class TranscendentalCache;

    DISALLOW_COPY_AND_ASSIGN(SubCache);
  };

  TranscendentalCache() {
    for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
  }

  ~TranscendentalCache() {
    for (int i = 0; i < kNumberOfCaches; ++i) delete caches_[i];
  }

  // Used to create an external reference.
  inline Address cache_array_address();

  // Instantiation
  friend class Isolate;
  // Inline implementation of the caching.
  friend class TranscendentalCacheStub;
  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;

  SubCache* caches_[kNumberOfCaches];
  DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
};
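
// Illustrative use (a sketch; the transcendental_cache() accessor on Isolate
// is assumed from the usual plumbing). Get() may fail to allocate the heap
// number, hence the MaybeObject protocol:
//
//   MaybeObject* maybe = isolate->transcendental_cache()->Get(
//       TranscendentalCache::SIN, 0.5);
//   Object* result;
//   if (!maybe->ToObject(&result)) {
//     // allocation failure; propagate `maybe` for a retry after GC
//   }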


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Returns whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned, as in some GC situations the object may have been
  // moved.
  virtual Object* RetainAs(Object* object) = 0;
};
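
// A minimal sketch of a retainer (hypothetical subclass, shown only to
// illustrate the RetainAs() contract):
//
//   class KeepEverythingRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) {
//       return object;  // retain all objects at their current address
//     }
//   };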


// Intrusive object marking uses the least significant bit of a heap
// object's map word to mark objects. Normally all map words have the
// least significant bit set because they contain a tagged map pointer.
// If the bit is not set, the object is marked. All objects should be
// unmarked before resuming JavaScript execution.
class IntrusiveMarking {
 public:
  static bool IsMarked(HeapObject* object) {
    return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
  }

  static void ClearMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
    ASSERT(!IsMarked(object));
  }

  static void SetMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
    ASSERT(IsMarked(object));
  }

  static Map* MapOfMarkedObject(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
  }

  static int SizeOfMarkedObject(HeapObject* object) {
    return object->SizeFromMap(MapOfMarkedObject(object));
  }

 private:
  static const uintptr_t kNotMarkedBit = 0x1;
  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
};
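
// Worked example of the encoding: an unmarked object's map word is a tagged
// map pointer, so its least significant bit is set (kNotMarkedBit overlaps
// kHeapObjectTag, as the STATIC_ASSERT above checks). SetMark clears that
// bit, IsMarked tests for it being clear, and MapOfMarkedObject ORs it back
// in, recovering the original tagged map pointer whether or not the object
// is marked.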


#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after first match.
  };

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target,
             WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
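
// Illustrative use (a sketch; `target` and `root_slot` are placeholders):
// trace how `target` is reached from a given root slot, stopping at the
// first match:
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(root_slot);
//   bool reachable = tracer.found();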
#endif  // DEBUG

} }  // namespace v8::internal

#endif  // V8_HEAP_H_