      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include <stdlib.h>
     29 #include <utility>
     30 
     31 #include "src/compilation-cache.h"
     32 #include "src/context-measure.h"
     33 #include "src/deoptimizer.h"
     34 #include "src/elements.h"
     35 #include "src/execution.h"
     36 #include "src/factory.h"
     37 #include "src/field-type.h"
     38 #include "src/global-handles.h"
     39 #include "src/heap/gc-tracer.h"
     40 #include "src/heap/memory-reducer.h"
     41 #include "src/ic/ic.h"
     42 #include "src/macro-assembler.h"
     43 #include "src/regexp/jsregexp.h"
     44 #include "src/snapshot/snapshot.h"
     45 #include "test/cctest/cctest.h"
     46 #include "test/cctest/heap/heap-tester.h"
     47 #include "test/cctest/heap/heap-utils.h"
     48 #include "test/cctest/test-feedback-vector.h"
     49 
     50 
     51 namespace v8 {
     52 namespace internal {
     53 
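        // Helper: checks that |map| is a heap object in the V8 heap whose map is the
        // meta map and whose instance type and instance size match the expectations.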
     54 static void CheckMap(Map* map, int type, int instance_size) {
     55   CHECK(map->IsHeapObject());
     56 #ifdef DEBUG
     57   CHECK(CcTest::heap()->Contains(map));
     58 #endif
     59   CHECK_EQ(CcTest::heap()->meta_map(), map->map());
     60   CHECK_EQ(type, map->instance_type());
     61   CHECK_EQ(instance_size, map->instance_size());
     62 }
     63 
     64 
     65 TEST(HeapMaps) {
     66   CcTest::InitializeVM();
     67   Heap* heap = CcTest::heap();
     68   CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
     69   CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
     70 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
     71   CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
     72   SIMD128_TYPES(SIMD128_TYPE)
     73 #undef SIMD128_TYPE
     74   CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
     75   CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
     76 }
     77 
     78 
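        // The next three helpers check that Object::ToString produces the expected
        // text for oddballs, Smis and heap numbers.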
     79 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
     80   CHECK(obj->IsOddball());
     81   Handle<Object> handle(obj, isolate);
     82   Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
     83   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
     84 }
     85 
     86 
     87 static void CheckSmi(Isolate* isolate, int value, const char* string) {
     88   Handle<Object> handle(Smi::FromInt(value), isolate);
     89   Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
     90   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
     91 }
     92 
     93 
     94 static void CheckNumber(Isolate* isolate, double value, const char* string) {
     95   Handle<Object> number = isolate->factory()->NewNumber(value);
     96   CHECK(number->IsNumber());
     97   Handle<Object> print_string =
     98       Object::ToString(isolate, number).ToHandleChecked();
     99   CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
    100 }
    101 
    102 
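        // Assembles a trivial code object and checks that Isolate::FindCodeObject maps
        // every interior address of it back to the Code object, while an address inside
        // a different code object does not map to it.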
    103 static void CheckFindCodeObject(Isolate* isolate) {
    104   // Test FindCodeObject
    105 #define __ assm.
    106 
    107   Assembler assm(isolate, NULL, 0);
    108 
    109   __ nop();  // supported on all architectures
    110 
    111   CodeDesc desc;
    112   assm.GetCode(&desc);
    113   Handle<Code> code = isolate->factory()->NewCode(
    114       desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
    115   CHECK(code->IsCode());
    116 
    117   HeapObject* obj = HeapObject::cast(*code);
    118   Address obj_addr = obj->address();
    119 
    120   for (int i = 0; i < obj->Size(); i += kPointerSize) {
    121     Object* found = isolate->FindCodeObject(obj_addr + i);
    122     CHECK_EQ(*code, found);
    123   }
    124 
    125   Handle<Code> copy = isolate->factory()->NewCode(
    126       desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
    127   HeapObject* obj_copy = HeapObject::cast(*copy);
    128   Object* not_right = isolate->FindCodeObject(obj_copy->address() +
    129                                               obj_copy->Size() / 2);
    130   CHECK(not_right != *code);
    131 }
    132 
    133 
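        // Handle::is_null() reports whether the handle has a location, not whether the
        // stored value is null, so a handle wrapping a null Object* is not null.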
    134 TEST(HandleNull) {
    135   CcTest::InitializeVM();
    136   Isolate* isolate = CcTest::i_isolate();
    137   HandleScope outer_scope(isolate);
    138   LocalContext context;
    139   Handle<Object> n(static_cast<Object*>(nullptr), isolate);
    140   CHECK(!n.is_null());
    141 }
    142 
    143 
    144 TEST(HeapObjects) {
    145   CcTest::InitializeVM();
    146   Isolate* isolate = CcTest::i_isolate();
    147   Factory* factory = isolate->factory();
    148   Heap* heap = isolate->heap();
    149 
    150   HandleScope sc(isolate);
    151   Handle<Object> value = factory->NewNumber(1.000123);
    152   CHECK(value->IsHeapNumber());
    153   CHECK(value->IsNumber());
    154   CHECK_EQ(1.000123, value->Number());
    155 
    156   value = factory->NewNumber(1.0);
    157   CHECK(value->IsSmi());
    158   CHECK(value->IsNumber());
    159   CHECK_EQ(1.0, value->Number());
    160 
    161   value = factory->NewNumberFromInt(1024);
    162   CHECK(value->IsSmi());
    163   CHECK(value->IsNumber());
    164   CHECK_EQ(1024.0, value->Number());
    165 
    166   value = factory->NewNumberFromInt(Smi::kMinValue);
    167   CHECK(value->IsSmi());
    168   CHECK(value->IsNumber());
    169   CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
    170 
    171   value = factory->NewNumberFromInt(Smi::kMaxValue);
    172   CHECK(value->IsSmi());
    173   CHECK(value->IsNumber());
    174   CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
    175 
    176 #if !defined(V8_TARGET_ARCH_64_BIT)
    177   // TODO(lrn): We need a NumberFromIntptr function in order to test this.
    178   value = factory->NewNumberFromInt(Smi::kMinValue - 1);
    179   CHECK(value->IsHeapNumber());
    180   CHECK(value->IsNumber());
    181   CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
    182 #endif
    183 
    184   value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
    185   CHECK(value->IsHeapNumber());
    186   CHECK(value->IsNumber());
    187   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
    188            value->Number());
    189 
    190   value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
    191   CHECK(value->IsHeapNumber());
    192   CHECK(value->IsNumber());
    193   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
    194            value->Number());
    195 
    196   // NaN oddball checks
    197   CHECK(factory->nan_value()->IsNumber());
    198   CHECK(std::isnan(factory->nan_value()->Number()));
    199 
    200   Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
    201   CHECK(s->IsString());
    202   CHECK_EQ(10, s->length());
    203 
    204   Handle<String> object_string = Handle<String>::cast(factory->Object_string());
    205   Handle<JSGlobalObject> global(
    206       CcTest::i_isolate()->context()->global_object());
    207   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
    208 
    209   // Check ToString for oddballs
    210   CheckOddball(isolate, heap->true_value(), "true");
    211   CheckOddball(isolate, heap->false_value(), "false");
    212   CheckOddball(isolate, heap->null_value(), "null");
    213   CheckOddball(isolate, heap->undefined_value(), "undefined");
    214 
    215   // Check ToString for Smis
    216   CheckSmi(isolate, 0, "0");
    217   CheckSmi(isolate, 42, "42");
    218   CheckSmi(isolate, -42, "-42");
    219 
    220   // Check ToString for Numbers
    221   CheckNumber(isolate, 1.1, "1.1");
    222 
    223   CheckFindCodeObject(isolate);
    224 }
    225 
    226 
    227 template <typename T, typename LANE_TYPE, int LANES>
    228 static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
    229                            LANE_TYPE other_value) {
    230   // Check against lane_values, and check that all lanes can be set to
    231   // other_value without disturbing the other lanes.
    232   for (int i = 0; i < LANES; i++) {
    233     CHECK_EQ(lane_values[i], value->get_lane(i));
    234   }
    235   for (int i = 0; i < LANES; i++) {
    236     value->set_lane(i, other_value);  // change the value
    237     for (int j = 0; j < LANES; j++) {
    238       if (i != j)
    239         CHECK_EQ(lane_values[j], value->get_lane(j));
    240       else
    241         CHECK_EQ(other_value, value->get_lane(j));
    242     }
    243     value->set_lane(i, lane_values[i]);  // restore the lane
    244   }
    245   CHECK(value->BooleanValue());  // SIMD values are 'true'.
    246 }
    247 
    248 
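        // Exercises allocation and lane get/set for every SIMD value type, plus the
        // printed representation when OBJECT_PRINT is enabled.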
    249 TEST(SimdObjects) {
    250   CcTest::InitializeVM();
    251   Isolate* isolate = CcTest::i_isolate();
    252   Factory* factory = isolate->factory();
    253 
    254   HandleScope sc(isolate);
    255 
    256   // Float32x4
    257   {
    258     float lanes[4] = {1, 2, 3, 4};
    259     float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
    260     float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
    261 
    262     Handle<Float32x4> value = factory->NewFloat32x4(lanes);
    263     CHECK(value->IsFloat32x4());
    264     CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);
    265 
    266     // Check special lane values.
    267     value->set_lane(1, -0.0);
    268     CHECK_EQ(-0.0f, value->get_lane(1));
    269     CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
    270     value->set_lane(2, quiet_NaN);
    271     CHECK(std::isnan(value->get_lane(2)));
    272     value->set_lane(3, signaling_NaN);
    273     CHECK(std::isnan(value->get_lane(3)));
    274 
    275 #ifdef OBJECT_PRINT
    276     // Check value printing.
    277     {
    278       value = factory->NewFloat32x4(lanes);
    279       std::ostringstream os;
    280       value->Float32x4Print(os);
    281       CHECK_EQ("1, 2, 3, 4", os.str());
    282     }
    283     {
    284       float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
    285       value = factory->NewFloat32x4(special_lanes);
    286       std::ostringstream os;
    287       value->Float32x4Print(os);
    288       // Value printing doesn't preserve signed zeroes.
    289       CHECK_EQ("0, 0, NaN, NaN", os.str());
    290     }
    291 #endif  // OBJECT_PRINT
    292   }
    293   // Int32x4
    294   {
    295     int32_t lanes[4] = {1, 2, 3, 4};
    296 
    297     Handle<Int32x4> value = factory->NewInt32x4(lanes);
    298     CHECK(value->IsInt32x4());
    299     CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);
    300 
    301 #ifdef OBJECT_PRINT
    302     std::ostringstream os;
    303     value->Int32x4Print(os);
    304     CHECK_EQ("1, 2, 3, 4", os.str());
    305 #endif  // OBJECT_PRINT
    306   }
    307   // Uint32x4
    308   {
    309     uint32_t lanes[4] = {1, 2, 3, 4};
    310 
    311     Handle<Uint32x4> value = factory->NewUint32x4(lanes);
    312     CHECK(value->IsUint32x4());
    313     CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);
    314 
    315 #ifdef OBJECT_PRINT
    316     std::ostringstream os;
    317     value->Uint32x4Print(os);
    318     CHECK_EQ("1, 2, 3, 4", os.str());
    319 #endif  // OBJECT_PRINT
    320   }
    321   // Bool32x4
    322   {
    323     bool lanes[4] = {true, false, true, false};
    324 
    325     Handle<Bool32x4> value = factory->NewBool32x4(lanes);
    326     CHECK(value->IsBool32x4());
    327     CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);
    328 
    329 #ifdef OBJECT_PRINT
    330     std::ostringstream os;
    331     value->Bool32x4Print(os);
    332     CHECK_EQ("true, false, true, false", os.str());
    333 #endif  // OBJECT_PRINT
    334   }
    335   // Int16x8
    336   {
    337     int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
    338 
    339     Handle<Int16x8> value = factory->NewInt16x8(lanes);
    340     CHECK(value->IsInt16x8());
    341     CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);
    342 
    343 #ifdef OBJECT_PRINT
    344     std::ostringstream os;
    345     value->Int16x8Print(os);
    346     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
    347 #endif  // OBJECT_PRINT
    348   }
    349   // Uint16x8
    350   {
    351     uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
    352 
    353     Handle<Uint16x8> value = factory->NewUint16x8(lanes);
    354     CHECK(value->IsUint16x8());
    355     CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);
    356 
    357 #ifdef OBJECT_PRINT
    358     std::ostringstream os;
    359     value->Uint16x8Print(os);
    360     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
    361 #endif  // OBJECT_PRINT
    362   }
    363   // Bool16x8
    364   {
    365     bool lanes[8] = {true, false, true, false, true, false, true, false};
    366 
    367     Handle<Bool16x8> value = factory->NewBool16x8(lanes);
    368     CHECK(value->IsBool16x8());
    369     CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);
    370 
    371 #ifdef OBJECT_PRINT
    372     std::ostringstream os;
    373     value->Bool16x8Print(os);
    374     CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
    375 #endif  // OBJECT_PRINT
    376   }
    377   // Int8x16
    378   {
    379     int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
    380 
    381     Handle<Int8x16> value = factory->NewInt8x16(lanes);
    382     CHECK(value->IsInt8x16());
    383     CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);
    384 
    385 #ifdef OBJECT_PRINT
    386     std::ostringstream os;
    387     value->Int8x16Print(os);
    388     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
    389 #endif  // OBJECT_PRINT
    390   }
    391   // Uint8x16
    392   {
    393     uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
    394 
    395     Handle<Uint8x16> value = factory->NewUint8x16(lanes);
    396     CHECK(value->IsUint8x16());
    397     CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);
    398 
    399 #ifdef OBJECT_PRINT
    400     std::ostringstream os;
    401     value->Uint8x16Print(os);
    402     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
    403 #endif  // OBJECT_PRINT
    404   }
    405   // Bool8x16
    406   {
    407     bool lanes[16] = {true, false, true, false, true, false, true, false,
    408                       true, false, true, false, true, false, true, false};
    409 
    410     Handle<Bool8x16> value = factory->NewBool8x16(lanes);
    411     CHECK(value->IsBool8x16());
    412     CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);
    413 
    414 #ifdef OBJECT_PRINT
    415     std::ostringstream os;
    416     value->Bool8x16Print(os);
    417     CHECK_EQ(
    418         "true, false, true, false, true, false, true, false, true, false, "
    419         "true, false, true, false, true, false",
    420         os.str());
    421 #endif  // OBJECT_PRINT
    422   }
    423 }
    424 
    425 
    426 TEST(Tagging) {
    427   CcTest::InitializeVM();
    428   int request = 24;
    429   CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
    430   CHECK(Smi::FromInt(42)->IsSmi());
    431   CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
    432   CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
    433 }
    434 
    435 
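        // Objects reachable from the global object must survive new-space GCs together
        // with their properties.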
    436 TEST(GarbageCollection) {
    437   CcTest::InitializeVM();
    438   Isolate* isolate = CcTest::i_isolate();
    439   Heap* heap = isolate->heap();
    440   Factory* factory = isolate->factory();
    441 
    442   HandleScope sc(isolate);
    443   // Check GC.
    444   heap->CollectGarbage(NEW_SPACE);
    445 
    446   Handle<JSGlobalObject> global(
    447       CcTest::i_isolate()->context()->global_object());
    448   Handle<String> name = factory->InternalizeUtf8String("theFunction");
    449   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    450   Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
    451   Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
    452   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
    453   Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
    454 
    455   {
    456     HandleScope inner_scope(isolate);
    457     // Allocate a function and keep it in the global object's property.
    458     Handle<JSFunction> function = factory->NewFunction(name);
    459     JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    460     // Allocate an object.  Unrooted after leaving the scope.
    461     Handle<JSObject> obj = factory->NewJSObject(function);
    462     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    463     JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
    464 
    465     CHECK_EQ(Smi::FromInt(23),
    466              *Object::GetProperty(obj, prop_name).ToHandleChecked());
    467     CHECK_EQ(Smi::FromInt(24),
    468              *Object::GetProperty(obj, prop_namex).ToHandleChecked());
    469   }
    470 
    471   heap->CollectGarbage(NEW_SPACE);
    472 
    473   // Function should be alive.
    474   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
    475   // Check function is retained.
    476   Handle<Object> func_value =
    477       Object::GetProperty(global, name).ToHandleChecked();
    478   CHECK(func_value->IsJSFunction());
    479   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    480 
    481   {
    482     HandleScope inner_scope(isolate);
    483     // Allocate another object, make it reachable from global.
    484     Handle<JSObject> obj = factory->NewJSObject(function);
    485     JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    486     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    487   }
    488 
    489   // After gc, it should survive.
    490   heap->CollectGarbage(NEW_SPACE);
    491 
    492   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
    493   Handle<Object> obj =
    494       Object::GetProperty(global, obj_name).ToHandleChecked();
    495   CHECK(obj->IsJSObject());
    496   CHECK_EQ(Smi::FromInt(23),
    497            *Object::GetProperty(obj, prop_name).ToHandleChecked());
    498 }
    499 
    500 
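        // Allocates a string from UTF-8 data and verifies its length and character
        // contents.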
    501 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
    502   HandleScope scope(isolate);
    503   Handle<String> s = isolate->factory()->NewStringFromUtf8(
    504       CStrVector(string)).ToHandleChecked();
    505   CHECK_EQ(StrLength(string), s->length());
    506   for (int index = 0; index < s->length(); index++) {
    507     CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
    508   }
    509 }
    510 
    511 
    512 TEST(String) {
    513   CcTest::InitializeVM();
    514   Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
    515 
    516   VerifyStringAllocation(isolate, "a");
    517   VerifyStringAllocation(isolate, "ab");
    518   VerifyStringAllocation(isolate, "abc");
    519   VerifyStringAllocation(isolate, "abcd");
    520   VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
    521 }
    522 
    523 
    524 TEST(LocalHandles) {
    525   CcTest::InitializeVM();
    526   Isolate* isolate = CcTest::i_isolate();
    527   Factory* factory = isolate->factory();
    528 
    529   v8::HandleScope scope(CcTest::isolate());
    530   const char* name = "Kasper the spunky";
    531   Handle<String> string = factory->NewStringFromAsciiChecked(name);
    532   CHECK_EQ(StrLength(name), string->length());
    533 }
    534 
    535 
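        // Global handles keep their targets alive across a GC; handles created from the
        // same object still refer to the same heap object afterwards.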
    536 TEST(GlobalHandles) {
    537   CcTest::InitializeVM();
    538   Isolate* isolate = CcTest::i_isolate();
    539   Heap* heap = isolate->heap();
    540   Factory* factory = isolate->factory();
    541   GlobalHandles* global_handles = isolate->global_handles();
    542 
    543   Handle<Object> h1;
    544   Handle<Object> h2;
    545   Handle<Object> h3;
    546   Handle<Object> h4;
    547 
    548   {
    549     HandleScope scope(isolate);
    550 
    551     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    552     Handle<Object> u = factory->NewNumber(1.12344);
    553 
    554     h1 = global_handles->Create(*i);
    555     h2 = global_handles->Create(*u);
    556     h3 = global_handles->Create(*i);
    557     h4 = global_handles->Create(*u);
    558   }
    559 
    560   // After GC, it should survive.
    561   heap->CollectGarbage(NEW_SPACE);
    562 
    563   CHECK((*h1)->IsString());
    564   CHECK((*h2)->IsHeapNumber());
    565   CHECK((*h3)->IsString());
    566   CHECK((*h4)->IsHeapNumber());
    567 
    568   CHECK_EQ(*h3, *h1);
    569   GlobalHandles::Destroy(h1.location());
    570   GlobalHandles::Destroy(h3.location());
    571 
    572   CHECK_EQ(*h4, *h2);
    573   GlobalHandles::Destroy(h2.location());
    574   GlobalHandles::Destroy(h4.location());
    575 }
    576 
    577 
    578 static bool WeakPointerCleared = false;
    579 
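        // Weak callback shared by the tests below: when invoked with the expected
        // parameter id (1234) it records that the weak pointer was cleared and resets
        // the handle.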
    580 static void TestWeakGlobalHandleCallback(
    581     const v8::WeakCallbackInfo<void>& data) {
    582   std::pair<v8::Persistent<v8::Value>*, int>* p =
    583       reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
    584           data.GetParameter());
    585   if (p->second == 1234) WeakPointerCleared = true;
    586   p->first->Reset();
    587 }
    588 
    589 
    590 TEST(WeakGlobalHandlesScavenge) {
    591   i::FLAG_stress_compaction = false;
    592   CcTest::InitializeVM();
    593   Isolate* isolate = CcTest::i_isolate();
    594   Heap* heap = isolate->heap();
    595   Factory* factory = isolate->factory();
    596   GlobalHandles* global_handles = isolate->global_handles();
    597 
    598   WeakPointerCleared = false;
    599 
    600   Handle<Object> h1;
    601   Handle<Object> h2;
    602 
    603   {
    604     HandleScope scope(isolate);
    605 
    606     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    607     Handle<Object> u = factory->NewNumber(1.12344);
    608 
    609     h1 = global_handles->Create(*i);
    610     h2 = global_handles->Create(*u);
    611   }
    612 
    613   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
    614   GlobalHandles::MakeWeak(
    615       h2.location(), reinterpret_cast<void*>(&handle_and_id),
    616       &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
    617 
    618   // Scavenge treats weak pointers as normal roots.
    619   heap->CollectGarbage(NEW_SPACE);
    620 
    621   CHECK((*h1)->IsString());
    622   CHECK((*h2)->IsHeapNumber());
    623 
    624   CHECK(!WeakPointerCleared);
    625   CHECK(!global_handles->IsNearDeath(h2.location()));
    626   CHECK(!global_handles->IsNearDeath(h1.location()));
    627 
    628   GlobalHandles::Destroy(h1.location());
    629   GlobalHandles::Destroy(h2.location());
    630 }
    631 
    632 
    633 TEST(WeakGlobalHandlesMark) {
    634   CcTest::InitializeVM();
    635   Isolate* isolate = CcTest::i_isolate();
    636   Heap* heap = isolate->heap();
    637   Factory* factory = isolate->factory();
    638   GlobalHandles* global_handles = isolate->global_handles();
    639 
    640   WeakPointerCleared = false;
    641 
    642   Handle<Object> h1;
    643   Handle<Object> h2;
    644 
    645   {
    646     HandleScope scope(isolate);
    647 
    648     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    649     Handle<Object> u = factory->NewNumber(1.12344);
    650 
    651     h1 = global_handles->Create(*i);
    652     h2 = global_handles->Create(*u);
    653   }
    654 
    655   // Make sure the objects are promoted.
    656   heap->CollectGarbage(OLD_SPACE);
    657   heap->CollectGarbage(NEW_SPACE);
    658   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
    659 
    660   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
    661   GlobalHandles::MakeWeak(
    662       h2.location(), reinterpret_cast<void*>(&handle_and_id),
    663       &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
    664   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
    665   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
    666 
    667   // Incremental marking potentially marked handles before they turned weak.
    668   heap->CollectAllGarbage();
    669 
    670   CHECK((*h1)->IsString());
    671 
    672   CHECK(WeakPointerCleared);
    673   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
    674 
    675   GlobalHandles::Destroy(h1.location());
    676 }
    677 
    678 
    679 TEST(DeleteWeakGlobalHandle) {
    680   i::FLAG_stress_compaction = false;
    681   CcTest::InitializeVM();
    682   Isolate* isolate = CcTest::i_isolate();
    683   Heap* heap = isolate->heap();
    684   Factory* factory = isolate->factory();
    685   GlobalHandles* global_handles = isolate->global_handles();
    686 
    687   WeakPointerCleared = false;
    688 
    689   Handle<Object> h;
    690 
    691   {
    692     HandleScope scope(isolate);
    693 
    694     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    695     h = global_handles->Create(*i);
    696   }
    697 
    698   std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
    699   GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
    700                           &TestWeakGlobalHandleCallback,
    701                           v8::WeakCallbackType::kParameter);
    702 
    703   // Scavenge does not recognize the weak reference.
    704   heap->CollectGarbage(NEW_SPACE);
    705 
    706   CHECK(!WeakPointerCleared);
    707 
    708   // Mark-compact treats weak reference properly.
    709   heap->CollectGarbage(OLD_SPACE);
    710 
    711   CHECK(WeakPointerCleared);
    712 }
    713 
    714 TEST(DoNotPromoteWhiteObjectsOnScavenge) {
    715   CcTest::InitializeVM();
    716   Isolate* isolate = CcTest::i_isolate();
    717   Heap* heap = isolate->heap();
    718   Factory* factory = isolate->factory();
    719 
    720   HandleScope scope(isolate);
    721   Handle<Object> white = factory->NewStringFromStaticChars("white");
    722 
    723   CHECK(Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(*white))));
    724 
    725   heap->CollectGarbage(NEW_SPACE);
    726 
    727   CHECK(heap->InNewSpace(*white));
    728 }
    729 
    730 TEST(PromoteGreyOrBlackObjectsOnScavenge) {
    731   CcTest::InitializeVM();
    732   Isolate* isolate = CcTest::i_isolate();
    733   Heap* heap = isolate->heap();
    734   Factory* factory = isolate->factory();
    735 
    736   HandleScope scope(isolate);
    737   Handle<Object> marked = factory->NewStringFromStaticChars("marked");
    738 
    739   IncrementalMarking* marking = heap->incremental_marking();
    740   marking->Stop();
    741   heap->StartIncrementalMarking();
    742   while (Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(*marked)))) {
    743     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
    744                   IncrementalMarking::FORCE_MARKING,
    745                   IncrementalMarking::DO_NOT_FORCE_COMPLETION);
    746   }
    747 
    748   heap->CollectGarbage(NEW_SPACE);
    749 
    750   CHECK(!heap->InNewSpace(*marked));
    751 }
    752 
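        // Allocates a BytecodeArray, verifies its layout and contents, then forces its
        // constant pool onto an evacuation candidate so that a full GC must update the
        // constant pool pointer when the pool moves.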
    753 TEST(BytecodeArray) {
    754   static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
    755   static const int kRawBytesSize = sizeof(kRawBytes);
    756   static const int kFrameSize = 32;
    757   static const int kParameterCount = 2;
    758 
    759   i::FLAG_manual_evacuation_candidates_selection = true;
    760   CcTest::InitializeVM();
    761   Isolate* isolate = CcTest::i_isolate();
    762   Heap* heap = isolate->heap();
    763   Factory* factory = isolate->factory();
    764   HandleScope scope(isolate);
    765 
    766   heap::SimulateFullSpace(heap->old_space());
    767   Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
    768   for (int i = 0; i < 5; i++) {
    769     Handle<Object> number = factory->NewHeapNumber(i);
    770     constant_pool->set(i, *number);
    771   }
    772 
    773   // Allocate and initialize BytecodeArray
    774   Handle<BytecodeArray> array = factory->NewBytecodeArray(
    775       kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
    776 
    777   CHECK(array->IsBytecodeArray());
    778   CHECK_EQ(array->length(), kRawBytesSize);
    779   CHECK_EQ(array->frame_size(), kFrameSize);
    780   CHECK_EQ(array->parameter_count(), kParameterCount);
    781   CHECK_EQ(array->constant_pool(), *constant_pool);
    782   CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
    783   CHECK_GE(array->address() + array->BytecodeArraySize(),
    784            array->GetFirstBytecodeAddress() + array->length());
    785   for (int i = 0; i < kRawBytesSize; i++) {
    786     CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
    787     CHECK_EQ(array->get(i), kRawBytes[i]);
    788   }
    789 
    790   FixedArray* old_constant_pool_address = *constant_pool;
    791 
    792   // Perform a full garbage collection and force the constant pool to be on an
    793   // evacuation candidate.
    794   Page* evac_page = Page::FromAddress(constant_pool->address());
    795   evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
    796   heap->CollectAllGarbage();
    797 
    798   // BytecodeArray should survive.
    799   CHECK_EQ(array->length(), kRawBytesSize);
    800   CHECK_EQ(array->frame_size(), kFrameSize);
    801   for (int i = 0; i < kRawBytesSize; i++) {
    802     CHECK_EQ(array->get(i), kRawBytes[i]);
    803     CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
    804   }
    805 
    806   // Constant pool should have been migrated.
    807   CHECK_EQ(array->constant_pool(), *constant_pool);
    808   CHECK_NE(array->constant_pool(), old_constant_pool_address);
    809 }
    810 
    811 
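        // A list of JavaScript/Java reserved words used to exercise the string table
        // below.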
    812 static const char* not_so_random_string_table[] = {
    813   "abstract",
    814   "boolean",
    815   "break",
    816   "byte",
    817   "case",
    818   "catch",
    819   "char",
    820   "class",
    821   "const",
    822   "continue",
    823   "debugger",
    824   "default",
    825   "delete",
    826   "do",
    827   "double",
    828   "else",
    829   "enum",
    830   "export",
    831   "extends",
    832   "false",
    833   "final",
    834   "finally",
    835   "float",
    836   "for",
    837   "function",
    838   "goto",
    839   "if",
    840   "implements",
    841   "import",
    842   "in",
    843   "instanceof",
    844   "int",
    845   "interface",
    846   "long",
    847   "native",
    848   "new",
    849   "null",
    850   "package",
    851   "private",
    852   "protected",
    853   "public",
    854   "return",
    855   "short",
    856   "static",
    857   "super",
    858   "switch",
    859   "synchronized",
    860   "this",
    861   "throw",
    862   "throws",
    863   "transient",
    864   "true",
    865   "try",
    866   "typeof",
    867   "var",
    868   "void",
    869   "volatile",
    870   "while",
    871   "with",
    872   0
    873 };
    874 
    875 
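        // Internalizing the same UTF-8 string repeatedly must always yield the same
        // internalized string object.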
    876 static void CheckInternalizedStrings(const char** strings) {
    877   Isolate* isolate = CcTest::i_isolate();
    878   Factory* factory = isolate->factory();
    879   for (const char* string = *strings; *strings != 0; string = *strings++) {
    880     HandleScope scope(isolate);
    881     Handle<String> a =
    882         isolate->factory()->InternalizeUtf8String(CStrVector(string));
    883     // InternalizeUtf8String may return a failure if a GC is needed.
    884     CHECK(a->IsInternalizedString());
    885     Handle<String> b = factory->InternalizeUtf8String(string);
    886     CHECK_EQ(*b, *a);
    887     CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    888     b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    889     CHECK_EQ(*b, *a);
    890     CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    891   }
    892 }
    893 
    894 
    895 TEST(StringTable) {
    896   CcTest::InitializeVM();
    897 
    898   v8::HandleScope sc(CcTest::isolate());
    899   CheckInternalizedStrings(not_so_random_string_table);
    900   CheckInternalizedStrings(not_so_random_string_table);
    901 }
    902 
    903 
    904 TEST(FunctionAllocation) {
    905   CcTest::InitializeVM();
    906   Isolate* isolate = CcTest::i_isolate();
    907   Factory* factory = isolate->factory();
    908 
    909   v8::HandleScope sc(CcTest::isolate());
    910   Handle<String> name = factory->InternalizeUtf8String("theFunction");
    911   Handle<JSFunction> function = factory->NewFunction(name);
    912 
    913   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
    914   Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
    915 
    916   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    917   Handle<JSObject> obj = factory->NewJSObject(function);
    918   JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    919   CHECK_EQ(Smi::FromInt(23),
    920            *Object::GetProperty(obj, prop_name).ToHandleChecked());
    921   // Check that we can add properties to function objects.
    922   JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
    923   CHECK_EQ(Smi::FromInt(24),
    924            *Object::GetProperty(function, prop_name).ToHandleChecked());
    925 }
    926 
    927 
    928 TEST(ObjectProperties) {
    929   CcTest::InitializeVM();
    930   Isolate* isolate = CcTest::i_isolate();
    931   Factory* factory = isolate->factory();
    932 
    933   v8::HandleScope sc(CcTest::isolate());
    934   Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
    935   Handle<Object> object = Object::GetProperty(
    936       CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
    937   Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
    938   Handle<JSObject> obj = factory->NewJSObject(constructor);
    939   Handle<String> first = factory->InternalizeUtf8String("first");
    940   Handle<String> second = factory->InternalizeUtf8String("second");
    941 
    942   Handle<Smi> one(Smi::FromInt(1), isolate);
    943   Handle<Smi> two(Smi::FromInt(2), isolate);
    944 
    945   // check for empty
    946   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
    947 
    948   // add first
    949   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
    950   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
    951 
    952   // delete first
    953   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
    954   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
    955 
    956   // add first and then second
    957   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
    958   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
    959   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
    960   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
    961 
    962   // delete first and then second
    963   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
    964   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
    965   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
    966   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
    967   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
    968 
    969   // add first and then second
    970   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
    971   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
    972   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
    973   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
    974 
    975   // delete second and then first
    976   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
    977   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
    978   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
    979   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
    980   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
    981 
    982   // check string and internalized string match
    983   const char* string1 = "fisk";
    984   Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
    985   JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
    986   Handle<String> s1_string = factory->InternalizeUtf8String(string1);
    987   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
    988 
    989   // check internalized string and string match
    990   const char* string2 = "fugl";
    991   Handle<String> s2_string = factory->InternalizeUtf8String(string2);
    992   JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
    993   Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
    994   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
    995 }
    996 
    997 
    998 TEST(JSObjectMaps) {
    999   CcTest::InitializeVM();
   1000   Isolate* isolate = CcTest::i_isolate();
   1001   Factory* factory = isolate->factory();
   1002 
   1003   v8::HandleScope sc(CcTest::isolate());
   1004   Handle<String> name = factory->InternalizeUtf8String("theFunction");
   1005   Handle<JSFunction> function = factory->NewFunction(name);
   1006 
   1007   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
   1008   Handle<JSObject> obj = factory->NewJSObject(function);
   1009   Handle<Map> initial_map(function->initial_map());
   1010 
   1011   // Set a property
   1012   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
   1013   JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
   1014   CHECK_EQ(Smi::FromInt(23),
   1015            *Object::GetProperty(obj, prop_name).ToHandleChecked());
   1016 
   1017   // Check the map has changed
   1018   CHECK(*initial_map != obj->map());
   1019 }
   1020 
   1021 
   1022 TEST(JSArray) {
   1023   CcTest::InitializeVM();
   1024   Isolate* isolate = CcTest::i_isolate();
   1025   Factory* factory = isolate->factory();
   1026 
   1027   v8::HandleScope sc(CcTest::isolate());
   1028   Handle<String> name = factory->InternalizeUtf8String("Array");
   1029   Handle<Object> fun_obj = Object::GetProperty(
   1030       CcTest::i_isolate()->global_object(), name).ToHandleChecked();
   1031   Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
   1032 
   1033   // Allocate the object.
   1034   Handle<Object> element;
   1035   Handle<JSObject> object = factory->NewJSObject(function);
   1036   Handle<JSArray> array = Handle<JSArray>::cast(object);
   1037   // We just initialized the VM, no heap allocation failure yet.
   1038   JSArray::Initialize(array, 0);
   1039 
   1040   // Set array length to 0.
   1041   JSArray::SetLength(array, 0);
   1042   CHECK_EQ(Smi::FromInt(0), array->length());
   1043   // Must be in fast mode.
   1044   CHECK(array->HasFastSmiOrObjectElements());
   1045 
   1046   // array[length] = name.
   1047   JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
   1048   CHECK_EQ(Smi::FromInt(1), array->length());
   1049   element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
   1050   CHECK_EQ(*element, *name);
   1051 
   1052   // Set the array length to a value larger than the largest Smi.
   1053   JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
   1054 
   1055   uint32_t int_length = 0;
   1056   CHECK(array->length()->ToArrayIndex(&int_length));
   1057   CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
   1058   CHECK(array->HasDictionaryElements());  // Must be in slow mode.
   1059 
   1060   // array[length] = name.
   1061   JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
   1062   uint32_t new_int_length = 0;
   1063   CHECK(array->length()->ToArrayIndex(&new_int_length));
   1064   CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
   1065   element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
   1066   CHECK_EQ(*element, *name);
   1067   element = Object::GetElement(isolate, array, 0).ToHandleChecked();
   1068   CHECK_EQ(*element, *name);
   1069 }
   1070 
   1071 
   1072 TEST(JSObjectCopy) {
   1073   CcTest::InitializeVM();
   1074   Isolate* isolate = CcTest::i_isolate();
   1075   Factory* factory = isolate->factory();
   1076 
   1077   v8::HandleScope sc(CcTest::isolate());
   1078   Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
   1079   Handle<Object> object = Object::GetProperty(
   1080       CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
   1081   Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
   1082   Handle<JSObject> obj = factory->NewJSObject(constructor);
   1083   Handle<String> first = factory->InternalizeUtf8String("first");
   1084   Handle<String> second = factory->InternalizeUtf8String("second");
   1085 
   1086   Handle<Smi> one(Smi::FromInt(1), isolate);
   1087   Handle<Smi> two(Smi::FromInt(2), isolate);
   1088 
   1089   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
   1090   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
   1091 
   1092   JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
   1093   JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();
   1094 
   1095   // Make the clone.
   1096   Handle<Object> value1, value2;
   1097   Handle<JSObject> clone = factory->CopyJSObject(obj);
   1098   CHECK(!clone.is_identical_to(obj));
   1099 
   1100   value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
   1101   value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
   1102   CHECK_EQ(*value1, *value2);
   1103   value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
   1104   value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
   1105   CHECK_EQ(*value1, *value2);
   1106 
   1107   value1 = Object::GetProperty(obj, first).ToHandleChecked();
   1108   value2 = Object::GetProperty(clone, first).ToHandleChecked();
   1109   CHECK_EQ(*value1, *value2);
   1110   value1 = Object::GetProperty(obj, second).ToHandleChecked();
   1111   value2 = Object::GetProperty(clone, second).ToHandleChecked();
   1112   CHECK_EQ(*value1, *value2);
   1113 
   1114   // Flip the values.
   1115   JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
   1116   JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
   1117 
   1118   JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
   1119   JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();
   1120 
   1121   value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
   1122   value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
   1123   CHECK_EQ(*value1, *value2);
   1124   value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
   1125   value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
   1126   CHECK_EQ(*value1, *value2);
   1127 
   1128   value1 = Object::GetProperty(obj, second).ToHandleChecked();
   1129   value2 = Object::GetProperty(clone, first).ToHandleChecked();
   1130   CHECK_EQ(*value1, *value2);
   1131   value1 = Object::GetProperty(obj, first).ToHandleChecked();
   1132   value2 = Object::GetProperty(clone, second).ToHandleChecked();
   1133   CHECK_EQ(*value1, *value2);
   1134 }
   1135 
   1136 
   1137 TEST(StringAllocation) {
   1138   CcTest::InitializeVM();
   1139   Isolate* isolate = CcTest::i_isolate();
   1140   Factory* factory = isolate->factory();
   1141 
   1142   const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
   1143   for (int length = 0; length < 100; length++) {
   1144     v8::HandleScope scope(CcTest::isolate());
   1145     char* non_one_byte = NewArray<char>(3 * length + 1);
   1146     char* one_byte = NewArray<char>(length + 1);
   1147     non_one_byte[3 * length] = 0;
   1148     one_byte[length] = 0;
   1149     for (int i = 0; i < length; i++) {
   1150       one_byte[i] = 'a';
   1151       non_one_byte[3 * i] = chars[0];
   1152       non_one_byte[3 * i + 1] = chars[1];
   1153       non_one_byte[3 * i + 2] = chars[2];
   1154     }
   1155     Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
   1156         Vector<const char>(non_one_byte, 3 * length));
   1157     CHECK_EQ(length, non_one_byte_sym->length());
   1158     Handle<String> one_byte_sym =
   1159         factory->InternalizeOneByteString(OneByteVector(one_byte, length));
   1160     CHECK_EQ(length, one_byte_sym->length());
   1161     Handle<String> non_one_byte_str =
   1162         factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
   1163             .ToHandleChecked();
   1164     non_one_byte_str->Hash();
   1165     CHECK_EQ(length, non_one_byte_str->length());
   1166     Handle<String> one_byte_str =
   1167         factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
   1168             .ToHandleChecked();
   1169     one_byte_str->Hash();
   1170     CHECK_EQ(length, one_byte_str->length());
   1171     DeleteArray(non_one_byte);
   1172     DeleteArray(one_byte);
   1173   }
   1174 }
   1175 
   1176 
   1177 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
   1178   // Count the number of objects found in the heap.
   1179   int found_count = 0;
   1180   HeapIterator iterator(heap);
   1181   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
   1182     for (int i = 0; i < size; i++) {
   1183       if (*objs[i] == obj) {
   1184         found_count++;
   1185       }
   1186     }
   1187   }
   1188   return found_count;
   1189 }
   1190 
   1191 
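        // The heap iterator must visit objects in every space; the test allocates
        // objects in new space, old space and large-object space and expects to find
        // each of them.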
   1192 TEST(Iteration) {
   1193   CcTest::InitializeVM();
   1194   Isolate* isolate = CcTest::i_isolate();
   1195   Factory* factory = isolate->factory();
   1196   v8::HandleScope scope(CcTest::isolate());
   1197 
   1198   // Array of objects to scan the heap for.
   1199   const int objs_count = 6;
   1200   Handle<Object> objs[objs_count];
   1201   int next_objs_index = 0;
   1202 
   1203   // Allocate a JS array in OLD_SPACE and NEW_SPACE
   1204   objs[next_objs_index++] = factory->NewJSArray(10);
   1205   objs[next_objs_index++] =
   1206       factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, TENURED);
   1207 
   1208   // Allocate a small string in OLD_SPACE and NEW_SPACE
   1209   objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
   1210   objs[next_objs_index++] =
   1211       factory->NewStringFromStaticChars("abcdefghij", TENURED);
   1212 
   1213   // Allocate a large string (for large object space).
   1214   int large_size = Page::kMaxRegularHeapObjectSize + 1;
   1215   char* str = new char[large_size];
   1216   for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
   1217   str[large_size - 1] = '\0';
   1218   objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
   1219   delete[] str;
   1220 
   1221   // Add a Map object to look for.
   1222   objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
   1223 
   1224   CHECK_EQ(objs_count, next_objs_index);
   1225   CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
   1226 }
   1227 
   1228 
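        // Code flushing: the unoptimized code of a function that has not run for
        // several full GCs is discarded and gets recompiled lazily on the next call.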
   1229 UNINITIALIZED_TEST(TestCodeFlushing) {
   1230   // If we do not flush code this test is invalid.
   1231   if (!FLAG_flush_code) return;
   1232   i::FLAG_allow_natives_syntax = true;
   1233   i::FLAG_optimize_for_size = false;
   1234   v8::Isolate::CreateParams create_params;
   1235   create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
   1236   v8::Isolate* isolate = v8::Isolate::New(create_params);
   1237   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
   1238   isolate->Enter();
   1239   Factory* factory = i_isolate->factory();
   1240   {
   1241     v8::HandleScope scope(isolate);
   1242     v8::Context::New(isolate)->Enter();
   1243     const char* source =
   1244         "function foo() {"
   1245         "  var x = 42;"
   1246         "  var y = 42;"
   1247         "  var z = x + y;"
   1248         "};"
   1249         "foo()";
   1250     Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1251 
   1252     // This compile will add the code to the compilation cache.
   1253     {
   1254       v8::HandleScope scope(isolate);
   1255       CompileRun(source);
   1256     }
   1257 
   1258     // Check function is compiled.
   1259     Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
   1260                                                     foo_name).ToHandleChecked();
   1261     CHECK(func_value->IsJSFunction());
   1262     Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
   1263     CHECK(function->shared()->is_compiled());
   1264 
   1265     // The code will survive at least two GCs.
   1266     i_isolate->heap()->CollectAllGarbage();
   1267     i_isolate->heap()->CollectAllGarbage();
   1268     CHECK(function->shared()->is_compiled());
   1269 
   1270     // Simulate several GCs that use full marking.
   1271     const int kAgingThreshold = 6;
   1272     for (int i = 0; i < kAgingThreshold; i++) {
   1273       i_isolate->heap()->CollectAllGarbage();
   1274     }
   1275 
   1276     // foo should no longer be in the compilation cache
   1277     CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1278     CHECK(!function->is_compiled() || function->IsOptimized());
   1279     // Call foo to get it recompiled.
   1280     CompileRun("foo()");
   1281     CHECK(function->shared()->is_compiled());
   1282     CHECK(function->is_compiled());
   1283   }
   1284   isolate->Exit();
   1285   isolate->Dispose();
   1286 }
   1287 
   1288 
   1289 TEST(TestCodeFlushingPreAged) {
   1290   // If we do not flush code this test is invalid.
   1291   if (!FLAG_flush_code) return;
   1292   i::FLAG_allow_natives_syntax = true;
   1293   i::FLAG_optimize_for_size = true;
   1294   CcTest::InitializeVM();
   1295   Isolate* isolate = CcTest::i_isolate();
   1296   Factory* factory = isolate->factory();
   1297   v8::HandleScope scope(CcTest::isolate());
   1298   const char* source = "function foo() {"
   1299                        "  var x = 42;"
   1300                        "  var y = 42;"
   1301                        "  var z = x + y;"
   1302                        "};"
   1303                        "foo()";
   1304   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1305 
   1306   // Compile foo, but don't run it.
   1307   { v8::HandleScope scope(CcTest::isolate());
   1308     CompileRun(source);
   1309   }
   1310 
   1311   // Check function is compiled.
   1312   Handle<Object> func_value =
   1313       Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
   1314   CHECK(func_value->IsJSFunction());
   1315   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
   1316   CHECK(function->shared()->is_compiled());
   1317 
   1318   // The code has been run, so it will survive at least one GC.
   1319   CcTest::heap()->CollectAllGarbage();
   1320   CHECK(function->shared()->is_compiled());
   1321 
   1322   // The code was only run once, so it should be pre-aged and collected on the
   1323   // next GC.
   1324   CcTest::heap()->CollectAllGarbage();
   1325   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1326 
   1327   // Execute the function again twice, and ensure it is reset to the young age.
   1328   { v8::HandleScope scope(CcTest::isolate());
   1329     CompileRun("foo();"
   1330                "foo();");
   1331   }
   1332 
   1333   // The code will survive at least two GCs now that it is young again.
   1334   CcTest::heap()->CollectAllGarbage();
   1335   CcTest::heap()->CollectAllGarbage();
   1336   CHECK(function->shared()->is_compiled());
   1337 
   1338   // Simulate several GCs that use full marking.
   1339   const int kAgingThreshold = 6;
   1340   for (int i = 0; i < kAgingThreshold; i++) {
   1341     CcTest::heap()->CollectAllGarbage();
   1342   }
   1343 
   1344   // foo should no longer be in the compilation cache
   1345   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1346   CHECK(!function->is_compiled() || function->IsOptimized());
   1347   // Call foo to get it recompiled.
   1348   CompileRun("foo()");
   1349   CHECK(function->shared()->is_compiled());
   1350   CHECK(function->is_compiled());
   1351 }
   1352 
   1353 
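        // Like TestCodeFlushing, but the aging GCs use incremental marking, and the
        // function is optimized while it is enqueued as a flushing candidate.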
   1354 TEST(TestCodeFlushingIncremental) {
   1355   // If we do not flush code this test is invalid.
   1356   if (!FLAG_flush_code) return;
   1357   i::FLAG_allow_natives_syntax = true;
   1358   i::FLAG_optimize_for_size = false;
   1359   CcTest::InitializeVM();
   1360   Isolate* isolate = CcTest::i_isolate();
   1361   Factory* factory = isolate->factory();
   1362   v8::HandleScope scope(CcTest::isolate());
   1363   const char* source = "function foo() {"
   1364                        "  var x = 42;"
   1365                        "  var y = 42;"
   1366                        "  var z = x + y;"
   1367                        "};"
   1368                        "foo()";
   1369   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1370 
   1371   // This compile will add the code to the compilation cache.
   1372   { v8::HandleScope scope(CcTest::isolate());
   1373     CompileRun(source);
   1374   }
   1375 
   1376   // Check function is compiled.
   1377   Handle<Object> func_value =
   1378       Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
   1379   CHECK(func_value->IsJSFunction());
   1380   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
   1381   CHECK(function->shared()->is_compiled());
   1382 
   1383   // The code will survive at least two GCs.
   1384   CcTest::heap()->CollectAllGarbage();
   1385   CcTest::heap()->CollectAllGarbage();
   1386   CHECK(function->shared()->is_compiled());
   1387 
   1388   // Simulate several GCs that use incremental marking.
   1389   const int kAgingThreshold = 6;
   1390   for (int i = 0; i < kAgingThreshold; i++) {
   1391     heap::SimulateIncrementalMarking(CcTest::heap());
   1392     CcTest::heap()->CollectAllGarbage();
   1393   }
   1394   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1395   CHECK(!function->is_compiled() || function->IsOptimized());
   1396 
   1397   // This compile will compile the function again.
   1398   { v8::HandleScope scope(CcTest::isolate());
   1399     CompileRun("foo();");
   1400   }
   1401 
   1402   // Simulate several GCs that use incremental marking but make sure
   1403   // the loop breaks once the function is enqueued as a candidate.
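          // (A non-undefined next_function_link is what signals that foo has been
          // put on the code flushing candidates list.)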
   1404   for (int i = 0; i < kAgingThreshold; i++) {
   1405     heap::SimulateIncrementalMarking(CcTest::heap());
   1406     if (!function->next_function_link()->IsUndefined(CcTest::i_isolate()))
   1407       break;
   1408     CcTest::heap()->CollectAllGarbage();
   1409   }
   1410 
   1411   // Force optimization while incremental marking is active and while
   1412   // the function is enqueued as a candidate.
   1413   { v8::HandleScope scope(CcTest::isolate());
   1414     CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
   1415   }
   1416 
   1417   // Simulate one final GC to make sure the candidate queue is sane.
   1418   CcTest::heap()->CollectAllGarbage();
   1419   CHECK(function->shared()->is_compiled() || !function->IsOptimized());
   1420   CHECK(function->is_compiled() || !function->IsOptimized());
   1421 }
   1422 
   1423 
   1424 TEST(TestCodeFlushingIncrementalScavenge) {
   1425   // If we do not flush code this test is invalid.
   1426   if (!FLAG_flush_code) return;
   1427   i::FLAG_allow_natives_syntax = true;
   1428   i::FLAG_optimize_for_size = false;
   1429   CcTest::InitializeVM();
   1430   Isolate* isolate = CcTest::i_isolate();
   1431   Factory* factory = isolate->factory();
   1432   v8::HandleScope scope(CcTest::isolate());
   1433   const char* source = "var foo = function() {"
   1434                        "  var x = 42;"
   1435                        "  var y = 42;"
   1436                        "  var z = x + y;"
   1437                        "};"
   1438                        "foo();"
   1439                        "var bar = function() {"
   1440                        "  var x = 23;"
   1441                        "};"
   1442                        "bar();";
   1443   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1444   Handle<String> bar_name = factory->InternalizeUtf8String("bar");
   1445 
   1446   // Perform one initial GC to enable code flushing.
   1447   CcTest::heap()->CollectAllGarbage();
   1448 
   1449   // This compile will add the code to the compilation cache.
   1450   { v8::HandleScope scope(CcTest::isolate());
   1451     CompileRun(source);
   1452   }
   1453 
   1454   // Check functions are compiled.
   1455   Handle<Object> func_value =
   1456       Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
   1457   CHECK(func_value->IsJSFunction());
   1458   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
   1459   CHECK(function->shared()->is_compiled());
   1460   Handle<Object> func_value2 =
   1461       Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
   1462   CHECK(func_value2->IsJSFunction());
   1463   Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
   1464   CHECK(function2->shared()->is_compiled());
   1465 
   1466   // Clear references to functions so that one of them can die.
   1467   { v8::HandleScope scope(CcTest::isolate());
   1468     CompileRun("foo = 0; bar = 0;");
   1469   }
   1470 
   1471   // Bump the code age so that flushing is triggered while the function
   1472   // object is still located in new-space.
   1473   const int kAgingThreshold = 6;
   1474   for (int i = 0; i < kAgingThreshold; i++) {
   1475     function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   1476     function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   1477   }
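          // Alternating the marking parity above is what makes the age advance; as
          // noted in CompilationCacheCachingBehavior below, the code age only
          // progresses when consecutive MakeOlder calls use different parities.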
   1478 
   1479   // Simulate incremental marking so that the functions are enqueued as
   1480   // code flushing candidates. Then kill one of the functions. Finally
   1481   // perform a scavenge while incremental marking is still running.
   1482   heap::SimulateIncrementalMarking(CcTest::heap());
   1483   *function2.location() = NULL;
   1484   CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
   1485 
   1486   // Simulate one final GC to make sure the candidate queue is sane.
   1487   CcTest::heap()->CollectAllGarbage();
   1488   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1489   CHECK(!function->is_compiled() || function->IsOptimized());
   1490 }
   1491 
   1492 
   1493 TEST(TestCodeFlushingIncrementalAbort) {
   1494   // If we do not flush code this test is invalid.
   1495   if (!FLAG_flush_code) return;
   1496   i::FLAG_allow_natives_syntax = true;
   1497   i::FLAG_optimize_for_size = false;
   1498   CcTest::InitializeVM();
   1499   Isolate* isolate = CcTest::i_isolate();
   1500   Factory* factory = isolate->factory();
   1501   Heap* heap = isolate->heap();
   1502   v8::HandleScope scope(CcTest::isolate());
   1503   const char* source = "function foo() {"
   1504                        "  var x = 42;"
   1505                        "  var y = 42;"
   1506                        "  var z = x + y;"
   1507                        "};"
   1508                        "foo()";
   1509   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1510 
   1511   // This compile will add the code to the compilation cache.
   1512   { v8::HandleScope scope(CcTest::isolate());
   1513     CompileRun(source);
   1514   }
   1515 
   1516   // Check function is compiled.
   1517   Handle<Object> func_value =
   1518       Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
   1519   CHECK(func_value->IsJSFunction());
   1520   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
   1521   CHECK(function->shared()->is_compiled());
   1522 
   1523   // The code will survive at least two GCs.
   1524   heap->CollectAllGarbage();
   1525   heap->CollectAllGarbage();
   1526   CHECK(function->shared()->is_compiled());
   1527 
   1528   // Bump the code age so that flushing is triggered.
   1529   const int kAgingThreshold = 6;
   1530   for (int i = 0; i < kAgingThreshold; i++) {
   1531     function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   1532   }
   1533 
   1534   // Simulate incremental marking so that the function is enqueued as
   1535   // code flushing candidate.
   1536   heap::SimulateIncrementalMarking(heap);
   1537 
   1538   // Enable the debugger and add a breakpoint while incremental marking
   1539   // is running so that incremental marking aborts and code flushing is
   1540   // disabled.
   1541   int position = 0;
   1542   Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
   1543   EnableDebugger(CcTest::isolate());
   1544   isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
   1545   isolate->debug()->ClearAllBreakPoints();
   1546   DisableDebugger(CcTest::isolate());
   1547 
   1548   // Force optimization now that code flushing is disabled.
   1549   { v8::HandleScope scope(CcTest::isolate());
   1550     CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
   1551   }
   1552 
   1553   // Simulate one final GC to make sure the candidate queue is sane.
   1554   heap->CollectAllGarbage();
   1555   CHECK(function->shared()->is_compiled() || !function->IsOptimized());
   1556   CHECK(function->is_compiled() || !function->IsOptimized());
   1557 }
   1558 
   1559 TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
   1560   // Turn off always_opt because it interferes with running the built-in for
   1561   // the last call to g().
   1562   i::FLAG_always_opt = false;
   1563   i::FLAG_allow_natives_syntax = true;
   1564   CcTest::InitializeVM();
   1565   Isolate* isolate = CcTest::i_isolate();
   1566   Factory* factory = isolate->factory();
   1567   Heap* heap = isolate->heap();
   1568   v8::HandleScope scope(CcTest::isolate());
   1569 
   1570   CompileRun(
   1571       "function make_closure(x) {"
   1572       "  return function() { return x + 3 };"
   1573       "}"
   1574       "var f = make_closure(5); f();"
   1575       "var g = make_closure(5);");
   1576 
   1577   // Check f is compiled.
   1578   Handle<String> f_name = factory->InternalizeUtf8String("f");
   1579   Handle<Object> f_value =
   1580       Object::GetProperty(isolate->global_object(), f_name).ToHandleChecked();
   1581   Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
   1582   CHECK(f_function->is_compiled());
   1583 
   1584   // Check g is not compiled.
   1585   Handle<String> g_name = factory->InternalizeUtf8String("g");
   1586   Handle<Object> g_value =
   1587       Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
   1588   Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
   1589   CHECK(!g_function->is_compiled());
   1590 
   1591   heap::SimulateIncrementalMarking(heap);
   1592   CompileRun("%OptimizeFunctionOnNextCall(f); f();");
   1593 
   1594   // An optimized function should now be available for g, unmarked by GC. The
   1595   // CompileLazy built-in will discover it and install it in the closure, and
   1596   // the incremental write barrier should be used.
   1597   CompileRun("g();");
   1598   CHECK(g_function->is_compiled());
   1599 }
   1600 
   1601 TEST(CompilationCacheCachingBehavior) {
   1602   // If we do not flush code, or have the compilation cache turned off, this
   1603   // test is invalid.
   1604   if (!FLAG_flush_code || !FLAG_compilation_cache) {
   1605     return;
   1606   }
   1607   CcTest::InitializeVM();
   1608   Isolate* isolate = CcTest::i_isolate();
   1609   Factory* factory = isolate->factory();
   1610   Heap* heap = isolate->heap();
   1611   CompilationCache* compilation_cache = isolate->compilation_cache();
   1612   LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
   1613 
   1614   v8::HandleScope scope(CcTest::isolate());
   1615   const char* raw_source =
   1616       "function foo() {"
   1617       "  var x = 42;"
   1618       "  var y = 42;"
   1619       "  var z = x + y;"
   1620       "};"
   1621       "foo()";
   1622   Handle<String> source = factory->InternalizeUtf8String(raw_source);
   1623   Handle<Context> native_context = isolate->native_context();
   1624 
   1625   {
   1626     v8::HandleScope scope(CcTest::isolate());
   1627     CompileRun(raw_source);
   1628   }
   1629 
   1630   // The script should be in the cache now.
   1631   MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
   1632       source, Handle<Object>(), 0, 0,
   1633       v8::ScriptOriginOptions(false, true, false), native_context,
   1634       language_mode);
   1635   CHECK(!info.is_null());
   1636 
   1637   // Check that the code cache entry survives at least one GC.
   1638   // (Unless --optimize-for-size, in which case it might get collected
   1639   // immediately.)
   1640   if (!FLAG_optimize_for_size) {
   1641     heap->CollectAllGarbage();
   1642     info = compilation_cache->LookupScript(
   1643         source, Handle<Object>(), 0, 0,
   1644         v8::ScriptOriginOptions(false, true, false), native_context,
   1645         language_mode);
   1646     CHECK(!info.is_null());
   1647   }
   1648 
   1649   // Progress code age until it's old and ready for GC.
   1650   while (!info.ToHandleChecked()->code()->IsOld()) {
   1651     // To guarantee progress, we have to MakeOlder with different parities.
   1652     // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
   1653     // always NO_MARKING_PARITY and the code age only progresses if the parity
   1654     // is different.
   1655     info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
   1656     info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
   1657   }
   1658 
   1659   heap->CollectAllGarbage();
   1660   // Ensure code aging cleared the entry from the cache.
   1661   info = compilation_cache->LookupScript(
   1662       source, Handle<Object>(), 0, 0,
   1663       v8::ScriptOriginOptions(false, true, false), native_context,
   1664       language_mode);
   1665   CHECK(info.is_null());
   1666 }
   1667 
   1668 
   1669 static void OptimizeEmptyFunction(const char* name) {
   1670   HandleScope scope(CcTest::i_isolate());
   1671   EmbeddedVector<char, 256> source;
   1672   SNPrintF(source,
   1673            "function %s() { return 0; }"
   1674            "%s(); %s();"
   1675            "%%OptimizeFunctionOnNextCall(%s);"
   1676            "%s();",
   1677            name, name, name, name, name);
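          // With name == "f1", for example, the buffer now holds:
          //   function f1() { return 0; }f1(); f1();%OptimizeFunctionOnNextCall(f1);f1();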
   1678   CompileRun(source.start());
   1679 }
   1680 
   1681 
   1682 // Count the number of native contexts in the weak list of native contexts.
   1683 int CountNativeContexts() {
   1684   int count = 0;
   1685   Object* object = CcTest::heap()->native_contexts_list();
   1686   while (!object->IsUndefined(CcTest::i_isolate())) {
   1687     count++;
   1688     object = Context::cast(object)->next_context_link();
   1689   }
   1690   return count;
   1691 }
   1692 
   1693 
   1694 // Count the number of user functions in the weak list of optimized
   1695 // functions attached to a native context.
   1696 static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
   1697   int count = 0;
   1698   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
   1699   Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
   1700   while (object->IsJSFunction() &&
   1701          !JSFunction::cast(object)->shared()->IsBuiltin()) {
   1702     count++;
   1703     object = JSFunction::cast(object)->next_function_link();
   1704   }
   1705   return count;
   1706 }
   1707 
   1708 
   1709 TEST(TestInternalWeakLists) {
   1710   FLAG_always_opt = false;
   1711   FLAG_allow_natives_syntax = true;
   1712   v8::V8::Initialize();
   1713 
   1714   // Some flags turn Scavenge collections into Mark-sweep collections
   1715   // and hence are incompatible with this test case.
   1716   if (FLAG_gc_global || FLAG_stress_compaction) return;
   1717   FLAG_retain_maps_for_n_gc = 0;
   1718 
   1719   static const int kNumTestContexts = 10;
   1720 
   1721   Isolate* isolate = CcTest::i_isolate();
   1722   Heap* heap = isolate->heap();
   1723   HandleScope scope(isolate);
   1724   v8::Local<v8::Context> ctx[kNumTestContexts];
   1725   if (!isolate->use_crankshaft()) return;
   1726 
   1727   CHECK_EQ(0, CountNativeContexts());
   1728 
   1729   // Create a number of global contexts which get linked together.
   1730   for (int i = 0; i < kNumTestContexts; i++) {
   1731     ctx[i] = v8::Context::New(CcTest::isolate());
   1732 
   1733     // Collect garbage that might have been created by one of the
   1734     // installed extensions.
   1735     isolate->compilation_cache()->Clear();
   1736     heap->CollectAllGarbage();
   1737 
   1738     CHECK_EQ(i + 1, CountNativeContexts());
   1739 
   1740     ctx[i]->Enter();
   1741 
   1742     // Create a handle scope so no function objects get stuck in the outer
   1743     // handle scope.
   1744     HandleScope scope(isolate);
   1745     CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
   1746     OptimizeEmptyFunction("f1");
   1747     CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
   1748     OptimizeEmptyFunction("f2");
   1749     CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
   1750     OptimizeEmptyFunction("f3");
   1751     CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
   1752     OptimizeEmptyFunction("f4");
   1753     CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
   1754     OptimizeEmptyFunction("f5");
   1755     CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
   1756 
   1757     // Remove function f1.
   1758     CompileRun("f1=null");
   1759 
   1760     // Scavenge treats these references as strong.
   1761     for (int j = 0; j < 10; j++) {
   1762       CcTest::heap()->CollectGarbage(NEW_SPACE);
   1763       CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
   1764     }
   1765 
   1766     // Mark compact handles the weak references.
   1767     isolate->compilation_cache()->Clear();
   1768     heap->CollectAllGarbage();
   1769     CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
   1770 
   1771     // Get rid of f3 and f5 in the same way.
   1772     CompileRun("f3=null");
   1773     for (int j = 0; j < 10; j++) {
   1774       CcTest::heap()->CollectGarbage(NEW_SPACE);
   1775       CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
   1776     }
   1777     CcTest::heap()->CollectAllGarbage();
   1778     CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
   1779     CompileRun("f5=null");
   1780     for (int j = 0; j < 10; j++) {
   1781       CcTest::heap()->CollectGarbage(NEW_SPACE);
   1782       CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
   1783     }
   1784     CcTest::heap()->CollectAllGarbage();
   1785     CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
   1786 
   1787     ctx[i]->Exit();
   1788   }
   1789 
   1790   // Force compilation cache cleanup.
   1791   CcTest::heap()->NotifyContextDisposed(true);
   1792   CcTest::heap()->CollectAllGarbage();
   1793 
   1794   // Dispose the native contexts one by one.
   1795   for (int i = 0; i < kNumTestContexts; i++) {
   1796     // TODO(dcarney): is there a better way to do this?
   1797     i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
   1798     *unsafe = CcTest::heap()->undefined_value();
   1799     ctx[i].Clear();
   1800 
   1801     // Scavenge treats these references as strong.
   1802     for (int j = 0; j < 10; j++) {
   1803       CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   1804       CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
   1805     }
   1806 
   1807     // Mark compact handles the weak references.
   1808     CcTest::heap()->CollectAllGarbage();
   1809     CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
   1810   }
   1811 
   1812   CHECK_EQ(0, CountNativeContexts());
   1813 }
   1814 
   1815 
   1816 // Count the number of native contexts in the weak list of native contexts
   1817 // causing a GC after the specified number of elements.
   1818 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
   1819   Heap* heap = isolate->heap();
   1820   int count = 0;
   1821   Handle<Object> object(heap->native_contexts_list(), isolate);
   1822   while (!object->IsUndefined(isolate)) {
   1823     count++;
   1824     if (count == n) heap->CollectAllGarbage();
   1825     object =
   1826         Handle<Object>(Context::cast(*object)->next_context_link(), isolate);
   1827   }
   1828   return count;
   1829 }
   1830 
   1831 
   1832 // Count the number of user functions in the weak list of optimized
   1833 // functions attached to a native context causing a GC after the
   1834 // specified number of elements.
   1835 static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
   1836                                              int n) {
   1837   int count = 0;
   1838   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
   1839   Isolate* isolate = icontext->GetIsolate();
   1840   Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
   1841                         isolate);
   1842   while (object->IsJSFunction() &&
   1843          !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
   1844     count++;
   1845     if (count == n) isolate->heap()->CollectAllGarbage();
   1846     object = Handle<Object>(
   1847         Object::cast(JSFunction::cast(*object)->next_function_link()),
   1848         isolate);
   1849   }
   1850   return count;
   1851 }
   1852 
   1853 
   1854 TEST(TestInternalWeakListsTraverseWithGC) {
   1855   FLAG_always_opt = false;
   1856   FLAG_allow_natives_syntax = true;
   1857   v8::V8::Initialize();
   1858 
   1859   static const int kNumTestContexts = 10;
   1860 
   1861   Isolate* isolate = CcTest::i_isolate();
   1862   HandleScope scope(isolate);
   1863   v8::Local<v8::Context> ctx[kNumTestContexts];
   1864   if (!isolate->use_crankshaft()) return;
   1865 
   1866   CHECK_EQ(0, CountNativeContexts());
   1867 
   1868   // Create a number of contexts and check the length of the weak list both
   1869   // with and without GCs while iterating the list.
   1870   for (int i = 0; i < kNumTestContexts; i++) {
   1871     ctx[i] = v8::Context::New(CcTest::isolate());
   1872     CHECK_EQ(i + 1, CountNativeContexts());
   1873     CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
   1874   }
   1875 
   1876   ctx[0]->Enter();
   1877 
   1878   // Compile a number of functions and check the length of the weak list of
   1879   // optimized functions both with and without GCs while iterating the list.
   1880   CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
   1881   OptimizeEmptyFunction("f1");
   1882   CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
   1883   CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
   1884   OptimizeEmptyFunction("f2");
   1885   CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
   1886   CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
   1887   OptimizeEmptyFunction("f3");
   1888   CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
   1889   CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
   1890   OptimizeEmptyFunction("f4");
   1891   CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
   1892   CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
   1893   OptimizeEmptyFunction("f5");
   1894   CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
   1895   CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
   1896 
   1897   ctx[0]->Exit();
   1898 }
   1899 
   1900 
   1901 TEST(TestSizeOfRegExpCode) {
   1902   if (!FLAG_regexp_optimization) return;
   1903 
   1904   v8::V8::Initialize();
   1905 
   1906   Isolate* isolate = CcTest::i_isolate();
   1907   HandleScope scope(isolate);
   1908 
   1909   LocalContext context;
   1910 
   1911   // Adjust source below and this check to match
   1912   // RegExpImpl::kRegExpTooLargeToOptimize.
   1913   CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
   1914 
   1915   // Compile a regexp that is much larger if we are using regexp optimizations.
   1916   CompileRun(
   1917       "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
   1918       "var half_size_reg_exp;"
   1919       "while (reg_exp_source.length < 20 * 1024) {"
   1920       "  half_size_reg_exp = reg_exp_source;"
   1921       "  reg_exp_source = reg_exp_source + reg_exp_source;"
   1922       "}"
   1923       // Flatten string.
   1924       "reg_exp_source.match(/f/);");
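          // reg_exp_source has now been doubled past the 20 KB threshold checked
          // above, while half_size_reg_exp holds the last version that was still
          // below it (exactly half the length).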
   1925 
   1926   // Get initial heap size after several full GCs, which will stabilize
   1927   // the heap size and return with sweeping finished completely.
   1928   CcTest::heap()->CollectAllGarbage();
   1929   CcTest::heap()->CollectAllGarbage();
   1930   CcTest::heap()->CollectAllGarbage();
   1931   CcTest::heap()->CollectAllGarbage();
   1932   CcTest::heap()->CollectAllGarbage();
   1933   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   1934   if (collector->sweeping_in_progress()) {
   1935     collector->EnsureSweepingCompleted();
   1936   }
   1937   int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
   1938 
   1939   CompileRun("'foo'.match(reg_exp_source);");
   1940   CcTest::heap()->CollectAllGarbage();
   1941   int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
   1942 
   1943   CompileRun("'foo'.match(half_size_reg_exp);");
   1944   CcTest::heap()->CollectAllGarbage();
   1945   int size_with_optimized_regexp =
   1946       static_cast<int>(CcTest::heap()->SizeOfObjects());
   1947 
   1948   int size_of_regexp_code = size_with_regexp - initial_size;
   1949 
   1950   // On some platforms the debug-code flag causes huge amounts of regexp code
   1951   // to be emitted, breaking this test.
   1952   if (!FLAG_debug_code) {
   1953     CHECK_LE(size_of_regexp_code, 1 * MB);
   1954   }
   1955 
   1956   // Small regexp is half the size, but compiles to more than twice the code
   1957   // due to the optimization steps.
   1958   CHECK_GE(size_with_optimized_regexp,
   1959            size_with_regexp + size_of_regexp_code * 2);
   1960 }
   1961 
   1962 
   1963 HEAP_TEST(TestSizeOfObjects) {
   1964   v8::V8::Initialize();
   1965 
   1966   // Get initial heap size after several full GCs, which will stabilize
   1967   // the heap size and return with sweeping finished completely.
   1968   CcTest::heap()->CollectAllGarbage();
   1969   CcTest::heap()->CollectAllGarbage();
   1970   CcTest::heap()->CollectAllGarbage();
   1971   CcTest::heap()->CollectAllGarbage();
   1972   CcTest::heap()->CollectAllGarbage();
   1973   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
   1974   if (collector->sweeping_in_progress()) {
   1975     collector->EnsureSweepingCompleted();
   1976   }
   1977   int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
   1978 
   1979   {
   1980     // Allocate objects on several different old-space pages so that
   1981     // concurrent sweeper threads will be busy sweeping the old space on
   1982     // subsequent GC runs.
   1983     AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   1984     int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
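            // filler_size is the byte size of an 8192-element FixedArray (its
            // header plus 8192 pointer-sized element slots), i.e. roughly 32 KB
            // on 32-bit builds and 64 KB on 64-bit builds.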
   1985     for (int i = 1; i <= 100; i++) {
   1986       CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
   1987       CHECK_EQ(initial_size + i * filler_size,
   1988                static_cast<int>(CcTest::heap()->SizeOfObjects()));
   1989     }
   1990   }
   1991 
   1992   // The heap size should go back to initial size after a full GC, even
   1993   // though sweeping didn't finish yet.
   1994   CcTest::heap()->CollectAllGarbage();
   1995 
   1996   // Normally sweeping would not be complete here, but no guarantees.
   1997 
   1998   CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
   1999 
   2000   // Waiting for sweeper threads should not change heap size.
   2001   if (collector->sweeping_in_progress()) {
   2002     collector->EnsureSweepingCompleted();
   2003   }
   2004   CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
   2005 }
   2006 
   2007 
   2008 TEST(TestAlignmentCalculations) {
   2009   // Maximum fill amounts are consistent.
   2010   int maximum_double_misalignment = kDoubleSize - kPointerSize;
   2011   int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
   2012   int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
   2013   CHECK_EQ(0, max_word_fill);
   2014   int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
   2015   CHECK_EQ(maximum_double_misalignment, max_double_fill);
   2016   int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
   2017   CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
   2018   int max_simd128_unaligned_fill =
   2019       Heap::GetMaximumFillToAlign(kSimd128Unaligned);
   2020   CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
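          // As a concrete example (assuming the usual type sizes): on a 32-bit build
          // kPointerSize == 4, kDoubleSize == 8 and kSimd128Size == 16, so the
          // maximum double fill is 4 bytes and the maximum SIMD-128 fill is 12 bytes;
          // on a 64-bit build the double fill is 0 and the SIMD-128 fill is 8 bytes.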
   2021 
   2022   Address base = static_cast<Address>(NULL);
   2023   int fill = 0;
   2024 
   2025   // Word alignment never requires fill.
   2026   fill = Heap::GetFillToAlign(base, kWordAligned);
   2027   CHECK_EQ(0, fill);
   2028   fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
   2029   CHECK_EQ(0, fill);
   2030 
   2031   // No fill is required when address is double aligned.
   2032   fill = Heap::GetFillToAlign(base, kDoubleAligned);
   2033   CHECK_EQ(0, fill);
   2034   // Fill is required if address is not double aligned.
   2035   fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
   2036   CHECK_EQ(maximum_double_misalignment, fill);
   2037   // kDoubleUnaligned has the opposite fill amounts.
   2038   fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
   2039   CHECK_EQ(maximum_double_misalignment, fill);
   2040   fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
   2041   CHECK_EQ(0, fill);
   2042 
   2043   // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
   2044   fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
   2045   CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
   2046   fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
   2047   CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
   2048   fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
   2049   CHECK_EQ(kPointerSize, fill);
   2050   fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
   2051   CHECK_EQ(0, fill);
   2052 }
   2053 
   2054 
   2055 static HeapObject* NewSpaceAllocateAligned(int size,
   2056                                            AllocationAlignment alignment) {
   2057   Heap* heap = CcTest::heap();
   2058   AllocationResult allocation =
   2059       heap->new_space()->AllocateRawAligned(size, alignment);
   2060   HeapObject* obj = NULL;
   2061   allocation.To(&obj);
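          // Turn the raw allocation into a filler object so the heap stays iterable
          // even though no real object is initialized here.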
   2062   heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   2063   return obj;
   2064 }
   2065 
   2066 
   2067 // Get new space allocation into the desired alignment.
   2068 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
   2069   Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
   2070   int fill = Heap::GetFillToAlign(*top_addr, alignment);
   2071   if (fill) {
   2072     NewSpaceAllocateAligned(fill + offset, kWordAligned);
   2073   }
   2074   return *top_addr;
   2075 }
   2076 
   2077 
   2078 TEST(TestAlignedAllocation) {
   2079   // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
   2080   const intptr_t double_misalignment = kDoubleSize - kPointerSize;
   2081   Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
   2082   Address start;
   2083   HeapObject* obj;
   2084   HeapObject* filler;
   2085   if (double_misalignment) {
   2086     // Allocate a pointer sized object that must be double aligned at an
   2087     // aligned address.
   2088     start = AlignNewSpace(kDoubleAligned, 0);
   2089     obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
   2090     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
   2091     // There is no filler.
   2092     CHECK_EQ(kPointerSize, *top_addr - start);
   2093 
   2094     // Allocate a second pointer sized object that must be double aligned at an
   2095     // unaligned address.
   2096     start = AlignNewSpace(kDoubleAligned, kPointerSize);
   2097     obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
   2098     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
   2099     // There is a filler object before the object.
   2100     filler = HeapObject::FromAddress(start);
   2101     CHECK(obj != filler && filler->IsFiller() &&
   2102           filler->Size() == kPointerSize);
   2103     CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
   2104 
   2105     // Similarly for kDoubleUnaligned.
   2106     start = AlignNewSpace(kDoubleUnaligned, 0);
   2107     obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
   2108     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
   2109     CHECK_EQ(kPointerSize, *top_addr - start);
   2110     start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
   2111     obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
   2112     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
   2113     // There is a filler object before the object.
   2114     filler = HeapObject::FromAddress(start);
   2115     CHECK(obj != filler && filler->IsFiller() &&
   2116           filler->Size() == kPointerSize);
   2117     CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
   2118   }
   2119 
   2120   // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
   2121   // on platform.
   2122   start = AlignNewSpace(kSimd128Unaligned, 0);
   2123   obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2124   CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2125   // There is no filler.
   2126   CHECK_EQ(kPointerSize, *top_addr - start);
   2127   start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
   2128   obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2129   CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2130   // There is a filler object before the object.
   2131   filler = HeapObject::FromAddress(start);
   2132   CHECK(obj != filler && filler->IsFiller() &&
   2133         filler->Size() == kSimd128Size - kPointerSize);
   2134   CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
   2135 
   2136   if (double_misalignment) {
   2137     // Test the 2 other alignments possible on 32 bit platforms.
   2138     start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
   2139     obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2140     CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2141     // There is a filler object before the object.
   2142     filler = HeapObject::FromAddress(start);
   2143     CHECK(obj != filler && filler->IsFiller() &&
   2144           filler->Size() == 2 * kPointerSize);
   2145     CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
   2146     start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
   2147     obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2148     CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2149     // There is a filler object before the object.
   2150     filler = HeapObject::FromAddress(start);
   2151     CHECK(obj != filler && filler->IsFiller() &&
   2152           filler->Size() == kPointerSize);
   2153     CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
   2154   }
   2155 }
   2156 
   2157 
   2158 static HeapObject* OldSpaceAllocateAligned(int size,
   2159                                            AllocationAlignment alignment) {
   2160   Heap* heap = CcTest::heap();
   2161   AllocationResult allocation =
   2162       heap->old_space()->AllocateRawAligned(size, alignment);
   2163   HeapObject* obj = NULL;
   2164   allocation.To(&obj);
   2165   heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   2166   return obj;
   2167 }
   2168 
   2169 
   2170 // Get old space allocation into the desired alignment.
   2171 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
   2172   Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
   2173   int fill = Heap::GetFillToAlign(*top_addr, alignment);
   2174   int allocation = fill + offset;
   2175   if (allocation) {
   2176     OldSpaceAllocateAligned(allocation, kWordAligned);
   2177   }
   2178   Address top = *top_addr;
   2179   // Now force the remaining allocation onto the free list.
   2180   CcTest::heap()->old_space()->EmptyAllocationInfo();
   2181   return top;
   2182 }
   2183 
   2184 
   2185 // Test the case where allocation must be done from the free list, so filler
   2186 // may precede or follow the object.
   2187 TEST(TestAlignedOverAllocation) {
   2188   // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
   2189   const intptr_t double_misalignment = kDoubleSize - kPointerSize;
   2190   Address start;
   2191   HeapObject* obj;
   2192   HeapObject* filler1;
   2193   HeapObject* filler2;
   2194   if (double_misalignment) {
   2195     start = AlignOldSpace(kDoubleAligned, 0);
   2196     obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
   2197     // The object is aligned, and a filler object is created after.
   2198     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
   2199     filler1 = HeapObject::FromAddress(start + kPointerSize);
   2200     CHECK(obj != filler1 && filler1->IsFiller() &&
   2201           filler1->Size() == kPointerSize);
   2202     // Try the opposite alignment case.
   2203     start = AlignOldSpace(kDoubleAligned, kPointerSize);
   2204     obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
   2205     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
   2206     filler1 = HeapObject::FromAddress(start);
   2207     CHECK(obj != filler1);
   2208     CHECK(filler1->IsFiller());
   2209     CHECK(filler1->Size() == kPointerSize);
   2210     CHECK(obj != filler1 && filler1->IsFiller() &&
   2211           filler1->Size() == kPointerSize);
   2212 
   2213     // Similarly for kDoubleUnaligned.
   2214     start = AlignOldSpace(kDoubleUnaligned, 0);
   2215     obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
   2216     // The object is aligned, and a filler object is created after.
   2217     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
   2218     filler1 = HeapObject::FromAddress(start + kPointerSize);
   2219     CHECK(obj != filler1 && filler1->IsFiller() &&
   2220           filler1->Size() == kPointerSize);
   2221     // Try the opposite alignment case.
   2222     start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
   2223     obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
   2224     CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
   2225     filler1 = HeapObject::FromAddress(start);
   2226     CHECK(obj != filler1 && filler1->IsFiller() &&
   2227           filler1->Size() == kPointerSize);
   2228   }
   2229 
   2230   // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
   2231   // on platform.
   2232   start = AlignOldSpace(kSimd128Unaligned, 0);
   2233   obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2234   CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2235   // There is a filler object after the object.
   2236   filler1 = HeapObject::FromAddress(start + kPointerSize);
   2237   CHECK(obj != filler1 && filler1->IsFiller() &&
   2238         filler1->Size() == kSimd128Size - kPointerSize);
   2239   start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
   2240   obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2241   CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2242   // There is a filler object before the object.
   2243   filler1 = HeapObject::FromAddress(start);
   2244   CHECK(obj != filler1 && filler1->IsFiller() &&
   2245         filler1->Size() == kSimd128Size - kPointerSize);
   2246 
   2247   if (double_misalignment) {
   2248     // Test the 2 other alignments possible on 32 bit platforms.
   2249     start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
   2250     obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2251     CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2252     // There are filler objects before and after the object.
   2253     filler1 = HeapObject::FromAddress(start);
   2254     CHECK(obj != filler1 && filler1->IsFiller() &&
   2255           filler1->Size() == 2 * kPointerSize);
   2256     filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
   2257     CHECK(obj != filler2 && filler2->IsFiller() &&
   2258           filler2->Size() == kPointerSize);
   2259     start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
   2260     obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
   2261     CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
   2262     // There are filler objects before and after the object.
   2263     filler1 = HeapObject::FromAddress(start);
   2264     CHECK(obj != filler1 && filler1->IsFiller() &&
   2265           filler1->Size() == kPointerSize);
   2266     filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
   2267     CHECK(obj != filler2 && filler2->IsFiller() &&
   2268           filler2->Size() == 2 * kPointerSize);
   2269   }
   2270 }
   2271 
   2272 
   2273 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
   2274   CcTest::InitializeVM();
   2275   HeapIterator iterator(CcTest::heap());
   2276   intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
   2277   intptr_t size_of_objects_2 = 0;
   2278   for (HeapObject* obj = iterator.next();
   2279        obj != NULL;
   2280        obj = iterator.next()) {
   2281     if (!obj->IsFreeSpace()) {
   2282       size_of_objects_2 += obj->Size();
   2283     }
   2284   }
   2285   // Delta must be within 5% of the larger result.
   2286   // TODO(gc): Tighten this up by distinguishing between byte
   2287   // arrays that are real and those that merely mark free space
   2288   // on the heap.
   2289   if (size_of_objects_1 > size_of_objects_2) {
   2290     intptr_t delta = size_of_objects_1 - size_of_objects_2;
   2291     PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
   2292            ", "
   2293            "Iterator: %" V8PRIdPTR
   2294            ", "
   2295            "delta: %" V8PRIdPTR "\n",
   2296            size_of_objects_1, size_of_objects_2, delta);
   2297     CHECK_GT(size_of_objects_1 / 20, delta);
   2298   } else {
   2299     intptr_t delta = size_of_objects_2 - size_of_objects_1;
   2300     PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
   2301            ", "
   2302            "Iterator: %" V8PRIdPTR
   2303            ", "
   2304            "delta: %" V8PRIdPTR "\n",
   2305            size_of_objects_1, size_of_objects_2, delta);
   2306     CHECK_GT(size_of_objects_2 / 20, delta);
   2307   }
   2308 }
   2309 
   2310 
   2311 static void FillUpNewSpace(NewSpace* new_space) {
   2312   // Fill up new space until it is completely full. Make sure
   2313   // that the scavenger does not undo the filling.
   2314   Heap* heap = new_space->heap();
   2315   Isolate* isolate = heap->isolate();
   2316   Factory* factory = isolate->factory();
   2317   HandleScope scope(isolate);
   2318   AlwaysAllocateScope always_allocate(isolate);
   2319   intptr_t available = new_space->Capacity() - new_space->Size();
   2320   intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
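          // Each filler is a 32-element FixedArray; allocating one fewer than would
          // fit leaves new space essentially full without overflowing it.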
   2321   for (intptr_t i = 0; i < number_of_fillers; i++) {
   2322     CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
   2323   }
   2324 }
   2325 
   2326 
   2327 TEST(GrowAndShrinkNewSpace) {
   2328   CcTest::InitializeVM();
   2329   Heap* heap = CcTest::heap();
   2330   NewSpace* new_space = heap->new_space();
   2331 
   2332   if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
   2333     return;
   2334   }
   2335 
   2336   // Explicitly growing should double the space capacity.
   2337   intptr_t old_capacity, new_capacity;
   2338   old_capacity = new_space->TotalCapacity();
   2339   new_space->Grow();
   2340   new_capacity = new_space->TotalCapacity();
   2341   CHECK(2 * old_capacity == new_capacity);
   2342 
   2343   old_capacity = new_space->TotalCapacity();
   2344   FillUpNewSpace(new_space);
   2345   new_capacity = new_space->TotalCapacity();
   2346   CHECK(old_capacity == new_capacity);
   2347 
   2348   // Explicitly shrinking should not affect space capacity.
   2349   old_capacity = new_space->TotalCapacity();
   2350   new_space->Shrink();
   2351   new_capacity = new_space->TotalCapacity();
   2352   CHECK(old_capacity == new_capacity);
   2353 
   2354   // Let the scavenger empty the new space.
   2355   heap->CollectGarbage(NEW_SPACE);
   2356   CHECK_LE(new_space->Size(), old_capacity);
   2357 
   2358   // Explicitly shrinking should halve the space capacity.
   2359   old_capacity = new_space->TotalCapacity();
   2360   new_space->Shrink();
   2361   new_capacity = new_space->TotalCapacity();
   2362   CHECK(old_capacity == 2 * new_capacity);
   2363 
   2364   // Consecutive shrinking should not affect space capacity.
   2365   old_capacity = new_space->TotalCapacity();
   2366   new_space->Shrink();
   2367   new_space->Shrink();
   2368   new_space->Shrink();
   2369   new_capacity = new_space->TotalCapacity();
   2370   CHECK(old_capacity == new_capacity);
   2371 }
   2372 
   2373 
   2374 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
   2375   CcTest::InitializeVM();
   2376   Heap* heap = CcTest::heap();
   2377   if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
   2378     return;
   2379   }
   2380 
   2381   v8::HandleScope scope(CcTest::isolate());
   2382   NewSpace* new_space = heap->new_space();
   2383   intptr_t old_capacity, new_capacity;
   2384   old_capacity = new_space->TotalCapacity();
   2385   new_space->Grow();
   2386   new_capacity = new_space->TotalCapacity();
   2387   CHECK(2 * old_capacity == new_capacity);
   2388   FillUpNewSpace(new_space);
   2389   heap->CollectAllAvailableGarbage();
   2390   new_capacity = new_space->TotalCapacity();
   2391   CHECK(old_capacity == new_capacity);
   2392 }
   2393 
   2394 
   2395 static int NumberOfGlobalObjects() {
   2396   int count = 0;
   2397   HeapIterator iterator(CcTest::heap());
   2398   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
   2399     if (obj->IsJSGlobalObject()) count++;
   2400   }
   2401   return count;
   2402 }
   2403 
   2404 
   2405 // Test that we don't embed maps from foreign contexts into
   2406 // optimized code.
   2407 TEST(LeakNativeContextViaMap) {
   2408   i::FLAG_allow_natives_syntax = true;
   2409   v8::Isolate* isolate = CcTest::isolate();
   2410   v8::HandleScope outer_scope(isolate);
   2411   v8::Persistent<v8::Context> ctx1p;
   2412   v8::Persistent<v8::Context> ctx2p;
   2413   {
   2414     v8::HandleScope scope(isolate);
   2415     ctx1p.Reset(isolate, v8::Context::New(isolate));
   2416     ctx2p.Reset(isolate, v8::Context::New(isolate));
   2417     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   2418   }
   2419 
   2420   CcTest::heap()->CollectAllAvailableGarbage();
   2421   CHECK_EQ(2, NumberOfGlobalObjects());
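          // Two native contexts (ctx1 and ctx2) are alive, hence two global objects.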
   2422 
   2423   {
   2424     v8::HandleScope inner_scope(isolate);
   2425     CompileRun("var v = {x: 42}");
   2426     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   2427     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   2428     v8::Local<v8::Value> v =
   2429         ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
   2430     ctx2->Enter();
   2431     CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
   2432     v8::Local<v8::Value> res = CompileRun(
   2433         "function f() { return o.x; }"
   2434         "for (var i = 0; i < 10; ++i) f();"
   2435         "%OptimizeFunctionOnNextCall(f);"
   2436         "f();");
   2437     CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
   2438     CHECK(ctx2->Global()
   2439               ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
   2440               .FromJust());
   2441     ctx2->Exit();
   2442     v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
   2443     ctx1p.Reset();
   2444     isolate->ContextDisposedNotification();
   2445   }
   2446   CcTest::heap()->CollectAllAvailableGarbage();
   2447   CHECK_EQ(1, NumberOfGlobalObjects());
   2448   ctx2p.Reset();
   2449   CcTest::heap()->CollectAllAvailableGarbage();
   2450   CHECK_EQ(0, NumberOfGlobalObjects());
   2451 }
   2452 
   2453 
   2454 // Test that we don't embed functions from foreign contexts into
   2455 // optimized code.
   2456 TEST(LeakNativeContextViaFunction) {
   2457   i::FLAG_allow_natives_syntax = true;
   2458   v8::Isolate* isolate = CcTest::isolate();
   2459   v8::HandleScope outer_scope(isolate);
   2460   v8::Persistent<v8::Context> ctx1p;
   2461   v8::Persistent<v8::Context> ctx2p;
   2462   {
   2463     v8::HandleScope scope(isolate);
   2464     ctx1p.Reset(isolate, v8::Context::New(isolate));
   2465     ctx2p.Reset(isolate, v8::Context::New(isolate));
   2466     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   2467   }
   2468 
   2469   CcTest::heap()->CollectAllAvailableGarbage();
   2470   CHECK_EQ(2, NumberOfGlobalObjects());
   2471 
   2472   {
   2473     v8::HandleScope inner_scope(isolate);
   2474     CompileRun("var v = function() { return 42; }");
   2475     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   2476     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   2477     v8::Local<v8::Value> v =
   2478         ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
   2479     ctx2->Enter();
   2480     CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
   2481     v8::Local<v8::Value> res = CompileRun(
   2482         "function f(x) { return x(); }"
   2483         "for (var i = 0; i < 10; ++i) f(o);"
   2484         "%OptimizeFunctionOnNextCall(f);"
   2485         "f(o);");
   2486     CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
   2487     CHECK(ctx2->Global()
   2488               ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
   2489               .FromJust());
   2490     ctx2->Exit();
   2491     ctx1->Exit();
   2492     ctx1p.Reset();
   2493     isolate->ContextDisposedNotification();
   2494   }
   2495   CcTest::heap()->CollectAllAvailableGarbage();
   2496   CHECK_EQ(1, NumberOfGlobalObjects());
   2497   ctx2p.Reset();
   2498   CcTest::heap()->CollectAllAvailableGarbage();
   2499   CHECK_EQ(0, NumberOfGlobalObjects());
   2500 }
   2501 
   2502 
   2503 TEST(LeakNativeContextViaMapKeyed) {
   2504   i::FLAG_allow_natives_syntax = true;
   2505   v8::Isolate* isolate = CcTest::isolate();
   2506   v8::HandleScope outer_scope(isolate);
   2507   v8::Persistent<v8::Context> ctx1p;
   2508   v8::Persistent<v8::Context> ctx2p;
   2509   {
   2510     v8::HandleScope scope(isolate);
   2511     ctx1p.Reset(isolate, v8::Context::New(isolate));
   2512     ctx2p.Reset(isolate, v8::Context::New(isolate));
   2513     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   2514   }
   2515 
   2516   CcTest::heap()->CollectAllAvailableGarbage();
   2517   CHECK_EQ(2, NumberOfGlobalObjects());
   2518 
   2519   {
   2520     v8::HandleScope inner_scope(isolate);
   2521     CompileRun("var v = [42, 43]");
   2522     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   2523     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   2524     v8::Local<v8::Value> v =
   2525         ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
   2526     ctx2->Enter();
   2527     CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
   2528     v8::Local<v8::Value> res = CompileRun(
   2529         "function f() { return o[0]; }"
   2530         "for (var i = 0; i < 10; ++i) f();"
   2531         "%OptimizeFunctionOnNextCall(f);"
   2532         "f();");
   2533     CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
   2534     CHECK(ctx2->Global()
   2535               ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
   2536               .FromJust());
   2537     ctx2->Exit();
   2538     ctx1->Exit();
   2539     ctx1p.Reset();
   2540     isolate->ContextDisposedNotification();
   2541   }
   2542   CcTest::heap()->CollectAllAvailableGarbage();
   2543   CHECK_EQ(1, NumberOfGlobalObjects());
   2544   ctx2p.Reset();
   2545   CcTest::heap()->CollectAllAvailableGarbage();
   2546   CHECK_EQ(0, NumberOfGlobalObjects());
   2547 }
   2548 
   2549 
   2550 TEST(LeakNativeContextViaMapProto) {
   2551   i::FLAG_allow_natives_syntax = true;
   2552   v8::Isolate* isolate = CcTest::isolate();
   2553   v8::HandleScope outer_scope(isolate);
   2554   v8::Persistent<v8::Context> ctx1p;
   2555   v8::Persistent<v8::Context> ctx2p;
   2556   {
   2557     v8::HandleScope scope(isolate);
   2558     ctx1p.Reset(isolate, v8::Context::New(isolate));
   2559     ctx2p.Reset(isolate, v8::Context::New(isolate));
   2560     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   2561   }
   2562 
   2563   CcTest::heap()->CollectAllAvailableGarbage();
   2564   CHECK_EQ(2, NumberOfGlobalObjects());
   2565 
   2566   {
   2567     v8::HandleScope inner_scope(isolate);
   2568     CompileRun("var v = { y: 42}");
   2569     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   2570     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   2571     v8::Local<v8::Value> v =
   2572         ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
   2573     ctx2->Enter();
   2574     CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
   2575     v8::Local<v8::Value> res = CompileRun(
   2576         "function f() {"
   2577         "  var p = {x: 42};"
   2578         "  p.__proto__ = o;"
   2579         "  return p.x;"
   2580         "}"
   2581         "for (var i = 0; i < 10; ++i) f();"
   2582         "%OptimizeFunctionOnNextCall(f);"
   2583         "f();");
   2584     CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
   2585     CHECK(ctx2->Global()
   2586               ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
   2587               .FromJust());
   2588     ctx2->Exit();
   2589     ctx1->Exit();
   2590     ctx1p.Reset();
   2591     isolate->ContextDisposedNotification();
   2592   }
   2593   CcTest::heap()->CollectAllAvailableGarbage();
   2594   CHECK_EQ(1, NumberOfGlobalObjects());
   2595   ctx2p.Reset();
   2596   CcTest::heap()->CollectAllAvailableGarbage();
   2597   CHECK_EQ(0, NumberOfGlobalObjects());
   2598 }
   2599 
   2600 
   2601 TEST(InstanceOfStubWriteBarrier) {
   2602   i::FLAG_allow_natives_syntax = true;
   2603 #ifdef VERIFY_HEAP
   2604   i::FLAG_verify_heap = true;
   2605 #endif
   2606 
   2607   CcTest::InitializeVM();
   2608   if (!CcTest::i_isolate()->use_crankshaft()) return;
   2609   if (i::FLAG_force_marking_deque_overflows) return;
   2610   v8::HandleScope outer_scope(CcTest::isolate());
   2611   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   2612 
   2613   {
   2614     v8::HandleScope scope(CcTest::isolate());
   2615     CompileRun(
   2616         "function foo () { }"
   2617         "function mkbar () { return new (new Function(\"\")) (); }"
   2618         "function f (x) { return (x instanceof foo); }"
   2619         "function g () { f(mkbar()); }"
   2620         "f(new foo()); f(new foo());"
   2621         "%OptimizeFunctionOnNextCall(f);"
   2622         "f(new foo()); g();");
   2623   }
   2624 
   2625   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
   2626   marking->Stop();
   2627   CcTest::heap()->StartIncrementalMarking();
   2628 
   2629   i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
   2630       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   2631           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   2632 
   2633   CHECK(f->IsOptimized());
   2634 
   2635   while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
   2636          !marking->IsStopped()) {
   2637     // Discard any pending GC requests; otherwise we will get a GC when we
   2638     // enter the code below.
   2639     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
   2640   }
   2641 
   2642   CHECK(marking->IsMarking());
   2643 
   2644   {
   2645     v8::HandleScope scope(CcTest::isolate());
   2646     v8::Local<v8::Object> global = CcTest::global();
   2647     v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
   2648         global->Get(ctx, v8_str("g")).ToLocalChecked());
   2649     g->Call(ctx, global, 0, nullptr).ToLocalChecked();
   2650   }
   2651 
   2652   CcTest::heap()->incremental_marking()->set_should_hurry(true);
   2653   CcTest::heap()->CollectGarbage(OLD_SPACE);
   2654 }
   2655 
   2656 namespace {
   2657 
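        // With the ignition flag the profiler ticks are kept on the
        // SharedFunctionInfo itself; otherwise they are read from its code object.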
   2658 int GetProfilerTicks(SharedFunctionInfo* shared) {
   2659   return FLAG_ignition ? shared->profiler_ticks()
   2660                        : shared->code()->profiler_ticks();
   2661 }
   2662 
   2663 }  // namespace
   2664 
   2665 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
   2666   i::FLAG_stress_compaction = false;
   2667   i::FLAG_allow_natives_syntax = true;
   2668 #ifdef VERIFY_HEAP
   2669   i::FLAG_verify_heap = true;
   2670 #endif
   2671 
   2672   CcTest::InitializeVM();
   2673   if (!CcTest::i_isolate()->use_crankshaft()) return;
   2674   v8::HandleScope outer_scope(CcTest::isolate());
   2675   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   2676 
   2677   {
   2678     v8::HandleScope scope(CcTest::isolate());
   2679     CompileRun(
   2680         "function f () {"
   2681         "  var s = 0;"
   2682         "  for (var i = 0; i < 100; i++)  s += i;"
   2683         "  return s;"
   2684         "}"
   2685         "f(); f();"
   2686         "%OptimizeFunctionOnNextCall(f);"
   2687         "f();");
   2688   }
   2689   i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
   2690       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   2691           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   2692   CHECK(f->IsOptimized());
   2693 
   2694   // Make sure incremental marking is not running.
   2695   CcTest::heap()->incremental_marking()->Stop();
   2696 
   2697   CcTest::heap()->StartIncrementalMarking();
   2698   // The following calls will increment CcTest::heap()->global_ic_age().
   2699   CcTest::isolate()->ContextDisposedNotification();
   2700   heap::SimulateIncrementalMarking(CcTest::heap());
   2701   CcTest::heap()->CollectAllGarbage();
   2702 
   2703   CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
   2704   CHECK_EQ(0, f->shared()->opt_count());
   2705   CHECK_EQ(0, GetProfilerTicks(f->shared()));
   2706 }
   2707 
   2708 
   2709 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
   2710   i::FLAG_stress_compaction = false;
   2711   i::FLAG_allow_natives_syntax = true;
   2712 #ifdef VERIFY_HEAP
   2713   i::FLAG_verify_heap = true;
   2714 #endif
   2715 
   2716   CcTest::InitializeVM();
   2717   if (!CcTest::i_isolate()->use_crankshaft()) return;
   2718   v8::HandleScope outer_scope(CcTest::isolate());
   2719   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   2720 
   2721   {
   2722     v8::HandleScope scope(CcTest::isolate());
   2723     CompileRun(
   2724         "function f () {"
   2725         "  var s = 0;"
   2726         "  for (var i = 0; i < 100; i++)  s += i;"
   2727         "  return s;"
   2728         "}"
   2729         "f(); f();"
   2730         "%OptimizeFunctionOnNextCall(f);"
   2731         "f();");
   2732   }
   2733   i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
   2734       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   2735           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   2736   CHECK(f->IsOptimized());
   2737 
   2738   // Make sure incremental marking is not running.
   2739   CcTest::heap()->incremental_marking()->Stop();
   2740 
   2741   // The following two calls will increment CcTest::heap()->global_ic_age().
   2742   CcTest::isolate()->ContextDisposedNotification();
   2743   CcTest::heap()->CollectAllGarbage();
   2744 
   2745   CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
   2746   CHECK_EQ(0, f->shared()->opt_count());
   2747   CHECK_EQ(0, GetProfilerTicks(f->shared()));
   2748 }
   2749 
   2750 
   2751 HEAP_TEST(GCFlags) {
   2752   CcTest::InitializeVM();
   2753   Heap* heap = CcTest::heap();
   2754 
   2755   heap->set_current_gc_flags(Heap::kNoGCFlags);
   2756   CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
   2757 
   2758   // Set the flags to check whether we appropriately reset them after the GC.
   2759   heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
   2760   heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
   2761   CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
   2762 
   2763   MarkCompactCollector* collector = heap->mark_compact_collector();
   2764   if (collector->sweeping_in_progress()) {
   2765     collector->EnsureSweepingCompleted();
   2766   }
   2767 
   2768   IncrementalMarking* marking = heap->incremental_marking();
   2769   marking->Stop();
   2770   heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
   2771   CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
   2772 
   2773   heap->CollectGarbage(NEW_SPACE);
   2774   // NewSpace scavenges should not overwrite the flags.
   2775   CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
   2776 
   2777   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   2778   CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
   2779 }
   2780 
   2781 
   2782 TEST(IdleNotificationFinishMarking) {
   2783   i::FLAG_allow_natives_syntax = true;
   2784   CcTest::InitializeVM();
   2785   const int initial_gc_count = CcTest::heap()->gc_count();
   2786   heap::SimulateFullSpace(CcTest::heap()->old_space());
   2787   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
   2788   marking->Stop();
   2789   CcTest::heap()->StartIncrementalMarking();
   2790 
   2791   CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
   2792 
   2793   // TODO(hpayer): We cannot write a proper unit test for the heap right now.
   2794   // The ideal test would call kMaxIdleMarkingDelayCounter to test the
   2795   // marking delay counter.
   2796 
   2797   // Perform a huge incremental marking step but don't complete marking.
   2798   intptr_t bytes_processed = 0;
   2799   do {
   2800     bytes_processed =
   2801         marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
   2802                       IncrementalMarking::FORCE_MARKING,
   2803                       IncrementalMarking::DO_NOT_FORCE_COMPLETION);
   2804     CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
   2805   } while (bytes_processed);
   2806 
   2807   // The next invocations of incremental marking are not going to
   2808   // complete marking since the completion threshold is not
   2809   // reached.
   2810   for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
   2811        i++) {
   2812     marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
   2813                   IncrementalMarking::FORCE_MARKING,
   2814                   IncrementalMarking::DO_NOT_FORCE_COMPLETION);
   2815     CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
   2816   }
   2817 
   2818   marking->SetWeakClosureWasOverApproximatedForTesting(true);
   2819 
   2820   // The next idle notification has to finish incremental marking.
   2821   const double kLongIdleTime = 1000.0;
   2822   CcTest::isolate()->IdleNotificationDeadline(
   2823       (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
   2824        static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
   2825       kLongIdleTime);
   2826   CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
   2827 }
   2828 
   2829 
   2830 // Test that HAllocateObject will always return an object in new-space.
   2831 TEST(OptimizedAllocationAlwaysInNewSpace) {
   2832   i::FLAG_allow_natives_syntax = true;
   2833   CcTest::InitializeVM();
   2834   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   2835   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2836   v8::HandleScope scope(CcTest::isolate());
   2837   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   2838   heap::SimulateFullSpace(CcTest::heap()->new_space());
   2839   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   2840   v8::Local<v8::Value> res = CompileRun(
   2841       "function c(x) {"
   2842       "  this.x = x;"
   2843       "  for (var i = 0; i < 32; i++) {"
   2844       "    this['x' + i] = x;"
   2845       "  }"
   2846       "}"
   2847       "function f(x) { return new c(x); };"
   2848       "f(1); f(2); f(3);"
   2849       "%OptimizeFunctionOnNextCall(f);"
   2850       "f(4);");
   2851 
   2852   CHECK_EQ(4, res.As<v8::Object>()
   2853                   ->GetRealNamedProperty(ctx, v8_str("x"))
   2854                   .ToLocalChecked()
   2855                   ->Int32Value(ctx)
   2856                   .FromJust());
   2857 
   2858   i::Handle<JSReceiver> o =
   2859       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
   2860 
   2861   CHECK(CcTest::heap()->InNewSpace(*o));
   2862 }
   2863 
   2864 
   2865 TEST(OptimizedPretenuringAllocationFolding) {
   2866   i::FLAG_allow_natives_syntax = true;
   2867   i::FLAG_expose_gc = true;
   2868   CcTest::InitializeVM();
   2869   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   2870   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2871   v8::HandleScope scope(CcTest::isolate());
   2872   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   2873   // Grow new space until maximum capacity is reached.
   2874   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   2875     CcTest::heap()->new_space()->Grow();
   2876   }
   2877 
   2878   i::ScopedVector<char> source(1024);
   2879   i::SNPrintF(
   2880       source,
   2881       "var number_elements = %d;"
   2882       "var elements = new Array();"
   2883       "function f() {"
   2884       "  for (var i = 0; i < number_elements; i++) {"
   2885       "    elements[i] = [[{}], [1.1]];"
   2886       "  }"
   2887       "  return elements[number_elements-1]"
   2888       "};"
   2889       "f(); gc();"
   2890       "f(); f();"
   2891       "%%OptimizeFunctionOnNextCall(f);"
   2892       "f();",
   2893       AllocationSite::kPretenureMinimumCreated);
   2894 
   2895   v8::Local<v8::Value> res = CompileRun(source.start());
   2896 
   2897   v8::Local<v8::Value> int_array =
   2898       v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
   2899   i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
   2900       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
   2901   v8::Local<v8::Value> double_array =
   2902       v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
   2903   i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
   2904       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
   2905 
   2906   i::Handle<JSReceiver> o =
   2907       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
   2908   CHECK(CcTest::heap()->InOldSpace(*o));
   2909   CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
   2910   CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
   2911   CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
   2912   CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
   2913 }
   2914 
   2915 
   2916 TEST(OptimizedPretenuringObjectArrayLiterals) {
   2917   i::FLAG_allow_natives_syntax = true;
   2918   i::FLAG_expose_gc = true;
   2919   CcTest::InitializeVM();
   2920   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   2921   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2922   v8::HandleScope scope(CcTest::isolate());
   2923 
   2924   // Grow new space until maximum capacity is reached.
   2925   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   2926     CcTest::heap()->new_space()->Grow();
   2927   }
   2928 
   2929   i::ScopedVector<char> source(1024);
   2930   i::SNPrintF(
   2931       source,
   2932       "var number_elements = %d;"
   2933       "var elements = new Array(number_elements);"
   2934       "function f() {"
   2935       "  for (var i = 0; i < number_elements; i++) {"
   2936       "    elements[i] = [{}, {}, {}];"
   2937       "  }"
   2938       "  return elements[number_elements - 1];"
   2939       "};"
   2940       "f(); gc();"
   2941       "f(); f();"
   2942       "%%OptimizeFunctionOnNextCall(f);"
   2943       "f();",
   2944       AllocationSite::kPretenureMinimumCreated);
   2945 
   2946   v8::Local<v8::Value> res = CompileRun(source.start());
   2947 
   2948   i::Handle<JSObject> o = Handle<JSObject>::cast(
   2949       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   2950 
   2951   CHECK(CcTest::heap()->InOldSpace(o->elements()));
   2952   CHECK(CcTest::heap()->InOldSpace(*o));
   2953 }
   2954 
   2955 
   2956 TEST(OptimizedPretenuringMixedInObjectProperties) {
   2957   i::FLAG_allow_natives_syntax = true;
   2958   i::FLAG_expose_gc = true;
   2959   CcTest::InitializeVM();
   2960   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   2961   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2962   v8::HandleScope scope(CcTest::isolate());
   2963 
   2964   // Grow new space until maximum capacity is reached.
   2965   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   2966     CcTest::heap()->new_space()->Grow();
   2967   }
   2968 
   2969 
   2970   i::ScopedVector<char> source(1024);
   2971   i::SNPrintF(
   2972       source,
   2973       "var number_elements = %d;"
   2974       "var elements = new Array(number_elements);"
   2975       "function f() {"
   2976       "  for (var i = 0; i < number_elements; i++) {"
   2977       "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
   2978       "  }"
   2979       "  return elements[number_elements - 1];"
   2980       "};"
   2981       "f(); gc();"
   2982       "f(); f();"
   2983       "%%OptimizeFunctionOnNextCall(f);"
   2984       "f();",
   2985       AllocationSite::kPretenureMinimumCreated);
   2986 
   2987   v8::Local<v8::Value> res = CompileRun(source.start());
   2988 
   2989   i::Handle<JSObject> o = Handle<JSObject>::cast(
   2990       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   2991 
   2992   CHECK(CcTest::heap()->InOldSpace(*o));
   2993   FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
   2994   FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
   2995   CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
   2996   if (!o->IsUnboxedDoubleField(idx2)) {
   2997     CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
   2998   } else {
   2999     CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
   3000   }
   3001 
   3002   JSObject* inner_object =
   3003       reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
   3004   CHECK(CcTest::heap()->InOldSpace(inner_object));
   3005   if (!inner_object->IsUnboxedDoubleField(idx1)) {
   3006     CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
   3007   } else {
   3008     CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
   3009   }
   3010   CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
   3011 }
   3012 
   3013 
   3014 TEST(OptimizedPretenuringDoubleArrayProperties) {
   3015   i::FLAG_allow_natives_syntax = true;
   3016   i::FLAG_expose_gc = true;
   3017   CcTest::InitializeVM();
   3018   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   3019   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   3020   v8::HandleScope scope(CcTest::isolate());
   3021 
   3022   // Grow new space until maximum capacity is reached.
   3023   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   3024     CcTest::heap()->new_space()->Grow();
   3025   }
   3026 
   3027   i::ScopedVector<char> source(1024);
   3028   i::SNPrintF(
   3029       source,
   3030       "var number_elements = %d;"
   3031       "var elements = new Array(number_elements);"
   3032       "function f() {"
   3033       "  for (var i = 0; i < number_elements; i++) {"
   3034       "    elements[i] = {a: 1.1, b: 2.2};"
   3035       "  }"
   3036       "  return elements[i - 1];"
   3037       "};"
   3038       "f(); gc();"
   3039       "f(); f();"
   3040       "%%OptimizeFunctionOnNextCall(f);"
   3041       "f();",
   3042       AllocationSite::kPretenureMinimumCreated);
   3043 
   3044   v8::Local<v8::Value> res = CompileRun(source.start());
   3045 
   3046   i::Handle<JSObject> o = Handle<JSObject>::cast(
   3047       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   3048 
   3049   CHECK(CcTest::heap()->InOldSpace(*o));
   3050   CHECK(CcTest::heap()->InOldSpace(o->properties()));
   3051 }
   3052 
   3053 
   3054 TEST(OptimizedPretenuringdoubleArrayLiterals) {
   3055   i::FLAG_allow_natives_syntax = true;
   3056   i::FLAG_expose_gc = true;
   3057   CcTest::InitializeVM();
   3058   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   3059   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   3060   v8::HandleScope scope(CcTest::isolate());
   3061 
   3062   // Grow new space until maximum capacity is reached.
   3063   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   3064     CcTest::heap()->new_space()->Grow();
   3065   }
   3066 
   3067   i::ScopedVector<char> source(1024);
   3068   i::SNPrintF(
   3069       source,
   3070       "var number_elements = %d;"
   3071       "var elements = new Array(number_elements);"
   3072       "function f() {"
   3073       "  for (var i = 0; i < number_elements; i++) {"
   3074       "    elements[i] = [1.1, 2.2, 3.3];"
   3075       "  }"
   3076       "  return elements[number_elements - 1];"
   3077       "};"
   3078       "f(); gc();"
   3079       "f(); f();"
   3080       "%%OptimizeFunctionOnNextCall(f);"
   3081       "f();",
   3082       AllocationSite::kPretenureMinimumCreated);
   3083 
   3084   v8::Local<v8::Value> res = CompileRun(source.start());
   3085 
   3086   i::Handle<JSObject> o = Handle<JSObject>::cast(
   3087       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   3088 
   3089   CHECK(CcTest::heap()->InOldSpace(o->elements()));
   3090   CHECK(CcTest::heap()->InOldSpace(*o));
   3091 }
   3092 
   3093 
   3094 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
   3095   i::FLAG_allow_natives_syntax = true;
   3096   i::FLAG_expose_gc = true;
   3097   CcTest::InitializeVM();
   3098   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   3099   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   3100   v8::HandleScope scope(CcTest::isolate());
   3101   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3102   // Grow new space until maximum capacity is reached.
   3103   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   3104     CcTest::heap()->new_space()->Grow();
   3105   }
   3106 
   3107   i::ScopedVector<char> source(1024);
   3108   i::SNPrintF(
   3109       source,
   3110       "var number_elements = 100;"
   3111       "var elements = new Array(number_elements);"
   3112       "function f() {"
   3113       "  for (var i = 0; i < number_elements; i++) {"
   3114       "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
   3115       "  }"
   3116       "  return elements[number_elements - 1];"
   3117       "};"
   3118       "f(); gc();"
   3119       "f(); f();"
   3120       "%%OptimizeFunctionOnNextCall(f);"
   3121       "f();");
   3122 
   3123   v8::Local<v8::Value> res = CompileRun(source.start());
   3124 
   3125   v8::Local<v8::Value> int_array =
   3126       v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
   3127   i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
   3128       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
   3129   v8::Local<v8::Value> double_array =
   3130       v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
   3131   i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
   3132       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
   3133 
   3134   Handle<JSObject> o = Handle<JSObject>::cast(
   3135       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   3136   CHECK(CcTest::heap()->InOldSpace(*o));
   3137   CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
   3138   CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
   3139   CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
   3140   CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
   3141 }
   3142 
   3143 
   3144 TEST(OptimizedPretenuringNestedObjectLiterals) {
   3145   i::FLAG_allow_natives_syntax = true;
   3146   i::FLAG_expose_gc = true;
   3147   CcTest::InitializeVM();
   3148   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   3149   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   3150   v8::HandleScope scope(CcTest::isolate());
   3151   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3152   // Grow new space until maximum capacity is reached.
   3153   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   3154     CcTest::heap()->new_space()->Grow();
   3155   }
   3156 
   3157   i::ScopedVector<char> source(1024);
   3158   i::SNPrintF(
   3159       source,
   3160       "var number_elements = %d;"
   3161       "var elements = new Array(number_elements);"
   3162       "function f() {"
   3163       "  for (var i = 0; i < number_elements; i++) {"
   3164       "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
   3165       "  }"
   3166       "  return elements[number_elements - 1];"
   3167       "};"
   3168       "f(); gc();"
   3169       "f(); f();"
   3170       "%%OptimizeFunctionOnNextCall(f);"
   3171       "f();",
   3172       AllocationSite::kPretenureMinimumCreated);
   3173 
   3174   v8::Local<v8::Value> res = CompileRun(source.start());
   3175 
   3176   v8::Local<v8::Value> int_array_1 =
   3177       v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
   3178   Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
   3179       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
   3180   v8::Local<v8::Value> int_array_2 =
   3181       v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
   3182   Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
   3183       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
   3184 
   3185   Handle<JSObject> o = Handle<JSObject>::cast(
   3186       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   3187   CHECK(CcTest::heap()->InOldSpace(*o));
   3188   CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
   3189   CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
   3190   CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
   3191   CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
   3192 }
   3193 
   3194 
   3195 TEST(OptimizedPretenuringNestedDoubleLiterals) {
   3196   i::FLAG_allow_natives_syntax = true;
   3197   i::FLAG_expose_gc = true;
   3198   CcTest::InitializeVM();
   3199   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   3200   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   3201   v8::HandleScope scope(CcTest::isolate());
   3202   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3203   // Grow new space until maximum capacity is reached.
   3204   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
   3205     CcTest::heap()->new_space()->Grow();
   3206   }
   3207 
   3208   i::ScopedVector<char> source(1024);
   3209   i::SNPrintF(
   3210       source,
   3211       "var number_elements = %d;"
   3212       "var elements = new Array(number_elements);"
   3213       "function f() {"
   3214       "  for (var i = 0; i < number_elements; i++) {"
   3215       "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
   3216       "  }"
   3217       "  return elements[number_elements - 1];"
   3218       "};"
   3219       "f(); gc();"
   3220       "f(); f();"
   3221       "%%OptimizeFunctionOnNextCall(f);"
   3222       "f();",
   3223       AllocationSite::kPretenureMinimumCreated);
   3224 
   3225   v8::Local<v8::Value> res = CompileRun(source.start());
   3226 
   3227   v8::Local<v8::Value> double_array_1 =
   3228       v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
   3229   i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
   3230       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
   3231   v8::Local<v8::Value> double_array_2 =
   3232       v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
   3233   i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
   3234       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
   3235 
   3236   i::Handle<JSObject> o = Handle<JSObject>::cast(
   3237       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   3238   CHECK(CcTest::heap()->InOldSpace(*o));
   3239   CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
   3240   CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
   3241   CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
   3242   CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
   3243 }
   3244 
   3245 
   3246 // Test regular array literals allocation.
   3247 TEST(OptimizedAllocationArrayLiterals) {
   3248   i::FLAG_allow_natives_syntax = true;
   3249   CcTest::InitializeVM();
   3250   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   3251   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   3252   v8::HandleScope scope(CcTest::isolate());
   3253   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3254   v8::Local<v8::Value> res = CompileRun(
   3255       "function f() {"
   3256       "  var numbers = new Array(1, 2, 3);"
   3257       "  numbers[0] = 3.14;"
   3258       "  return numbers;"
   3259       "};"
   3260       "f(); f(); f();"
   3261       "%OptimizeFunctionOnNextCall(f);"
   3262       "f();");
   3263   CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
   3264                                        ->Get(ctx, v8_str("0"))
   3265                                        .ToLocalChecked()
   3266                                        ->Int32Value(ctx)
   3267                                        .FromJust());
   3268 
   3269   i::Handle<JSObject> o = Handle<JSObject>::cast(
   3270       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
   3271 
   3272   CHECK(CcTest::heap()->InNewSpace(o->elements()));
   3273 }
   3274 
   3275 
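        // Counts the map transitions recorded in |map|'s transition array.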
   3276 static int CountMapTransitions(Map* map) {
   3277   return TransitionArray::NumberOfTransitions(map->raw_transitions());
   3278 }
   3279 
   3280 
   3281 // Test that map transitions are cleared and maps are collected with
   3282 // incremental marking as well.
   3283 TEST(Regress1465) {
   3284   i::FLAG_stress_compaction = false;
   3285   i::FLAG_allow_natives_syntax = true;
   3286   i::FLAG_trace_incremental_marking = true;
   3287   i::FLAG_retain_maps_for_n_gc = 0;
   3288   CcTest::InitializeVM();
   3289   v8::HandleScope scope(CcTest::isolate());
   3290   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3291   static const int transitions_count = 256;
   3292 
   3293   CompileRun("function F() {}");
   3294   {
   3295     AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   3296     for (int i = 0; i < transitions_count; i++) {
   3297       EmbeddedVector<char, 64> buffer;
   3298       SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
   3299       CompileRun(buffer.start());
   3300     }
   3301     CompileRun("var root = new F;");
   3302   }
   3303 
   3304   i::Handle<JSReceiver> root =
   3305       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
   3306           CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
   3307 
   3308   // Count number of live transitions before marking.
   3309   int transitions_before = CountMapTransitions(root->map());
   3310   CompileRun("%DebugPrint(root);");
   3311   CHECK_EQ(transitions_count, transitions_before);
   3312 
   3313   heap::SimulateIncrementalMarking(CcTest::heap());
   3314   CcTest::heap()->CollectAllGarbage();
   3315 
   3316   // Count number of live transitions after marking.  Note that one transition
   3317   // is left, because 'o' still holds an instance of one transition target.
   3318   int transitions_after = CountMapTransitions(root->map());
   3319   CompileRun("%DebugPrint(root);");
   3320   CHECK_EQ(1, transitions_after);
   3321 }
   3322 
   3323 
   3324 #ifdef DEBUG
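        // Creates |transitions_count| map transitions from F's initial map by
        // adding a distinct property to a fresh instance in each iteration.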
   3325 static void AddTransitions(int transitions_count) {
   3326   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   3327   for (int i = 0; i < transitions_count; i++) {
   3328     EmbeddedVector<char, 64> buffer;
   3329     SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
   3330     CompileRun(buffer.start());
   3331   }
   3332 }
   3333 
   3334 
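        // Looks up a property on the global object and returns it as a JSObject
        // handle.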
   3335 static i::Handle<JSObject> GetByName(const char* name) {
   3336   return i::Handle<JSObject>::cast(
   3337       v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
   3338           CcTest::global()
   3339               ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
   3340               .ToLocalChecked())));
   3341 }
   3342 
   3343 
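        // Adds a Smi-valued property to |object| while an allocation timeout of
        // |gc_count| is active, so that a (global) GC is triggered during the
        // property store.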
   3344 static void AddPropertyTo(
   3345     int gc_count, Handle<JSObject> object, const char* property_name) {
   3346   Isolate* isolate = CcTest::i_isolate();
   3347   Factory* factory = isolate->factory();
   3348   Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
   3349   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
   3350   i::FLAG_gc_interval = gc_count;
   3351   i::FLAG_gc_global = true;
   3352   i::FLAG_retain_maps_for_n_gc = 0;
   3353   CcTest::heap()->set_allocation_timeout(gc_count);
   3354   JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
   3355 }
   3356 
   3357 
   3358 TEST(TransitionArrayShrinksDuringAllocToZero) {
   3359   i::FLAG_stress_compaction = false;
   3360   i::FLAG_allow_natives_syntax = true;
   3361   CcTest::InitializeVM();
   3362   v8::HandleScope scope(CcTest::isolate());
   3363   static const int transitions_count = 10;
   3364   CompileRun("function F() { }");
   3365   AddTransitions(transitions_count);
   3366   CompileRun("var root = new F;");
   3367   Handle<JSObject> root = GetByName("root");
   3368 
   3369   // Count number of live transitions before marking.
   3370   int transitions_before = CountMapTransitions(root->map());
   3371   CHECK_EQ(transitions_count, transitions_before);
   3372 
   3373   // Get rid of o
   3374   CompileRun("o = new F;"
   3375              "root = new F");
   3376   root = GetByName("root");
   3377   AddPropertyTo(2, root, "funny");
   3378   CcTest::heap()->CollectGarbage(NEW_SPACE);
   3379 
   3380   // Count number of live transitions after marking.  Note that one transition
   3381   // is left, because 'o' still holds an instance of one transition target.
   3382   int transitions_after = CountMapTransitions(
   3383       Map::cast(root->map()->GetBackPointer()));
   3384   CHECK_EQ(1, transitions_after);
   3385 }
   3386 
   3387 
   3388 TEST(TransitionArrayShrinksDuringAllocToOne) {
   3389   i::FLAG_stress_compaction = false;
   3390   i::FLAG_allow_natives_syntax = true;
   3391   CcTest::InitializeVM();
   3392   v8::HandleScope scope(CcTest::isolate());
   3393   static const int transitions_count = 10;
   3394   CompileRun("function F() {}");
   3395   AddTransitions(transitions_count);
   3396   CompileRun("var root = new F;");
   3397   Handle<JSObject> root = GetByName("root");
   3398 
   3399   // Count number of live transitions before marking.
   3400   int transitions_before = CountMapTransitions(root->map());
   3401   CHECK_EQ(transitions_count, transitions_before);
   3402 
   3403   root = GetByName("root");
   3404   AddPropertyTo(2, root, "funny");
   3405   CcTest::heap()->CollectGarbage(NEW_SPACE);
   3406 
   3407   // Count number of live transitions after marking.  Note that one transition
   3408   // is left, because 'o' still holds an instance of one transition target.
   3409   int transitions_after = CountMapTransitions(
   3410       Map::cast(root->map()->GetBackPointer()));
   3411   CHECK_EQ(2, transitions_after);
   3412 }
   3413 
   3414 
   3415 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
   3416   i::FLAG_stress_compaction = false;
   3417   i::FLAG_allow_natives_syntax = true;
   3418   CcTest::InitializeVM();
   3419   v8::HandleScope scope(CcTest::isolate());
   3420   static const int transitions_count = 10;
   3421   CompileRun("function F() {}");
   3422   AddTransitions(transitions_count);
   3423   CompileRun("var root = new F;");
   3424   Handle<JSObject> root = GetByName("root");
   3425 
   3426   // Count number of live transitions before marking.
   3427   int transitions_before = CountMapTransitions(root->map());
   3428   CHECK_EQ(transitions_count, transitions_before);
   3429 
   3430   root = GetByName("root");
   3431   AddPropertyTo(0, root, "prop9");
   3432   CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
   3433 
   3434   // Count number of live transitions after marking.  Note that one transition
   3435   // is left, because 'o' still holds an instance of one transition target.
   3436   int transitions_after = CountMapTransitions(
   3437       Map::cast(root->map()->GetBackPointer()));
   3438   CHECK_EQ(1, transitions_after);
   3439 }
   3440 
   3441 
   3442 TEST(TransitionArraySimpleToFull) {
   3443   i::FLAG_stress_compaction = false;
   3444   i::FLAG_allow_natives_syntax = true;
   3445   CcTest::InitializeVM();
   3446   v8::HandleScope scope(CcTest::isolate());
   3447   static const int transitions_count = 1;
   3448   CompileRun("function F() {}");
   3449   AddTransitions(transitions_count);
   3450   CompileRun("var root = new F;");
   3451   Handle<JSObject> root = GetByName("root");
   3452 
   3453   // Count number of live transitions before marking.
   3454   int transitions_before = CountMapTransitions(root->map());
   3455   CHECK_EQ(transitions_count, transitions_before);
   3456 
   3457   CompileRun("o = new F;"
   3458              "root = new F");
   3459   root = GetByName("root");
   3460   CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
   3461   AddPropertyTo(2, root, "happy");
   3462 
   3463   // Count number of live transitions after marking.  Note that one transition
   3464   // is left, because 'o' still holds an instance of one transition target.
   3465   int transitions_after = CountMapTransitions(
   3466       Map::cast(root->map()->GetBackPointer()));
   3467   CHECK_EQ(1, transitions_after);
   3468 }
   3469 #endif  // DEBUG
   3470 
   3471 
   3472 TEST(Regress2143a) {
   3473   i::FLAG_incremental_marking = true;
   3474   CcTest::InitializeVM();
   3475   v8::HandleScope scope(CcTest::isolate());
   3476 
   3477   // Prepare a map transition from the root object together with a yet
   3478   // untransitioned root object.
   3479   CompileRun("var root = new Object;"
   3480              "root.foo = 0;"
   3481              "root = new Object;");
   3482 
   3483   heap::SimulateIncrementalMarking(CcTest::heap());
   3484 
   3485   // Compile a StoreIC that performs the prepared map transition. This
   3486   // will restart incremental marking and should make sure the root is
   3487   // marked grey again.
   3488   CompileRun("function f(o) {"
   3489              "  o.foo = 0;"
   3490              "}"
   3491              "f(new Object);"
   3492              "f(root);");
   3493 
   3494   // This bug only triggers with aggressive IC clearing.
   3495   CcTest::heap()->AgeInlineCaches();
   3496 
   3497   // Explicitly request GC to perform final marking step and sweeping.
   3498   CcTest::heap()->CollectAllGarbage();
   3499 
   3500   Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
   3501       CcTest::global()
   3502           ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
   3503           .ToLocalChecked()));
   3504 
   3505   // The root object should be in a sane state.
   3506   CHECK(root->IsJSObject());
   3507   CHECK(root->map()->IsMap());
   3508 }
   3509 
   3510 
   3511 TEST(Regress2143b) {
   3512   i::FLAG_incremental_marking = true;
   3513   i::FLAG_allow_natives_syntax = true;
   3514   CcTest::InitializeVM();
   3515   v8::HandleScope scope(CcTest::isolate());
   3516 
   3517   // Prepare a map transition from the root object together with a yet
   3518   // untransitioned root object.
   3519   CompileRun("var root = new Object;"
   3520              "root.foo = 0;"
   3521              "root = new Object;");
   3522 
   3523   heap::SimulateIncrementalMarking(CcTest::heap());
   3524 
   3525   // Compile an optimized LStoreNamedField that performs the prepared
   3526   // map transition. This will restart incremental marking and should
   3527   // make sure the root is marked grey again.
   3528   CompileRun("function f(o) {"
   3529              "  o.foo = 0;"
   3530              "}"
   3531              "f(new Object);"
   3532              "f(new Object);"
   3533              "%OptimizeFunctionOnNextCall(f);"
   3534              "f(root);"
   3535              "%DeoptimizeFunction(f);");
   3536 
   3537   // This bug only triggers with aggressive IC clearing.
   3538   CcTest::heap()->AgeInlineCaches();
   3539 
   3540   // Explicitly request GC to perform final marking step and sweeping.
   3541   CcTest::heap()->CollectAllGarbage();
   3542 
   3543   Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
   3544       CcTest::global()
   3545           ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
   3546           .ToLocalChecked()));
   3547 
   3548   // The root object should be in a sane state.
   3549   CHECK(root->IsJSObject());
   3550   CHECK(root->map()->IsMap());
   3551 }
   3552 
   3553 
   3554 TEST(ReleaseOverReservedPages) {
   3555   if (FLAG_never_compact) return;
   3556   i::FLAG_trace_gc = true;
   3557   // The optimizer can allocate stuff, messing up the test.
   3558   i::FLAG_crankshaft = false;
   3559   i::FLAG_always_opt = false;
   3560   // Parallel compaction increases fragmentation, depending on how existing
   3561   // memory is distributed. Since this is non-deterministic because of
   3562   // concurrent sweeping, we disable it for this test.
   3563   i::FLAG_parallel_compaction = false;
   3564   // Concurrent sweeping adds non-determinism, depending on when memory is
   3565   // available for further reuse.
   3566   i::FLAG_concurrent_sweeping = false;
   3567   // Fast evacuation of pages may result in a different page count in old space.
   3568   i::FLAG_page_promotion = false;
   3569   CcTest::InitializeVM();
   3570   Isolate* isolate = CcTest::i_isolate();
   3571   Factory* factory = isolate->factory();
   3572   Heap* heap = isolate->heap();
   3573   v8::HandleScope scope(CcTest::isolate());
   3574   static const int number_of_test_pages = 20;
   3575 
   3576   // Prepare many pages with low live-bytes count.
   3577   PagedSpace* old_space = heap->old_space();
   3578   const int initial_page_count = old_space->CountTotalPages();
   3579   const int overall_page_count = number_of_test_pages + initial_page_count;
   3580   for (int i = 0; i < number_of_test_pages; i++) {
   3581     AlwaysAllocateScope always_allocate(isolate);
   3582     heap::SimulateFullSpace(old_space);
   3583     factory->NewFixedArray(1, TENURED);
   3584   }
   3585   CHECK_EQ(overall_page_count, old_space->CountTotalPages());
   3586 
   3587   // Triggering one GC will cause a lot of garbage to be discovered but
   3588   // evenly spread across all allocated pages.
   3589   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
   3590                           "triggered for preparation");
   3591   CHECK_GE(overall_page_count, old_space->CountTotalPages());
   3592 
   3593   // Triggering subsequent GCs should cause at least half of the pages
   3594   // to be released to the OS after at most two cycles.
   3595   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
   3596                           "triggered by test 1");
   3597   CHECK_GE(overall_page_count, old_space->CountTotalPages());
   3598   heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
   3599                           "triggered by test 2");
   3600   CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
   3601 
   3602   // Triggering a last-resort GC should cause all pages to be released to the
   3603   // OS so that other processes can seize the memory.  If we get a failure here
   3604   // where there are 2 pages left instead of 1, then we should increase the
   3605   // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
   3606   // first page should be small in order to reduce memory used when the VM
   3607   // boots, but if the 20 small arrays don't fit on the first page then that's
   3608   // an indication that it is too small.
   3609   heap->CollectAllAvailableGarbage("triggered really hard");
   3610   CHECK_EQ(initial_page_count, old_space->CountTotalPages());
   3611 }
   3612 
   3613 static int forced_gc_counter = 0;
   3614 
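        // Use-counter callback that counts kForcedGC notifications; used by the
        // CountForcedGC test below.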
   3615 void MockUseCounterCallback(v8::Isolate* isolate,
   3616                             v8::Isolate::UseCounterFeature feature) {
   3617   isolate->GetCurrentContext();
   3618   if (feature == v8::Isolate::kForcedGC) {
   3619     forced_gc_counter++;
   3620   }
   3621 }
   3622 
   3623 
   3624 TEST(CountForcedGC) {
   3625   i::FLAG_expose_gc = true;
   3626   CcTest::InitializeVM();
   3627   Isolate* isolate = CcTest::i_isolate();
   3628   v8::HandleScope scope(CcTest::isolate());
   3629 
   3630   isolate->SetUseCounterCallback(MockUseCounterCallback);
   3631 
   3632   forced_gc_counter = 0;
   3633   const char* source = "gc();";
   3634   CompileRun(source);
   3635   CHECK_GT(forced_gc_counter, 0);
   3636 }
   3637 
   3638 
   3639 #ifdef OBJECT_PRINT
   3640 TEST(PrintSharedFunctionInfo) {
   3641   CcTest::InitializeVM();
   3642   v8::HandleScope scope(CcTest::isolate());
   3643   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3644   const char* source = "f = function() { return 987654321; }\n"
   3645                        "g = function() { return 123456789; }\n";
   3646   CompileRun(source);
   3647   i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
   3648       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3649           CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
   3650 
   3651   OFStream os(stdout);
   3652   g->shared()->Print(os);
   3653   os << std::endl;
   3654 }
   3655 #endif  // OBJECT_PRINT
   3656 
   3657 
   3658 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
   3659   if (i::FLAG_always_opt) return;
   3660   CcTest::InitializeVM();
   3661   v8::HandleScope scope(CcTest::isolate());
   3662   v8::Local<v8::Value> fun1, fun2;
   3663   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3664   {
   3665     CompileRun("function fun() {};");
   3666     fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
   3667   }
   3668 
   3669   {
   3670     CompileRun("function fun() {};");
   3671     fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
   3672   }
   3673 
   3674   // Prepare function f that contains type feedback for the two closures.
   3675   CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
   3676   CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
   3677   CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
   3678 
   3679   Handle<JSFunction> f = Handle<JSFunction>::cast(
   3680       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3681           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   3682 
   3683   Handle<TypeFeedbackVector> feedback_vector(f->feedback_vector());
   3684   FeedbackVectorHelper feedback_helper(feedback_vector);
   3685 
   3686   int expected_slots = 2;
   3687   CHECK_EQ(expected_slots, feedback_helper.slot_count());
   3688   int slot1 = 0;
   3689   int slot2 = 1;
   3690   CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
   3691   CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
   3692 
   3693   heap::SimulateIncrementalMarking(CcTest::heap());
   3694   CcTest::heap()->CollectAllGarbage();
   3695 
   3696   CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
   3697              ->cleared());
   3698   CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
   3699              ->cleared());
   3700 }
   3701 
   3702 
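        // Scans the relocation info of |code| and returns the first inline cache
        // stub of the given |kind|, or NULL if there is none.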
   3703 static Code* FindFirstIC(Code* code, Code::Kind kind) {
   3704   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
   3705              RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
   3706   for (RelocIterator it(code, mask); !it.done(); it.next()) {
   3707     RelocInfo* info = it.rinfo();
   3708     Code* target = Code::GetCodeFromTargetAddress(info->target_address());
   3709     if (target->is_inline_cache_stub() && target->kind() == kind) {
   3710       return target;
   3711     }
   3712   }
   3713   return NULL;
   3714 }
   3715 
   3716 
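        // Asserts that the (keyed) load IC at |slot_index| in |f|'s feedback
        // vector is in |desired_state|.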
   3717 static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
   3718                           InlineCacheState desired_state) {
   3719   Handle<TypeFeedbackVector> vector =
   3720       Handle<TypeFeedbackVector>(f->feedback_vector());
   3721   FeedbackVectorHelper helper(vector);
   3722   FeedbackVectorSlot slot = helper.slot(slot_index);
   3723   if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
   3724     LoadICNexus nexus(vector, slot);
   3725     CHECK(nexus.StateFromFeedback() == desired_state);
   3726   } else {
   3727     CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
   3728     KeyedLoadICNexus nexus(vector, slot);
   3729     CHECK(nexus.StateFromFeedback() == desired_state);
   3730   }
   3731 }
   3732 
   3733 TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
   3734   if (i::FLAG_always_opt) return;
   3735   CcTest::InitializeVM();
   3736   v8::HandleScope scope(CcTest::isolate());
   3737   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3738   // Prepare function f that contains a monomorphic IC for object
   3739   // originating from the same native context.
   3740   CompileRun(
   3741       "function fun() { this.x = 1; };"
   3742       "function f(o) { return new o(); } f(fun); f(fun);");
   3743   Handle<JSFunction> f = Handle<JSFunction>::cast(
   3744       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3745           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   3746 
   3747   Handle<TypeFeedbackVector> vector(f->feedback_vector());
   3748   CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
   3749 
   3750   heap::SimulateIncrementalMarking(CcTest::heap());
   3751   CcTest::heap()->CollectAllGarbage();
   3752 
   3753   CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
   3754 }
   3755 
   3756 TEST(IncrementalMarkingPreservesMonomorphicIC) {
   3757   if (i::FLAG_always_opt) return;
   3758   CcTest::InitializeVM();
   3759   v8::HandleScope scope(CcTest::isolate());
   3760   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3761   // Prepare function f that contains a monomorphic IC for object
   3762   // originating from the same native context.
   3763   CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
   3764              "function f(o) { return o.x; } f(obj); f(obj);");
   3765   Handle<JSFunction> f = Handle<JSFunction>::cast(
   3766       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3767           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   3768 
   3769   CheckVectorIC(f, 0, MONOMORPHIC);
   3770 
   3771   heap::SimulateIncrementalMarking(CcTest::heap());
   3772   CcTest::heap()->CollectAllGarbage();
   3773 
   3774   CheckVectorIC(f, 0, MONOMORPHIC);
   3775 }
   3776 
   3777 TEST(IncrementalMarkingPreservesPolymorphicIC) {
   3778   if (i::FLAG_always_opt) return;
   3779   CcTest::InitializeVM();
   3780   v8::HandleScope scope(CcTest::isolate());
   3781   v8::Local<v8::Value> obj1, obj2;
   3782   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3783 
   3784   {
   3785     LocalContext env;
   3786     CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
   3787     obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
   3788   }
   3789 
   3790   {
   3791     LocalContext env;
   3792     CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
   3793     obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
   3794   }
   3795 
   3796   // Prepare function f that contains a polymorphic IC for objects
   3797   // originating from two different native contexts.
   3798   CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
   3799   CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
   3800   CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
   3801   Handle<JSFunction> f = Handle<JSFunction>::cast(
   3802       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3803           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   3804 
   3805   CheckVectorIC(f, 0, POLYMORPHIC);
   3806 
   3807   // Fire context dispose notification.
   3808   heap::SimulateIncrementalMarking(CcTest::heap());
   3809   CcTest::heap()->CollectAllGarbage();
   3810 
   3811   CheckVectorIC(f, 0, POLYMORPHIC);
   3812 }
   3813 
   3814 TEST(ContextDisposeDoesntClearPolymorphicIC) {
   3815   if (i::FLAG_always_opt) return;
   3816   CcTest::InitializeVM();
   3817   v8::HandleScope scope(CcTest::isolate());
   3818   v8::Local<v8::Value> obj1, obj2;
   3819   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   3820 
   3821   {
   3822     LocalContext env;
   3823     CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
   3824     obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
   3825   }
   3826 
   3827   {
   3828     LocalContext env;
   3829     CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
   3830     obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
   3831   }
   3832 
   3833   // Prepare function f that contains a polymorphic IC for objects
   3834   // originating from two different native contexts.
   3835   CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
   3836   CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
   3837   CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
   3838   Handle<JSFunction> f = Handle<JSFunction>::cast(
   3839       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3840           CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
   3841 
   3842   CheckVectorIC(f, 0, POLYMORPHIC);
   3843 
   3844   // Fire context dispose notification.
   3845   CcTest::isolate()->ContextDisposedNotification();
   3846   heap::SimulateIncrementalMarking(CcTest::heap());
   3847   CcTest::heap()->CollectAllGarbage();
   3848 
   3849   CheckVectorIC(f, 0, POLYMORPHIC);
   3850 }
   3851 
   3852 
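        // External one-byte string resource that remembers whether it has been
        // disposed, so tests can observe when the GC releases the external source.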
   3853 class SourceResource : public v8::String::ExternalOneByteStringResource {
   3854  public:
   3855   explicit SourceResource(const char* data)
   3856     : data_(data), length_(strlen(data)) { }
   3857 
   3858   virtual void Dispose() {
   3859     i::DeleteArray(data_);
   3860     data_ = NULL;
   3861   }
   3862 
   3863   const char* data() const { return data_; }
   3864 
   3865   size_t length() const { return length_; }
   3866 
   3867   bool IsDisposed() { return data_ == NULL; }
   3868 
   3869  private:
   3870   const char* data_;
   3871   size_t length_;
   3872 };
   3873 
   3874 
   3875 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
   3876                                const char* accessor) {
   3877   // Test that the data retained by the Error.stack accessor is released
   3878   // after the first time the accessor is fired.  We use an external string
   3879   // to check whether the data is being released since the external string
   3880   // resource's callback is fired when the external string is GC'ed.
   3881   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
   3882   v8::HandleScope scope(isolate);
   3883   SourceResource* resource = new SourceResource(i::StrDup(source));
   3884   {
   3885     v8::HandleScope scope(isolate);
   3886     v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
   3887     v8::Local<v8::String> source_string =
   3888         v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
   3889     i_isolate->heap()->CollectAllAvailableGarbage();
   3890     v8::Script::Compile(ctx, source_string)
   3891         .ToLocalChecked()
   3892         ->Run(ctx)
   3893         .ToLocalChecked();
   3894     CHECK(!resource->IsDisposed());
   3895   }
   3896   // i_isolate->heap()->CollectAllAvailableGarbage();
   3897   CHECK(!resource->IsDisposed());
   3898 
   3899   CompileRun(accessor);
   3900   i_isolate->heap()->CollectAllAvailableGarbage();
   3901 
   3902   // External source has been released.
   3903   CHECK(resource->IsDisposed());
   3904   delete resource;
   3905 }
   3906 
   3907 
   3908 UNINITIALIZED_TEST(ReleaseStackTraceData) {
   3909   if (i::FLAG_always_opt) {
   3910     // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
   3911     // See: https://codereview.chromium.org/181833004/
   3912     return;
   3913   }
   3914   FLAG_use_ic = false;  // ICs retain objects.
   3915   FLAG_concurrent_recompilation = false;
   3916   v8::Isolate::CreateParams create_params;
   3917   create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
   3918   v8::Isolate* isolate = v8::Isolate::New(create_params);
   3919   {
   3920     v8::Isolate::Scope isolate_scope(isolate);
   3921     v8::HandleScope handle_scope(isolate);
   3922     v8::Context::New(isolate)->Enter();
   3923     static const char* source1 = "var error = null;            "
   3924     /* Normal Error */           "try {                        "
   3925                                  "  throw new Error();         "
   3926                                  "} catch (e) {                "
   3927                                  "  error = e;                 "
   3928                                  "}                            ";
   3929     static const char* source2 = "var error = null;            "
   3930     /* Stack overflow */         "try {                        "
   3931                                  "  (function f() { f(); })(); "
   3932                                  "} catch (e) {                "
   3933                                  "  error = e;                 "
   3934                                  "}                            ";
   3935     static const char* source3 = "var error = null;            "
   3936     /* Normal Error */           "try {                        "
   3937     /* as prototype */           "  throw new Error();         "
   3938                                  "} catch (e) {                "
   3939                                  "  error = {};                "
   3940                                  "  error.__proto__ = e;       "
   3941                                  "}                            ";
   3942     static const char* source4 = "var error = null;            "
   3943     /* Stack overflow */         "try {                        "
   3944     /* as prototype   */         "  (function f() { f(); })(); "
   3945                                  "} catch (e) {                "
   3946                                  "  error = {};                "
   3947                                  "  error.__proto__ = e;       "
   3948                                  "}                            ";
   3949     static const char* getter = "error.stack";
   3950     static const char* setter = "error.stack = 0";
   3951 
   3952     ReleaseStackTraceDataTest(isolate, source1, setter);
   3953     ReleaseStackTraceDataTest(isolate, source2, setter);
   3954     // We do not test source3 and source4 with setter, since the setter is
   3955     // supposed to (atypically) write to the receiver, not the holder.  This is
   3956     // to emulate the behavior of a data property.
   3957 
   3958     ReleaseStackTraceDataTest(isolate, source1, getter);
   3959     ReleaseStackTraceDataTest(isolate, source2, getter);
   3960     ReleaseStackTraceDataTest(isolate, source3, getter);
   3961     ReleaseStackTraceDataTest(isolate, source4, getter);
   3962   }
   3963   isolate->Dispose();
   3964 }
   3965 
   3966 
   3967 TEST(Regress159140) {
   3968   i::FLAG_allow_natives_syntax = true;
   3969   CcTest::InitializeVM();
   3970   Isolate* isolate = CcTest::i_isolate();
   3971   LocalContext env;
   3972   Heap* heap = isolate->heap();
   3973   HandleScope scope(isolate);
   3974 
   3975   // Perform one initial GC to enable code flushing.
   3976   heap->CollectAllGarbage();
   3977 
   3978   // Prepare several closures that are all eligible for code flushing
   3979   // because all reachable ones are not optimized. Make sure that the
   3980   // optimized code object is directly reachable through a handle so
   3981   // that it is marked black during incremental marking.
   3982   Handle<Code> code;
   3983   {
   3984     HandleScope inner_scope(isolate);
   3985     CompileRun("function h(x) {}"
   3986                "function mkClosure() {"
   3987                "  return function(x) { return x + 1; };"
   3988                "}"
   3989                "var f = mkClosure();"
   3990                "var g = mkClosure();"
   3991                "f(1); f(2);"
   3992                "g(1); g(2);"
   3993                "h(1); h(2);"
   3994                "%OptimizeFunctionOnNextCall(f); f(3);"
   3995                "%OptimizeFunctionOnNextCall(h); h(3);");
   3996 
   3997     Handle<JSFunction> f = Handle<JSFunction>::cast(
   3998         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   3999             CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
   4000     CHECK(f->is_compiled());
   4001     CompileRun("f = null;");
   4002 
   4003     Handle<JSFunction> g = Handle<JSFunction>::cast(
   4004         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4005             CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
   4006     CHECK(g->is_compiled());
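            // Age g's unoptimized code by hand, alternating the marking parity
            // as successive GC cycles would, so that it becomes a candidate for
            // code flushing during the upcoming GC.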
   4007     const int kAgingThreshold = 6;
   4008     for (int i = 0; i < kAgingThreshold; i++) {
   4009       g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   4010     }
   4011 
   4012     code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
   4013   }
   4014 
   4015   // Simulate incremental marking so that the functions are enqueued as
   4016   // code flushing candidates. Then optimize one function. Finally
   4017   // finish the GC to complete code flushing.
   4018   heap::SimulateIncrementalMarking(heap);
   4019   CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
   4020   heap->CollectAllGarbage();
   4021 
   4022   // Unoptimized code is missing and the deoptimizer will go ballistic.
   4023   CompileRun("g('bozo');");
   4024 }
   4025 
   4026 
   4027 TEST(Regress165495) {
   4028   i::FLAG_allow_natives_syntax = true;
   4029   CcTest::InitializeVM();
   4030   Isolate* isolate = CcTest::i_isolate();
   4031   Heap* heap = isolate->heap();
   4032   HandleScope scope(isolate);
   4033 
   4034   // Perform one initial GC to enable code flushing.
   4035   heap->CollectAllGarbage();
   4036 
   4037   // Prepare an optimized closure so that the optimized code map gets
   4038   // populated. Then age the unoptimized code to trigger code flushing,
   4039   // but make sure the optimized code is unreachable.
   4040   {
   4041     HandleScope inner_scope(isolate);
   4042     LocalContext env;
   4043     CompileRun("function mkClosure() {"
   4044                "  return function(x) { return x + 1; };"
   4045                "}"
   4046                "var f = mkClosure();"
   4047                "f(1); f(2);"
   4048                "%OptimizeFunctionOnNextCall(f); f(3);");
   4049 
   4050     Handle<JSFunction> f = Handle<JSFunction>::cast(
   4051         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4052             CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
   4053     CHECK(f->is_compiled());
   4054     const int kAgingThreshold = 6;
   4055     for (int i = 0; i < kAgingThreshold; i++) {
   4056       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   4057     }
   4058 
   4059     CompileRun("f = null;");
   4060   }
   4061 
   4062   // Simulate incremental marking so that unoptimized code is flushed
   4063   // even though it still is cached in the optimized code map.
   4064   heap::SimulateIncrementalMarking(heap);
   4065   heap->CollectAllGarbage();
   4066 
   4067   // Make a new closure that will get code installed from the code map.
   4068   // Unoptimized code is missing and the deoptimizer will go ballistic.
   4069   CompileRun("var g = mkClosure(); g('bozo');");
   4070 }
   4071 
   4072 
   4073 TEST(Regress169209) {
   4074   i::FLAG_stress_compaction = false;
   4075   i::FLAG_allow_natives_syntax = true;
   4076 
   4077   CcTest::InitializeVM();
   4078   Isolate* isolate = CcTest::i_isolate();
   4079   Heap* heap = isolate->heap();
   4080   HandleScope scope(isolate);
   4081 
   4082   // Perform one initial GC to enable code flushing.
   4083   heap->CollectAllGarbage();
   4084 
   4085   // Prepare a shared function info eligible for code flushing for which
   4086   // the unoptimized code will be replaced during optimization.
   4087   Handle<SharedFunctionInfo> shared1;
   4088   {
   4089     HandleScope inner_scope(isolate);
   4090     LocalContext env;
   4091     CompileRun("function f() { return 'foobar'; }"
   4092                "function g(x) { if (x) f(); }"
   4093                "f();"
   4094                "g(false);"
   4095                "g(false);");
   4096 
   4097     Handle<JSFunction> f = Handle<JSFunction>::cast(
   4098         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4099             CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
   4100     CHECK(f->is_compiled());
   4101     const int kAgingThreshold = 6;
   4102     for (int i = 0; i < kAgingThreshold; i++) {
   4103       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   4104     }
   4105 
   4106     shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
   4107   }
   4108 
   4109   // Prepare a shared function info eligible for code flushing that will
   4110   // represent the dangling tail of the candidate list.
   4111   Handle<SharedFunctionInfo> shared2;
   4112   {
   4113     HandleScope inner_scope(isolate);
   4114     LocalContext env;
   4115     CompileRun("function flushMe() { return 0; }"
   4116                "flushMe(1);");
   4117 
   4118     Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
   4119         *v8::Local<v8::Function>::Cast(CcTest::global()
   4120                                            ->Get(env.local(), v8_str("flushMe"))
   4121                                            .ToLocalChecked())));
   4122     CHECK(f->is_compiled());
   4123     const int kAgingThreshold = 6;
   4124     for (int i = 0; i < kAgingThreshold; i++) {
   4125       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   4126     }
   4127 
   4128     shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
   4129   }
   4130 
   4131   // Simulate incremental marking and collect code flushing candidates.
   4132   heap::SimulateIncrementalMarking(heap);
   4133   CHECK(shared1->code()->gc_metadata() != NULL);
   4134 
   4135   // Optimize function and make sure the unoptimized code is replaced.
   4136   CompileRun("%OptimizeFunctionOnNextCall(g);"
   4137              "g(false);");
   4138 
   4139   // Finish garbage collection cycle.
   4140   heap->CollectAllGarbage();
   4141   CHECK(shared1->code()->gc_metadata() == NULL);
   4142 }
   4143 
   4144 
   4145 TEST(Regress169928) {
   4146   i::FLAG_allow_natives_syntax = true;
   4147   i::FLAG_crankshaft = false;
   4148   CcTest::InitializeVM();
   4149   Isolate* isolate = CcTest::i_isolate();
   4150   LocalContext env;
   4151   Factory* factory = isolate->factory();
   4152   v8::HandleScope scope(CcTest::isolate());
   4153 
   4154   // Some flags turn Scavenge collections into Mark-sweep collections
   4155   // and hence are incompatible with this test case.
   4156   if (FLAG_gc_global || FLAG_stress_compaction) return;
   4157 
   4158   // Prepare the environment
   4159   CompileRun("function fastliteralcase(literal, value) {"
   4160              "    literal[0] = value;"
   4161              "    return literal;"
   4162              "}"
   4163              "function get_standard_literal() {"
   4164              "    var literal = [1, 2, 3];"
   4165              "    return literal;"
   4166              "}"
   4167              "obj = fastliteralcase(get_standard_literal(), 1);"
   4168              "obj = fastliteralcase(get_standard_literal(), 1.5);"
   4169              "obj = fastliteralcase(get_standard_literal(), 2);");
   4170 
   4171   // Prepare the heap.
   4172   v8::Local<v8::String> mote_code_string =
   4173       v8_str("fastliteralcase(mote, 2.5);");
   4174 
   4175   v8::Local<v8::String> array_name = v8_str("mote");
   4176   CHECK(CcTest::global()
   4177             ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
   4178             .FromJust());
   4179 
   4180   // First make sure we flip spaces
   4181   CcTest::heap()->CollectGarbage(NEW_SPACE);
   4182 
   4183   // Allocate the object.
   4184   Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
   4185   array_data->set(0, Smi::FromInt(1));
   4186   array_data->set(1, Smi::FromInt(2));
   4187 
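          // Fill new space so that only JSArray::kSize + AllocationMemento::kSize +
          // kPointerSize bytes remain free for the allocations that follow.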
   4188   heap::AllocateAllButNBytes(
   4189       CcTest::heap()->new_space(),
   4190       JSArray::kSize + AllocationMemento::kSize + kPointerSize);
   4191 
   4192   Handle<JSArray> array =
   4193       factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
   4194 
   4195   CHECK_EQ(Smi::FromInt(2), array->length());
   4196   CHECK(array->HasFastSmiOrObjectElements());
   4197 
   4198   // We need a filler the size of an AllocationMemento object, plus an
   4199   // extra pointer-sized word of fill.
   4200   HeapObject* obj = NULL;
   4201   AllocationResult allocation =
   4202       CcTest::heap()->new_space()->AllocateRawUnaligned(
   4203           AllocationMemento::kSize + kPointerSize);
   4204   CHECK(allocation.To(&obj));
   4205   Address addr_obj = obj->address();
   4206   CcTest::heap()->CreateFillerObjectAt(addr_obj,
   4207                                        AllocationMemento::kSize + kPointerSize,
   4208                                        ClearRecordedSlots::kNo);
   4209 
   4210   // Give the array a name, making sure not to allocate strings.
   4211   v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
   4212   CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());
   4213 
   4214   // This should crash with a protection violation if we are running a build
   4215   // with the bug.
   4216   AlwaysAllocateScope aa_scope(isolate);
   4217   v8::Script::Compile(env.local(), mote_code_string)
   4218       .ToLocalChecked()
   4219       ->Run(env.local())
   4220       .ToLocalChecked();
   4221 }
   4222 
   4223 
   4224 #ifdef DEBUG
   4225 TEST(Regress513507) {
   4226   i::FLAG_flush_optimized_code_cache = false;
   4227   i::FLAG_allow_natives_syntax = true;
   4228   i::FLAG_gc_global = true;
   4229   CcTest::InitializeVM();
   4230   Isolate* isolate = CcTest::i_isolate();
   4231   LocalContext env;
   4232   Heap* heap = isolate->heap();
   4233   HandleScope scope(isolate);
   4234 
   4235   // Prepare a function whose optimized code map we can use.
   4236   Handle<SharedFunctionInfo> shared;
   4237   {
   4238     HandleScope inner_scope(isolate);
   4239     CompileRun("function f() { return 1 }"
   4240                "f(); %OptimizeFunctionOnNextCall(f); f();");
   4241 
   4242     Handle<JSFunction> f = Handle<JSFunction>::cast(
   4243         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4244             CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
   4245     shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
   4246     CompileRun("f = null");
   4247   }
   4248 
   4249   // Prepare optimized code that we can use.
   4250   Handle<Code> code;
   4251   {
   4252     HandleScope inner_scope(isolate);
   4253     CompileRun("function g() { return 2 }"
   4254                "g(); %OptimizeFunctionOnNextCall(g); g();");
   4255 
   4256     Handle<JSFunction> g = Handle<JSFunction>::cast(
   4257         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4258             CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
   4259     code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
   4260     if (!code->is_optimized_code()) return;
   4261   }
   4262 
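          // Assemble the remaining inputs needed by AddToOptimizedCodeMap below:
          // a type feedback vector and literals array based on the shared function
          // info, plus the currently entered context.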
   4263   Handle<TypeFeedbackVector> vector =
   4264       TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
   4265   Handle<LiteralsArray> lit =
   4266       LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
   4267   Handle<Context> context(isolate->context());
   4268 
   4269   // Add the new code several times to the optimized code map and also set an
   4270   // allocation timeout so that expanding the code map will trigger a GC.
   4271   heap->set_allocation_timeout(5);
   4272   FLAG_gc_interval = 1000;
   4273   for (int i = 0; i < 10; ++i) {
   4274     BailoutId id = BailoutId(i);
   4275     SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
   4276   }
   4277 }
   4278 #endif  // DEBUG
   4279 
   4280 
   4281 TEST(Regress514122) {
   4282   i::FLAG_flush_optimized_code_cache = false;
   4283   i::FLAG_allow_natives_syntax = true;
   4284   CcTest::InitializeVM();
   4285   Isolate* isolate = CcTest::i_isolate();
   4286   LocalContext env;
   4287   Heap* heap = isolate->heap();
   4288   HandleScope scope(isolate);
   4289 
   4290   // Perform one initial GC to enable code flushing.
   4291   CcTest::heap()->CollectAllGarbage();
   4292 
   4293   // Prepare a function whose optimized code map we can use.
   4294   Handle<SharedFunctionInfo> shared;
   4295   {
   4296     HandleScope inner_scope(isolate);
   4297     CompileRun("function f() { return 1 }"
   4298                "f(); %OptimizeFunctionOnNextCall(f); f();");
   4299 
   4300     Handle<JSFunction> f = Handle<JSFunction>::cast(
   4301         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4302             CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
   4303     shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
   4304     CompileRun("f = null");
   4305   }
   4306 
   4307   // Prepare optimized code that we can use.
   4308   Handle<Code> code;
   4309   {
   4310     HandleScope inner_scope(isolate);
   4311     CompileRun("function g() { return 2 }"
   4312                "g(); %OptimizeFunctionOnNextCall(g); g();");
   4313 
   4314     Handle<JSFunction> g = Handle<JSFunction>::cast(
   4315         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4316             CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
   4317     code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
   4318     if (!code->is_optimized_code()) return;
   4319   }
   4320 
   4321   Handle<TypeFeedbackVector> vector =
   4322       TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
   4323   Handle<LiteralsArray> lit =
   4324       LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
   4325   Handle<Context> context(isolate->context());
   4326 
   4327   // Add the code several times to the optimized code map.
   4328   for (int i = 0; i < 3; ++i) {
   4329     HandleScope inner_scope(isolate);
   4330     BailoutId id = BailoutId(i);
   4331     SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
   4332   }
   4333   shared->optimized_code_map()->Print();
   4334 
   4335   // Add the code with a literals array to be evacuated.
   4336   Page* evac_page;
   4337   {
   4338     HandleScope inner_scope(isolate);
   4339     AlwaysAllocateScope always_allocate(isolate);
   4340     // Make sure the literal is placed on an old-space evacuation candidate.
   4341     heap::SimulateFullSpace(heap->old_space());
   4342 
   4343     // Make sure the number of literals is > 0.
   4344     Handle<LiteralsArray> lit =
   4345         LiteralsArray::New(isolate, vector, 23, TENURED);
   4346 
   4347     evac_page = Page::FromAddress(lit->address());
   4348     BailoutId id = BailoutId(100);
   4349     SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
   4350   }
   4351 
   4352   // Heap is ready, force {evac_page} to become an evacuation candidate and
   4353   // simulate incremental marking to enqueue the optimized code map.
   4354   FLAG_manual_evacuation_candidates_selection = true;
   4355   evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
   4356   heap::SimulateIncrementalMarking(heap);
   4357 
   4358   // No matter whether reachable or not, {boomer} is doomed.
   4359   Handle<Object> boomer(shared->optimized_code_map(), isolate);
   4360 
   4361   // Add the code several times to the optimized code map. This will leave old
   4362   // copies of the optimized code map unreachable but still marked.
   4363   for (int i = 3; i < 6; ++i) {
   4364     HandleScope inner_scope(isolate);
   4365     BailoutId id = BailoutId(i);
   4366     SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
   4367   }
   4368 
   4369   // Trigger a GC to flush out the bug.
   4370   heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
   4371   boomer->Print();
   4372 }
   4373 
   4374 
   4375 TEST(OptimizedCodeMapReuseEntries) {
   4376   i::FLAG_flush_optimized_code_cache = false;
   4377   i::FLAG_allow_natives_syntax = true;
   4378   // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
   4379   // run this test.
   4380   if (i::FLAG_turbo) return;
   4381   CcTest::InitializeVM();
   4382   v8::Isolate* v8_isolate = CcTest::isolate();
   4383   Isolate* isolate = CcTest::i_isolate();
   4384   Heap* heap = isolate->heap();
   4385   HandleScope scope(isolate);
   4386 
   4387   // Create 3 contexts, allow the 2nd one to be disposed, and verify that
   4388   // a 4th context will re-use the weak slots in the optimized code map
   4389   // to hold data, rather than expanding the map.
   4390   v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
   4391   const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
   4392   v8::ScriptCompiler::Source script_source(
   4393       v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
   4394           .ToLocalChecked());
   4395   v8::Local<v8::UnboundScript> indep =
   4396       v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
   4397           .ToLocalChecked();
   4398   const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
   4399   // Perform one initial GC to enable code flushing.
   4400   heap->CollectAllGarbage();
   4401 
   4402   c1->Enter();
   4403   indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
   4404   CompileRun(toplevel);
   4405 
   4406   Handle<SharedFunctionInfo> shared;
   4407   Handle<JSFunction> foo = Handle<JSFunction>::cast(
   4408       v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
   4409           CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
   4410   CHECK(foo->shared()->is_compiled());
   4411   shared = handle(foo->shared());
   4412   c1->Exit();
   4413 
   4414   {
   4415     HandleScope scope(isolate);
   4416     v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
   4417     c2->Enter();
   4418     indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
   4419     CompileRun(toplevel);
   4420     c2->Exit();
   4421   }
   4422 
   4423   {
   4424     HandleScope scope(isolate);
   4425     v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
   4426     c3->Enter();
   4427     indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
   4428     CompileRun(toplevel);
   4429     c3->Exit();
   4430 
   4431     // Now, collect garbage. Context c2 should have no roots to it, and its
   4432     // entry in the optimized code map should be free for a new context.
   4433     for (int i = 0; i < 4; i++) {
   4434       heap->CollectAllGarbage();
   4435     }
   4436 
   4437     Handle<FixedArray> optimized_code_map =
   4438         handle(shared->optimized_code_map());
   4439     // There should be 3 entries in the map.
   4440     CHECK_EQ(
   4441         3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
   4442             SharedFunctionInfo::kEntryLength));
   4443     // But one of them (formerly for c2) should be cleared.
   4444     int cleared_count = 0;
   4445     for (int i = SharedFunctionInfo::kEntriesStart;
   4446          i < optimized_code_map->length();
   4447          i += SharedFunctionInfo::kEntryLength) {
   4448       cleared_count +=
   4449           WeakCell::cast(
   4450               optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
   4451                   ->cleared()
   4452               ? 1
   4453               : 0;
   4454     }
   4455     CHECK_EQ(1, cleared_count);
   4456     // Verify that a new context uses the cleared entry rather than
   4457     // creating a new optimized code map array.
   4459     // optimized code map array.
   4460     v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
   4461     c4->Enter();
   4462     indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
   4463     CompileRun(toplevel);
   4464     c4->Exit();
   4465     CHECK_EQ(*optimized_code_map, shared->optimized_code_map());
   4466 
   4467     // Now each entry is in use.
   4468     cleared_count = 0;
   4469     for (int i = SharedFunctionInfo::kEntriesStart;
   4470          i < optimized_code_map->length();
   4471          i += SharedFunctionInfo::kEntryLength) {
   4472       cleared_count +=
   4473           WeakCell::cast(
   4474               optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
   4475                   ->cleared()
   4476               ? 1
   4477               : 0;
   4478     }
   4479     CHECK_EQ(0, cleared_count);
   4480   }
   4481 }
   4482 
   4483 
   4484 TEST(Regress513496) {
   4485   i::FLAG_flush_optimized_code_cache = false;
   4486   i::FLAG_allow_natives_syntax = true;
   4487   CcTest::InitializeVM();
   4488   Isolate* isolate = CcTest::i_isolate();