      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include <stdlib.h>
     29 
     30 #include "v8.h"
     31 
     32 #include "compilation-cache.h"
     33 #include "execution.h"
     34 #include "factory.h"
     35 #include "macro-assembler.h"
     36 #include "global-handles.h"
     37 #include "stub-cache.h"
     38 #include "cctest.h"
     39 
     40 using namespace v8::internal;
     41 
     42 
     43 // Go through all incremental marking steps in one swoop.
     44 static void SimulateIncrementalMarking() {
     45   MarkCompactCollector* collector = HEAP->mark_compact_collector();
     46   IncrementalMarking* marking = HEAP->incremental_marking();
     47   if (collector->IsConcurrentSweepingInProgress()) {
     48     collector->WaitUntilSweepingCompleted();
     49   }
     50   CHECK(marking->IsMarking() || marking->IsStopped());
     51   if (marking->IsStopped()) {
     52     marking->Start();
     53   }
     54   CHECK(marking->IsMarking());
     55   while (!marking->IsComplete()) {
     56     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
     57   }
     58   CHECK(marking->IsComplete());
     59 }
     60 
     61 
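        // Verify a map's basic invariants: it is a heap object, its map is the
        // meta map, and its instance type and instance size match the expected
        // values.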
     62 static void CheckMap(Map* map, int type, int instance_size) {
     63   CHECK(map->IsHeapObject());
     64 #ifdef DEBUG
     65   CHECK(HEAP->Contains(map));
     66 #endif
     67   CHECK_EQ(HEAP->meta_map(), map->map());
     68   CHECK_EQ(type, map->instance_type());
     69   CHECK_EQ(instance_size, map->instance_size());
     70 }
     71 
     72 
     73 TEST(HeapMaps) {
     74   CcTest::InitializeVM();
     75   CheckMap(HEAP->meta_map(), MAP_TYPE, Map::kSize);
     76   CheckMap(HEAP->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
     77   CheckMap(HEAP->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
     78   CheckMap(HEAP->string_map(), STRING_TYPE, kVariableSizeSentinel);
     79 }
     80 
     81 
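        // Check that an oddball converts to the expected string via
        // Execution::ToString.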
     82 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
     83   CHECK(obj->IsOddball());
     84   bool exc;
     85   Object* print_string =
     86       *Execution::ToString(Handle<Object>(obj, isolate), &exc);
     87   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
     88 }
     89 
     90 
     91 static void CheckSmi(Isolate* isolate, int value, const char* string) {
     92   bool exc;
     93   Object* print_string =
     94       *Execution::ToString(Handle<Object>(Smi::FromInt(value), isolate), &exc);
     95   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
     96 }
     97 
     98 
     99 static void CheckNumber(Isolate* isolate, double value, const char* string) {
    100   Object* obj = HEAP->NumberFromDouble(value)->ToObjectChecked();
    101   CHECK(obj->IsNumber());
    102   bool exc;
    103   Object* print_string =
    104       *Execution::ToString(Handle<Object>(obj, isolate), &exc);
    105   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
    106 }
    107 
    108 
    109 static void CheckFindCodeObject(Isolate* isolate) {
    110   // Test FindCodeObject
    111 #define __ assm.
    112 
    113   Assembler assm(isolate, NULL, 0);
    114 
    115   __ nop();  // supported on all architectures
    116 
    117   CodeDesc desc;
    118   assm.GetCode(&desc);
    119   Heap* heap = isolate->heap();
    120   Object* code = heap->CreateCode(
    121       desc,
    122       Code::ComputeFlags(Code::STUB),
    123       Handle<Code>())->ToObjectChecked();
    124   CHECK(code->IsCode());
    125 
    126   HeapObject* obj = HeapObject::cast(code);
    127   Address obj_addr = obj->address();
    128 
    129   for (int i = 0; i < obj->Size(); i += kPointerSize) {
    130     Object* found = isolate->FindCodeObject(obj_addr + i);
    131     CHECK_EQ(code, found);
    132   }
    133 
    134   Object* copy = heap->CreateCode(
    135       desc,
    136       Code::ComputeFlags(Code::STUB),
    137       Handle<Code>())->ToObjectChecked();
    138   CHECK(copy->IsCode());
    139   HeapObject* obj_copy = HeapObject::cast(copy);
    140   Object* not_right = isolate->FindCodeObject(obj_copy->address() +
    141                                               obj_copy->Size() / 2);
    142   CHECK(not_right != code);
    143 }
    144 
    145 
    146 TEST(HeapObjects) {
    147   CcTest::InitializeVM();
    148   Isolate* isolate = Isolate::Current();
    149   Factory* factory = isolate->factory();
    150   Heap* heap = isolate->heap();
    151 
    152   HandleScope sc(isolate);
    153   Object* value = heap->NumberFromDouble(1.000123)->ToObjectChecked();
    154   CHECK(value->IsHeapNumber());
    155   CHECK(value->IsNumber());
    156   CHECK_EQ(1.000123, value->Number());
    157 
    158   value = heap->NumberFromDouble(1.0)->ToObjectChecked();
    159   CHECK(value->IsSmi());
    160   CHECK(value->IsNumber());
    161   CHECK_EQ(1.0, value->Number());
    162 
    163   value = heap->NumberFromInt32(1024)->ToObjectChecked();
    164   CHECK(value->IsSmi());
    165   CHECK(value->IsNumber());
    166   CHECK_EQ(1024.0, value->Number());
    167 
    168   value = heap->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
    169   CHECK(value->IsSmi());
    170   CHECK(value->IsNumber());
    171   CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
    172 
    173   value = heap->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
    174   CHECK(value->IsSmi());
    175   CHECK(value->IsNumber());
    176   CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
    177 
    178 #ifndef V8_TARGET_ARCH_X64
    179   // TODO(lrn): We need a NumberFromIntptr function in order to test this.
    180   value = heap->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
    181   CHECK(value->IsHeapNumber());
    182   CHECK(value->IsNumber());
    183   CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
    184 #endif
    185 
    186   MaybeObject* maybe_value =
    187       heap->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
    188   value = maybe_value->ToObjectChecked();
    189   CHECK(value->IsHeapNumber());
    190   CHECK(value->IsNumber());
    191   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
    192            value->Number());
    193 
    194   maybe_value = heap->NumberFromUint32(static_cast<uint32_t>(1) << 31);
    195   value = maybe_value->ToObjectChecked();
    196   CHECK(value->IsHeapNumber());
    197   CHECK(value->IsNumber());
    198   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
    199            value->Number());
    200 
    201   // nan oddball checks
    202   CHECK(heap->nan_value()->IsNumber());
    203   CHECK(std::isnan(heap->nan_value()->Number()));
    204 
    205   Handle<String> s = factory->NewStringFromAscii(CStrVector("fisk hest "));
    206   CHECK(s->IsString());
    207   CHECK_EQ(10, s->length());
    208 
    209   String* object_string = String::cast(heap->Object_string());
    210   CHECK(
    211       Isolate::Current()->context()->global_object()->HasLocalProperty(
    212           object_string));
    213 
    214   // Check ToString for oddballs
    215   CheckOddball(isolate, heap->true_value(), "true");
    216   CheckOddball(isolate, heap->false_value(), "false");
    217   CheckOddball(isolate, heap->null_value(), "null");
    218   CheckOddball(isolate, heap->undefined_value(), "undefined");
    219 
    220   // Check ToString for Smis
    221   CheckSmi(isolate, 0, "0");
    222   CheckSmi(isolate, 42, "42");
    223   CheckSmi(isolate, -42, "-42");
    224 
    225   // Check ToString for Numbers
    226   CheckNumber(isolate, 1.1, "1.1");
    227 
    228   CheckFindCodeObject(isolate);
    229 }
    230 
    231 
    232 TEST(Tagging) {
    233   CcTest::InitializeVM();
    234   int request = 24;
    235   CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
    236   CHECK(Smi::FromInt(42)->IsSmi());
    237   CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
    238   CHECK_EQ(NEW_SPACE,
    239            Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
    240   CHECK_EQ(OLD_POINTER_SPACE,
    241            Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
    242   CHECK(Failure::Exception()->IsFailure());
    243   CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
    244   CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
    245 }
    246 
    247 
    248 TEST(GarbageCollection) {
    249   CcTest::InitializeVM();
    250   Isolate* isolate = Isolate::Current();
    251   Heap* heap = isolate->heap();
    252   Factory* factory = isolate->factory();
    253 
    254   HandleScope sc(isolate);
    255   // Check GC.
    256   heap->CollectGarbage(NEW_SPACE);
    257 
    258   Handle<String> name = factory->InternalizeUtf8String("theFunction");
    259   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    260   Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
    261   Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
    262 
    263   {
    264     HandleScope inner_scope(isolate);
    265     // Allocate a function and keep it in global object's property.
    266     Handle<JSFunction> function =
    267         factory->NewFunction(name, factory->undefined_value());
    268     Handle<Map> initial_map =
    269         factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    270     function->set_initial_map(*initial_map);
    271     Isolate::Current()->context()->global_object()->SetProperty(
    272         *name, *function, NONE, kNonStrictMode)->ToObjectChecked();
    273     // Allocate an object.  Unrooted after leaving the scope.
    274     Handle<JSObject> obj = factory->NewJSObject(function);
    275     obj->SetProperty(
    276         *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
    277     obj->SetProperty(
    278         *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
    279 
    280     CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
    281     CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
    282   }
    283 
    284   heap->CollectGarbage(NEW_SPACE);
    285 
    286   // Function should be alive.
    287   CHECK(Isolate::Current()->context()->global_object()->
    288         HasLocalProperty(*name));
    289   // Check function is retained.
    290   Object* func_value = Isolate::Current()->context()->global_object()->
    291       GetProperty(*name)->ToObjectChecked();
    292   CHECK(func_value->IsJSFunction());
    293   Handle<JSFunction> function(JSFunction::cast(func_value));
    294 
    295   {
    296     HandleScope inner_scope(isolate);
    297     // Allocate another object, make it reachable from global.
    298     Handle<JSObject> obj = factory->NewJSObject(function);
    299     Isolate::Current()->context()->global_object()->SetProperty(
    300         *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
    301     obj->SetProperty(
    302         *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
    303   }
    304 
    305   // After gc, it should survive.
    306   heap->CollectGarbage(NEW_SPACE);
    307 
    308   CHECK(Isolate::Current()->context()->global_object()->
    309         HasLocalProperty(*obj_name));
    310   CHECK(Isolate::Current()->context()->global_object()->
    311         GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
    312   Object* obj = Isolate::Current()->context()->global_object()->
    313       GetProperty(*obj_name)->ToObjectChecked();
    314   JSObject* js_obj = JSObject::cast(obj);
    315   CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
    316 }
    317 
    318 
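        // Allocate a string from UTF-8 data and verify its length and character
        // contents.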
    319 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
    320   HandleScope scope(isolate);
    321   Handle<String> s = isolate->factory()->NewStringFromUtf8(CStrVector(string));
    322   CHECK_EQ(StrLength(string), s->length());
    323   for (int index = 0; index < s->length(); index++) {
    324     CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
    325   }
    326 }
    327 
    328 
    329 TEST(String) {
    330   CcTest::InitializeVM();
    331   Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
    332 
    333   VerifyStringAllocation(isolate, "a");
    334   VerifyStringAllocation(isolate, "ab");
    335   VerifyStringAllocation(isolate, "abc");
    336   VerifyStringAllocation(isolate, "abcd");
    337   VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
    338 }
    339 
    340 
    341 TEST(LocalHandles) {
    342   CcTest::InitializeVM();
    343   Isolate* isolate = Isolate::Current();
    344   Factory* factory = isolate->factory();
    345 
    346   v8::HandleScope scope(CcTest::isolate());
    347   const char* name = "Kasper the spunky";
    348   Handle<String> string = factory->NewStringFromAscii(CStrVector(name));
    349   CHECK_EQ(StrLength(name), string->length());
    350 }
    351 
    352 
    353 TEST(GlobalHandles) {
    354   CcTest::InitializeVM();
    355   Isolate* isolate = Isolate::Current();
    356   Heap* heap = isolate->heap();
    357   Factory* factory = isolate->factory();
    358   GlobalHandles* global_handles = isolate->global_handles();
    359 
    360   Handle<Object> h1;
    361   Handle<Object> h2;
    362   Handle<Object> h3;
    363   Handle<Object> h4;
    364 
    365   {
    366     HandleScope scope(isolate);
    367 
    368     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    369     Handle<Object> u = factory->NewNumber(1.12344);
    370 
    371     h1 = global_handles->Create(*i);
    372     h2 = global_handles->Create(*u);
    373     h3 = global_handles->Create(*i);
    374     h4 = global_handles->Create(*u);
    375   }
    376 
    377   // After GC, the objects should survive.
    378   heap->CollectGarbage(NEW_SPACE);
    379 
    380   CHECK((*h1)->IsString());
    381   CHECK((*h2)->IsHeapNumber());
    382   CHECK((*h3)->IsString());
    383   CHECK((*h4)->IsHeapNumber());
    384 
    385   CHECK_EQ(*h3, *h1);
    386   global_handles->Destroy(h1.location());
    387   global_handles->Destroy(h3.location());
    388 
    389   CHECK_EQ(*h4, *h2);
    390   global_handles->Destroy(h2.location());
    391   global_handles->Destroy(h4.location());
    392 }
    393 
    394 
    395 static bool WeakPointerCleared = false;
    396 
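        // Weak callback used by the global-handle tests below: records that the
        // weak handle was cleared (the tests pass 1234 as the id) and disposes of
        // the handle.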
    397 static void TestWeakGlobalHandleCallback(v8::Isolate* isolate,
    398                                          v8::Persistent<v8::Value>* handle,
    399                                          void* id) {
    400   if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
    401   handle->Dispose(isolate);
    402 }
    403 
    404 
    405 TEST(WeakGlobalHandlesScavenge) {
    406   i::FLAG_stress_compaction = false;
    407   CcTest::InitializeVM();
    408   Isolate* isolate = Isolate::Current();
    409   Heap* heap = isolate->heap();
    410   Factory* factory = isolate->factory();
    411   GlobalHandles* global_handles = isolate->global_handles();
    412 
    413   WeakPointerCleared = false;
    414 
    415   Handle<Object> h1;
    416   Handle<Object> h2;
    417 
    418   {
    419     HandleScope scope(isolate);
    420 
    421     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    422     Handle<Object> u = factory->NewNumber(1.12344);
    423 
    424     h1 = global_handles->Create(*i);
    425     h2 = global_handles->Create(*u);
    426   }
    427 
    428   global_handles->MakeWeak(h2.location(),
    429                            reinterpret_cast<void*>(1234),
    430                            &TestWeakGlobalHandleCallback);
    431 
    432   // Scavenge treats weak pointers as normal roots.
    433   heap->PerformScavenge();
    434 
    435   CHECK((*h1)->IsString());
    436   CHECK((*h2)->IsHeapNumber());
    437 
    438   CHECK(!WeakPointerCleared);
    439   CHECK(!global_handles->IsNearDeath(h2.location()));
    440   CHECK(!global_handles->IsNearDeath(h1.location()));
    441 
    442   global_handles->Destroy(h1.location());
    443   global_handles->Destroy(h2.location());
    444 }
    445 
    446 
    447 TEST(WeakGlobalHandlesMark) {
    448   CcTest::InitializeVM();
    449   Isolate* isolate = Isolate::Current();
    450   Heap* heap = isolate->heap();
    451   Factory* factory = isolate->factory();
    452   GlobalHandles* global_handles = isolate->global_handles();
    453 
    454   WeakPointerCleared = false;
    455 
    456   Handle<Object> h1;
    457   Handle<Object> h2;
    458 
    459   {
    460     HandleScope scope(isolate);
    461 
    462     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    463     Handle<Object> u = factory->NewNumber(1.12344);
    464 
    465     h1 = global_handles->Create(*i);
    466     h2 = global_handles->Create(*u);
    467   }
    468 
    469   // Make sure the objects are promoted.
    470   heap->CollectGarbage(OLD_POINTER_SPACE);
    471   heap->CollectGarbage(NEW_SPACE);
    472   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
    473 
    474   global_handles->MakeWeak(h2.location(),
    475                            reinterpret_cast<void*>(1234),
    476                            &TestWeakGlobalHandleCallback);
    477   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
    478   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
    479 
    480   // Incremental marking potentially marked handles before they turned weak.
    481   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    482 
    483   CHECK((*h1)->IsString());
    484 
    485   CHECK(WeakPointerCleared);
    486   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
    487 
    488   global_handles->Destroy(h1.location());
    489 }
    490 
    491 
    492 TEST(DeleteWeakGlobalHandle) {
    493   i::FLAG_stress_compaction = false;
    494   CcTest::InitializeVM();
    495   Isolate* isolate = Isolate::Current();
    496   Heap* heap = isolate->heap();
    497   Factory* factory = isolate->factory();
    498   GlobalHandles* global_handles = isolate->global_handles();
    499 
    500   WeakPointerCleared = false;
    501 
    502   Handle<Object> h;
    503 
    504   {
    505     HandleScope scope(isolate);
    506 
    507     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    508     h = global_handles->Create(*i);
    509   }
    510 
    511   global_handles->MakeWeak(h.location(),
    512                            reinterpret_cast<void*>(1234),
    513                            &TestWeakGlobalHandleCallback);
    514 
    515   // Scavenge does not recognize the weak reference.
    516   heap->PerformScavenge();
    517 
    518   CHECK(!WeakPointerCleared);
    519 
    520   // Mark-compact treats the weak reference properly.
    521   heap->CollectGarbage(OLD_POINTER_SPACE);
    522 
    523   CHECK(WeakPointerCleared);
    524 }
    525 
    526 
    527 static const char* not_so_random_string_table[] = {
    528   "abstract",
    529   "boolean",
    530   "break",
    531   "byte",
    532   "case",
    533   "catch",
    534   "char",
    535   "class",
    536   "const",
    537   "continue",
    538   "debugger",
    539   "default",
    540   "delete",
    541   "do",
    542   "double",
    543   "else",
    544   "enum",
    545   "export",
    546   "extends",
    547   "false",
    548   "final",
    549   "finally",
    550   "float",
    551   "for",
    552   "function",
    553   "goto",
    554   "if",
    555   "implements",
    556   "import",
    557   "in",
    558   "instanceof",
    559   "int",
    560   "interface",
    561   "long",
    562   "native",
    563   "new",
    564   "null",
    565   "package",
    566   "private",
    567   "protected",
    568   "public",
    569   "return",
    570   "short",
    571   "static",
    572   "super",
    573   "switch",
    574   "synchronized",
    575   "this",
    576   "throw",
    577   "throws",
    578   "transient",
    579   "true",
    580   "try",
    581   "typeof",
    582   "var",
    583   "void",
    584   "volatile",
    585   "while",
    586   "with",
    587   0
    588 };
    589 
    590 
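        // Internalize each string twice and check that both lookups yield the same
        // internalized string with the expected contents.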
    591 static void CheckInternalizedStrings(const char** strings) {
    592   for (const char* string = *strings; *strings != 0; string = *strings++) {
    593     Object* a;
    594     MaybeObject* maybe_a = HEAP->InternalizeUtf8String(string);
    595     // InternalizeUtf8String may return a failure if a GC is needed.
    596     if (!maybe_a->ToObject(&a)) continue;
    597     CHECK(a->IsInternalizedString());
    598     Object* b;
    599     MaybeObject* maybe_b = HEAP->InternalizeUtf8String(string);
    600     if (!maybe_b->ToObject(&b)) continue;
    601     CHECK_EQ(b, a);
    602     CHECK(String::cast(b)->IsUtf8EqualTo(CStrVector(string)));
    603   }
    604 }
    605 
    606 
    607 TEST(StringTable) {
    608   CcTest::InitializeVM();
    609 
    610   CheckInternalizedStrings(not_so_random_string_table);
    611   CheckInternalizedStrings(not_so_random_string_table);
    612 }
    613 
    614 
    615 TEST(FunctionAllocation) {
    616   CcTest::InitializeVM();
    617   Isolate* isolate = Isolate::Current();
    618   Factory* factory = isolate->factory();
    619 
    620   v8::HandleScope sc(CcTest::isolate());
    621   Handle<String> name = factory->InternalizeUtf8String("theFunction");
    622   Handle<JSFunction> function =
    623       factory->NewFunction(name, factory->undefined_value());
    624   Handle<Map> initial_map =
    625       factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    626   function->set_initial_map(*initial_map);
    627 
    628   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    629   Handle<JSObject> obj = factory->NewJSObject(function);
    630   obj->SetProperty(
    631       *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
    632   CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
    633   // Check that we can add properties to function objects.
    634   function->SetProperty(
    635       *prop_name, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
    636   CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
    637 }
    638 
    639 
    640 TEST(ObjectProperties) {
    641   CcTest::InitializeVM();
    642   Isolate* isolate = Isolate::Current();
    643   Factory* factory = isolate->factory();
    644 
    645   v8::HandleScope sc(CcTest::isolate());
    646   String* object_string = String::cast(HEAP->Object_string());
    647   Object* raw_object = Isolate::Current()->context()->global_object()->
    648       GetProperty(object_string)->ToObjectChecked();
    649   JSFunction* object_function = JSFunction::cast(raw_object);
    650   Handle<JSFunction> constructor(object_function);
    651   Handle<JSObject> obj = factory->NewJSObject(constructor);
    652   Handle<String> first = factory->InternalizeUtf8String("first");
    653   Handle<String> second = factory->InternalizeUtf8String("second");
    654 
    655   // check for empty
    656   CHECK(!obj->HasLocalProperty(*first));
    657 
    658   // add first
    659   obj->SetProperty(
    660       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    661   CHECK(obj->HasLocalProperty(*first));
    662 
    663   // delete first
    664   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
    665   CHECK(!obj->HasLocalProperty(*first));
    666 
    667   // add first and then second
    668   obj->SetProperty(
    669       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    670   obj->SetProperty(
    671       *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
    672   CHECK(obj->HasLocalProperty(*first));
    673   CHECK(obj->HasLocalProperty(*second));
    674 
    675   // delete first and then second
    676   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
    677   CHECK(obj->HasLocalProperty(*second));
    678   JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
    679   CHECK(!obj->HasLocalProperty(*first));
    680   CHECK(!obj->HasLocalProperty(*second));
    681 
    682   // add first and then second
    683   obj->SetProperty(
    684       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    685   obj->SetProperty(
    686       *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
    687   CHECK(obj->HasLocalProperty(*first));
    688   CHECK(obj->HasLocalProperty(*second));
    689 
    690   // delete second and then first
    691   JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
    692   CHECK(obj->HasLocalProperty(*first));
    693   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
    694   CHECK(!obj->HasLocalProperty(*first));
    695   CHECK(!obj->HasLocalProperty(*second));
    696 
    697   // check string and internalized string match
    698   const char* string1 = "fisk";
    699   Handle<String> s1 = factory->NewStringFromAscii(CStrVector(string1));
    700   obj->SetProperty(
    701       *s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    702   Handle<String> s1_string = factory->InternalizeUtf8String(string1);
    703   CHECK(obj->HasLocalProperty(*s1_string));
    704 
    705   // check internalized string and string match
    706   const char* string2 = "fugl";
    707   Handle<String> s2_string = factory->InternalizeUtf8String(string2);
    708   obj->SetProperty(
    709       *s2_string, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    710   Handle<String> s2 = factory->NewStringFromAscii(CStrVector(string2));
    711   CHECK(obj->HasLocalProperty(*s2));
    712 }
    713 
    714 
    715 TEST(JSObjectMaps) {
    716   CcTest::InitializeVM();
    717   Isolate* isolate = Isolate::Current();
    718   Factory* factory = isolate->factory();
    719 
    720   v8::HandleScope sc(CcTest::isolate());
    721   Handle<String> name = factory->InternalizeUtf8String("theFunction");
    722   Handle<JSFunction> function =
    723       factory->NewFunction(name, factory->undefined_value());
    724   Handle<Map> initial_map =
    725       factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    726   function->set_initial_map(*initial_map);
    727 
    728   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    729   Handle<JSObject> obj = factory->NewJSObject(function);
    730 
    731   // Set a property.
    732   obj->SetProperty(
    733       *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
    734   CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
    735 
    736   // Check the map has changed
    737   CHECK(*initial_map != obj->map());
    738 }
    739 
    740 
    741 TEST(JSArray) {
    742   CcTest::InitializeVM();
    743   Isolate* isolate = Isolate::Current();
    744   Factory* factory = isolate->factory();
    745 
    746   v8::HandleScope sc(CcTest::isolate());
    747   Handle<String> name = factory->InternalizeUtf8String("Array");
    748   Object* raw_object = Isolate::Current()->context()->global_object()->
    749       GetProperty(*name)->ToObjectChecked();
    750   Handle<JSFunction> function = Handle<JSFunction>(
    751       JSFunction::cast(raw_object));
    752 
    753   // Allocate the object.
    754   Handle<JSObject> object = factory->NewJSObject(function);
    755   Handle<JSArray> array = Handle<JSArray>::cast(object);
    756   // We just initialized the VM, no heap allocation failure yet.
    757   array->Initialize(0)->ToObjectChecked();
    758 
    759   // Set array length to 0.
    760   array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
    761   CHECK_EQ(Smi::FromInt(0), array->length());
    762   // Must be in fast mode.
    763   CHECK(array->HasFastSmiOrObjectElements());
    764 
    765   // array[length] = name.
    766   array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
    767   CHECK_EQ(Smi::FromInt(1), array->length());
    768   CHECK_EQ(array->GetElement(0), *name);
    769 
    770   // Set array length with larger than smi value.
    771   Handle<Object> length =
    772       factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
    773   array->SetElementsLength(*length)->ToObjectChecked();
    774 
    775   uint32_t int_length = 0;
    776   CHECK(length->ToArrayIndex(&int_length));
    777   CHECK_EQ(*length, array->length());
    778   CHECK(array->HasDictionaryElements());  // Must be in slow mode.
    779 
    780   // array[length] = name.
    781   array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
    782   uint32_t new_int_length = 0;
    783   CHECK(array->length()->ToArrayIndex(&new_int_length));
    784   CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
    785   CHECK_EQ(array->GetElement(int_length), *name);
    786   CHECK_EQ(array->GetElement(0), *name);
    787 }
    788 
    789 
    790 TEST(JSObjectCopy) {
    791   CcTest::InitializeVM();
    792   Isolate* isolate = Isolate::Current();
    793   Factory* factory = isolate->factory();
    794 
    795   v8::HandleScope sc(CcTest::isolate());
    796   String* object_string = String::cast(HEAP->Object_string());
    797   Object* raw_object = Isolate::Current()->context()->global_object()->
    798       GetProperty(object_string)->ToObjectChecked();
    799   JSFunction* object_function = JSFunction::cast(raw_object);
    800   Handle<JSFunction> constructor(object_function);
    801   Handle<JSObject> obj = factory->NewJSObject(constructor);
    802   Handle<String> first = factory->InternalizeUtf8String("first");
    803   Handle<String> second = factory->InternalizeUtf8String("second");
    804 
    805   obj->SetProperty(
    806       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    807   obj->SetProperty(
    808       *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
    809 
    810   obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
    811   obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
    812 
    813   // Make the clone.
    814   Handle<JSObject> clone = Copy(obj);
    815   CHECK(!clone.is_identical_to(obj));
    816 
    817   CHECK_EQ(obj->GetElement(0), clone->GetElement(0));
    818   CHECK_EQ(obj->GetElement(1), clone->GetElement(1));
    819 
    820   CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
    821   CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));
    822 
    823   // Flip the values.
    824   clone->SetProperty(
    825       *first, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
    826   clone->SetProperty(
    827       *second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
    828 
    829   clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
    830   clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
    831 
    832   CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
    833   CHECK_EQ(obj->GetElement(0), clone->GetElement(1));
    834 
    835   CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
    836   CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
    837 }
    838 
    839 
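        // Check that one-byte and multi-byte (UTF-8) strings of various lengths,
        // both internalized and freshly allocated, report the expected character
        // length.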
    840 TEST(StringAllocation) {
    841   CcTest::InitializeVM();
    842   Isolate* isolate = Isolate::Current();
    843   Factory* factory = isolate->factory();
    844 
    845   const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
    846   for (int length = 0; length < 100; length++) {
    847     v8::HandleScope scope(CcTest::isolate());
    848     char* non_ascii = NewArray<char>(3 * length + 1);
    849     char* ascii = NewArray<char>(length + 1);
    850     non_ascii[3 * length] = 0;
    851     ascii[length] = 0;
    852     for (int i = 0; i < length; i++) {
    853       ascii[i] = 'a';
    854       non_ascii[3 * i] = chars[0];
    855       non_ascii[3 * i + 1] = chars[1];
    856       non_ascii[3 * i + 2] = chars[2];
    857     }
    858     Handle<String> non_ascii_sym =
    859         factory->InternalizeUtf8String(
    860             Vector<const char>(non_ascii, 3 * length));
    861     CHECK_EQ(length, non_ascii_sym->length());
    862     Handle<String> ascii_sym =
    863         factory->InternalizeOneByteString(OneByteVector(ascii, length));
    864     CHECK_EQ(length, ascii_sym->length());
    865     Handle<String> non_ascii_str =
    866         factory->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
    867     non_ascii_str->Hash();
    868     CHECK_EQ(length, non_ascii_str->length());
    869     Handle<String> ascii_str =
    870         factory->NewStringFromUtf8(Vector<const char>(ascii, length));
    871     ascii_str->Hash();
    872     CHECK_EQ(length, ascii_str->length());
    873     DeleteArray(non_ascii);
    874     DeleteArray(ascii);
    875   }
    876 }
    877 
    878 
    879 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
    880   // Count the number of objects found in the heap.
    881   int found_count = 0;
    882   heap->EnsureHeapIsIterable();
    883   HeapIterator iterator(heap);
    884   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    885     for (int i = 0; i < size; i++) {
    886       if (*objs[i] == obj) {
    887         found_count++;
    888       }
    889     }
    890   }
    891   return found_count;
    892 }
    893 
    894 
    895 TEST(Iteration) {
    896   CcTest::InitializeVM();
    897   Isolate* isolate = Isolate::Current();
    898   Factory* factory = isolate->factory();
    899   v8::HandleScope scope(CcTest::isolate());
    900 
    901   // Array of objects to scan the heap for.
    902   const int objs_count = 6;
    903   Handle<Object> objs[objs_count];
    904   int next_objs_index = 0;
    905 
    906   // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
    907   objs[next_objs_index++] = factory->NewJSArray(10);
    908   objs[next_objs_index++] = factory->NewJSArray(10,
    909                                                 FAST_HOLEY_ELEMENTS,
    910                                                 TENURED);
    911 
    912   // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
    913   objs[next_objs_index++] =
    914       factory->NewStringFromAscii(CStrVector("abcdefghij"));
    915   objs[next_objs_index++] =
    916       factory->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
    917 
    918   // Allocate a large string (for large object space).
    919   int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
    920   char* str = new char[large_size];
    921   for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
    922   str[large_size - 1] = '\0';
    923   objs[next_objs_index++] =
    924       factory->NewStringFromAscii(CStrVector(str), TENURED);
    925   delete[] str;
    926 
    927   // Add a Map object to look for.
    928   objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
    929 
    930   CHECK_EQ(objs_count, next_objs_index);
    931   CHECK_EQ(objs_count, ObjectsFoundInHeap(HEAP, objs, objs_count));
    932 }
    933 
    934 
    935 TEST(EmptyHandleEscapeFrom) {
    936   CcTest::InitializeVM();
    937 
    938   v8::HandleScope scope(CcTest::isolate());
    939   Handle<JSObject> runaway;
    940 
    941   {
    942       v8::HandleScope nested(CcTest::isolate());
    943       Handle<JSObject> empty;
    944       runaway = empty.EscapeFrom(&nested);
    945   }
    946 
    947   CHECK(runaway.is_null());
    948 }
    949 
    950 
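        // Compute the number of elements of a FixedArray that occupies |size| bytes.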
    951 static int LenFromSize(int size) {
    952   return (size - FixedArray::kHeaderSize) / kPointerSize;
    953 }
    954 
    955 
    956 TEST(Regression39128) {
    957   // Test case for crbug.com/39128.
    958   CcTest::InitializeVM();
    959   Isolate* isolate = Isolate::Current();
    960   Factory* factory = isolate->factory();
    961 
    962   // Increase the chance of 'bump-the-pointer' allocation in old space.
    963   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    964 
    965   v8::HandleScope scope(CcTest::isolate());
    966 
    967   // The plan: create JSObject which references objects in new space.
    968   // Then clone this object (forcing it to go into old space) and check
    969   // that region dirty marks are updated correctly.
    970 
    971   // Step 1: prepare a map for the object.  We add 1 inobject property to it.
    972   Handle<JSFunction> object_ctor(
    973       Isolate::Current()->native_context()->object_function());
    974   CHECK(object_ctor->has_initial_map());
    975   Handle<Map> object_map(object_ctor->initial_map());
    976   // Create a map with single inobject property.
    977   Handle<Map> my_map = factory->CopyMap(object_map, 1);
    978   int n_properties = my_map->inobject_properties();
    979   CHECK_GT(n_properties, 0);
    980 
    981   int object_size = my_map->instance_size();
    982 
    983   // Step 2: allocate a lot of objects so as to almost fill new space: we need
    984   // just enough room left to allocate the JSObject and thus fill the new space.
    985 
    986   int allocation_amount = Min(FixedArray::kMaxSize,
    987                               Page::kMaxNonCodeHeapObjectSize + kPointerSize);
    988   int allocation_len = LenFromSize(allocation_amount);
    989   NewSpace* new_space = HEAP->new_space();
    990   Address* top_addr = new_space->allocation_top_address();
    991   Address* limit_addr = new_space->allocation_limit_address();
    992   while ((*limit_addr - *top_addr) > allocation_amount) {
    993     CHECK(!HEAP->always_allocate());
    994     Object* array = HEAP->AllocateFixedArray(allocation_len)->ToObjectChecked();
    995     CHECK(!array->IsFailure());
    996     CHECK(new_space->Contains(array));
    997   }
    998 
    999   // Step 3: now allocate fixed array and JSObject to fill the whole new space.
   1000   int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
   1001   int fixed_array_len = LenFromSize(to_fill);
   1002   CHECK(fixed_array_len < FixedArray::kMaxLength);
   1003 
   1004   CHECK(!HEAP->always_allocate());
   1005   Object* array = HEAP->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
   1006   CHECK(!array->IsFailure());
   1007   CHECK(new_space->Contains(array));
   1008 
   1009   Object* object = HEAP->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
   1010   CHECK(new_space->Contains(object));
   1011   JSObject* jsobject = JSObject::cast(object);
   1012   CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
   1013   CHECK_EQ(0, jsobject->properties()->length());
   1014   // Create a reference to object in new space in jsobject.
   1015   jsobject->FastPropertyAtPut(-1, array);
   1016 
   1017   CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
   1018 
   1019   // Step 4: clone jsobject, but force always allocate first to create a clone
   1020   // in old pointer space.
   1021   Address old_pointer_space_top = HEAP->old_pointer_space()->top();
   1022   AlwaysAllocateScope aa_scope;
   1023   Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked();
   1024   JSObject* clone = JSObject::cast(clone_obj);
   1025   if (clone->address() != old_pointer_space_top) {
   1026     // Alas, it was allocated from the free list, so we cannot do the checks.
   1027     return;
   1028   }
   1029   CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
   1030 }
   1031 
   1032 
   1033 TEST(TestCodeFlushing) {
   1034   // If we do not flush code this test is invalid.
   1035   if (!FLAG_flush_code) return;
   1036   i::FLAG_allow_natives_syntax = true;
   1037   CcTest::InitializeVM();
   1038   Isolate* isolate = Isolate::Current();
   1039   Factory* factory = isolate->factory();
   1040   v8::HandleScope scope(CcTest::isolate());
   1041   const char* source = "function foo() {"
   1042                        "  var x = 42;"
   1043                        "  var y = 42;"
   1044                        "  var z = x + y;"
   1045                        "};"
   1046                        "foo()";
   1047   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1048 
   1049   // This compile will add the code to the compilation cache.
   1050   { v8::HandleScope scope(CcTest::isolate());
   1051     CompileRun(source);
   1052   }
   1053 
   1054   // Check function is compiled.
   1055   Object* func_value = Isolate::Current()->context()->global_object()->
   1056       GetProperty(*foo_name)->ToObjectChecked();
   1057   CHECK(func_value->IsJSFunction());
   1058   Handle<JSFunction> function(JSFunction::cast(func_value));
   1059   CHECK(function->shared()->is_compiled());
   1060 
   1061   // The code will survive at least two GCs.
   1062   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1063   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1064   CHECK(function->shared()->is_compiled());
   1065 
   1066   // Simulate several GCs that use full marking.
   1067   const int kAgingThreshold = 6;
   1068   for (int i = 0; i < kAgingThreshold; i++) {
   1069     HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1070   }
   1071 
   1072   // foo should no longer be in the compilation cache
   1073   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1074   CHECK(!function->is_compiled() || function->IsOptimized());
   1075   // Call foo to get it recompiled.
   1076   CompileRun("foo()");
   1077   CHECK(function->shared()->is_compiled());
   1078   CHECK(function->is_compiled());
   1079 }
   1080 
   1081 
   1082 TEST(TestCodeFlushingIncremental) {
   1083   // If we do not flush code this test is invalid.
   1084   if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
   1085   i::FLAG_allow_natives_syntax = true;
   1086   CcTest::InitializeVM();
   1087   Isolate* isolate = Isolate::Current();
   1088   Factory* factory = isolate->factory();
   1089   v8::HandleScope scope(CcTest::isolate());
   1090   const char* source = "function foo() {"
   1091                        "  var x = 42;"
   1092                        "  var y = 42;"
   1093                        "  var z = x + y;"
   1094                        "};"
   1095                        "foo()";
   1096   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1097 
   1098   // This compile will add the code to the compilation cache.
   1099   { v8::HandleScope scope(CcTest::isolate());
   1100     CompileRun(source);
   1101   }
   1102 
   1103   // Check function is compiled.
   1104   Object* func_value = Isolate::Current()->context()->global_object()->
   1105       GetProperty(*foo_name)->ToObjectChecked();
   1106   CHECK(func_value->IsJSFunction());
   1107   Handle<JSFunction> function(JSFunction::cast(func_value));
   1108   CHECK(function->shared()->is_compiled());
   1109 
   1110   // The code will survive at least two GCs.
   1111   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1112   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1113   CHECK(function->shared()->is_compiled());
   1114 
   1115   // Simulate several GCs that use incremental marking.
   1116   const int kAgingThreshold = 6;
   1117   for (int i = 0; i < kAgingThreshold; i++) {
   1118     SimulateIncrementalMarking();
   1119     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1120   }
   1121   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1122   CHECK(!function->is_compiled() || function->IsOptimized());
   1123 
   1124   // This compile will compile the function again.
   1125   { v8::HandleScope scope(CcTest::isolate());
   1126     CompileRun("foo();");
   1127   }
   1128 
   1129   // Simulate several GCs that use incremental marking but make sure
   1130   // the loop breaks once the function is enqueued as a candidate.
   1131   for (int i = 0; i < kAgingThreshold; i++) {
   1132     SimulateIncrementalMarking();
   1133     if (!function->next_function_link()->IsUndefined()) break;
   1134     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1135   }
   1136 
   1137   // Force optimization while incremental marking is active and while
   1138   // the function is enqueued as a candidate.
   1139   { v8::HandleScope scope(CcTest::isolate());
   1140     CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
   1141   }
   1142 
   1143   // Simulate one final GC to make sure the candidate queue is sane.
   1144   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1145   CHECK(function->shared()->is_compiled() || !function->IsOptimized());
   1146   CHECK(function->is_compiled() || !function->IsOptimized());
   1147 }
   1148 
   1149 
   1150 TEST(TestCodeFlushingIncrementalScavenge) {
   1151   // If we do not flush code this test is invalid.
   1152   if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
   1153   i::FLAG_allow_natives_syntax = true;
   1154   CcTest::InitializeVM();
   1155   Isolate* isolate = Isolate::Current();
   1156   Factory* factory = isolate->factory();
   1157   v8::HandleScope scope(CcTest::isolate());
   1158   const char* source = "var foo = function() {"
   1159                        "  var x = 42;"
   1160                        "  var y = 42;"
   1161                        "  var z = x + y;"
   1162                        "};"
   1163                        "foo();"
   1164                        "var bar = function() {"
   1165                        "  var x = 23;"
   1166                        "};"
   1167                        "bar();";
   1168   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1169   Handle<String> bar_name = factory->InternalizeUtf8String("bar");
   1170 
   1171   // Perform one initial GC to enable code flushing.
   1172   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1173 
   1174   // This compile will add the code to the compilation cache.
   1175   { v8::HandleScope scope(CcTest::isolate());
   1176     CompileRun(source);
   1177   }
   1178 
   1179   // Check functions are compiled.
   1180   Object* func_value = Isolate::Current()->context()->global_object()->
   1181       GetProperty(*foo_name)->ToObjectChecked();
   1182   CHECK(func_value->IsJSFunction());
   1183   Handle<JSFunction> function(JSFunction::cast(func_value));
   1184   CHECK(function->shared()->is_compiled());
   1185   Object* func_value2 = Isolate::Current()->context()->global_object()->
   1186       GetProperty(*bar_name)->ToObjectChecked();
   1187   CHECK(func_value2->IsJSFunction());
   1188   Handle<JSFunction> function2(JSFunction::cast(func_value2));
   1189   CHECK(function2->shared()->is_compiled());
   1190 
   1191   // Clear references to functions so that one of them can die.
   1192   { v8::HandleScope scope(CcTest::isolate());
   1193     CompileRun("foo = 0; bar = 0;");
   1194   }
   1195 
   1196   // Bump the code age so that flushing is triggered while the function
   1197   // object is still located in new-space.
   1198   const int kAgingThreshold = 6;
   1199   for (int i = 0; i < kAgingThreshold; i++) {
   1200     function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   1201     function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   1202   }
   1203 
   1204   // Simulate incremental marking so that the functions are enqueued as
   1205   // code flushing candidates. Then kill one of the functions. Finally
   1206   // perform a scavenge while incremental marking is still running.
   1207   SimulateIncrementalMarking();
   1208   *function2.location() = NULL;
   1209   HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");
   1210 
   1211   // Simulate one final GC to make sure the candidate queue is sane.
   1212   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1213   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
   1214   CHECK(!function->is_compiled() || function->IsOptimized());
   1215 }
   1216 
   1217 
   1218 TEST(TestCodeFlushingIncrementalAbort) {
   1219   // If we do not flush code this test is invalid.
   1220   if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
   1221   i::FLAG_allow_natives_syntax = true;
   1222   CcTest::InitializeVM();
   1223   Isolate* isolate = Isolate::Current();
   1224   Factory* factory = isolate->factory();
   1225   Heap* heap = isolate->heap();
   1226   v8::HandleScope scope(CcTest::isolate());
   1227   const char* source = "function foo() {"
   1228                        "  var x = 42;"
   1229                        "  var y = 42;"
   1230                        "  var z = x + y;"
   1231                        "};"
   1232                        "foo()";
   1233   Handle<String> foo_name = factory->InternalizeUtf8String("foo");
   1234 
   1235   // This compile will add the code to the compilation cache.
   1236   { v8::HandleScope scope(CcTest::isolate());
   1237     CompileRun(source);
   1238   }
   1239 
   1240   // Check function is compiled.
   1241   Object* func_value = Isolate::Current()->context()->global_object()->
   1242       GetProperty(*foo_name)->ToObjectChecked();
   1243   CHECK(func_value->IsJSFunction());
   1244   Handle<JSFunction> function(JSFunction::cast(func_value));
   1245   CHECK(function->shared()->is_compiled());
   1246 
   1247   // The code will survive at least two GCs.
   1248   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1249   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1250   CHECK(function->shared()->is_compiled());
   1251 
   1252   // Bump the code age so that flushing is triggered.
   1253   const int kAgingThreshold = 6;
   1254   for (int i = 0; i < kAgingThreshold; i++) {
   1255     function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   1256   }
   1257 
   1258   // Simulate incremental marking so that the function is enqueued as
   1259   // code flushing candidate.
   1260   SimulateIncrementalMarking();
   1261 
   1262 #ifdef ENABLE_DEBUGGER_SUPPORT
   1263   // Enable the debugger and add a breakpoint while incremental marking
   1264   // is running so that incremental marking aborts and code flushing is
   1265   // disabled.
   1266   int position = 0;
   1267   Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
   1268   isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
   1269   isolate->debug()->ClearAllBreakPoints();
   1270 #endif  // ENABLE_DEBUGGER_SUPPORT
   1271 
   1272   // Force optimization now that code flushing is disabled.
   1273   { v8::HandleScope scope(CcTest::isolate());
   1274     CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
   1275   }
   1276 
   1277   // Simulate one final GC to make sure the candidate queue is sane.
   1278   heap->CollectAllGarbage(Heap::kNoGCFlags);
   1279   CHECK(function->shared()->is_compiled() || !function->IsOptimized());
   1280   CHECK(function->is_compiled() || !function->IsOptimized());
   1281 }
   1282 
   1283 
   1284 // Count the number of native contexts in the weak list of native contexts.
   1285 int CountNativeContexts() {
   1286   int count = 0;
   1287   Object* object = HEAP->native_contexts_list();
   1288   while (!object->IsUndefined()) {
   1289     count++;
   1290     object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
   1291   }
   1292   return count;
   1293 }
   1294 
   1295 
   1296 // Count the number of user functions in the weak list of optimized
   1297 // functions attached to a native context.
   1298 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
   1299   int count = 0;
   1300   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
   1301   Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
   1302   while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
   1303     count++;
   1304     object = JSFunction::cast(object)->next_function_link();
   1305   }
   1306   return count;
   1307 }
   1308 
   1309 
   1310 TEST(TestInternalWeakLists) {
   1311   v8::V8::Initialize();
   1312 
   1313   // Some flags turn Scavenge collections into Mark-sweep collections
   1314   // and hence are incompatible with this test case.
   1315   if (FLAG_gc_global || FLAG_stress_compaction) return;
   1316 
   1317   static const int kNumTestContexts = 10;
   1318 
   1319   Isolate* isolate = Isolate::Current();
   1320   Heap* heap = isolate->heap();
   1321   HandleScope scope(isolate);
   1322   v8::Handle<v8::Context> ctx[kNumTestContexts];
   1323 
   1324   CHECK_EQ(0, CountNativeContexts());
   1325 
   1326   // Create a number of native contexts which get linked together.
   1327   for (int i = 0; i < kNumTestContexts; i++) {
   1328     ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
   1329 
   1330     // Collect garbage that might have been created by one of the
   1331     // installed extensions.
   1332     isolate->compilation_cache()->Clear();
   1333     heap->CollectAllGarbage(Heap::kNoGCFlags);
   1334 
   1335     bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
   1336 
   1337     CHECK_EQ(i + 1, CountNativeContexts());
   1338 
   1339     ctx[i]->Enter();
   1340 
   1341     // Create a handle scope so no function objects get stuck in the outer
   1342     // handle scope.
   1343     HandleScope scope(isolate);
   1344     const char* source = "function f1() { };"
   1345                          "function f2() { };"
   1346                          "function f3() { };"
   1347                          "function f4() { };"
   1348                          "function f5() { };";
   1349     CompileRun(source);
   1350     CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
   1351     CompileRun("f1()");
   1352     CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
   1353     CompileRun("f2()");
   1354     CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
   1355     CompileRun("f3()");
   1356     CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
   1357     CompileRun("f4()");
   1358     CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
   1359     CompileRun("f5()");
   1360     CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
   1361 
   1362     // Remove the reference to function f1.
   1363     CompileRun("f1=null");
   1364 
   1365     // Scavenge treats these references as strong.
   1366     for (int j = 0; j < 10; j++) {
   1367       HEAP->PerformScavenge();
   1368       CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
   1369     }
   1370 
   1371     // Mark compact handles the weak references.
   1372     isolate->compilation_cache()->Clear();
   1373     heap->CollectAllGarbage(Heap::kNoGCFlags);
   1374     CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
   1375 
   1376     // Get rid of f3 and f5 in the same way.
   1377     CompileRun("f3=null");
   1378     for (int j = 0; j < 10; j++) {
   1379       HEAP->PerformScavenge();
   1380       CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
   1381     }
   1382     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1383     CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
   1384     CompileRun("f5=null");
   1385     for (int j = 0; j < 10; j++) {
   1386       HEAP->PerformScavenge();
   1387       CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
   1388     }
   1389     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1390     CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
   1391 
   1392     ctx[i]->Exit();
   1393   }
   1394 
   1395   // Force compilation cache cleanup.
   1396   HEAP->NotifyContextDisposed();
   1397   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1398 
   1399   // Dispose the native contexts one by one.
   1400   for (int i = 0; i < kNumTestContexts; i++) {
   1401     // TODO(dcarney): is there a better way to do this?
   1402     i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
   1403     *unsafe = HEAP->undefined_value();
   1404     ctx[i].Clear();
   1405 
   1406     // Scavenge treats these references as strong.
   1407     for (int j = 0; j < 10; j++) {
   1408       HEAP->PerformScavenge();
   1409       CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
   1410     }
   1411 
   1412     // Mark compact handles the weak references.
   1413     HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1414     CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
   1415   }
   1416 
   1417   CHECK_EQ(0, CountNativeContexts());
   1418 }
   1419 
   1420 
   1421 // Count the number of native contexts in the weak list of native contexts
   1422 // causing a GC after the specified number of elements.
   1423 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
   1424   Heap* heap = isolate->heap();
   1425   int count = 0;
   1426   Handle<Object> object(heap->native_contexts_list(), isolate);
   1427   while (!object->IsUndefined()) {
   1428     count++;
   1429     if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
   1430     object =
   1431         Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
   1432                        isolate);
   1433   }
   1434   return count;
   1435 }
   1436 
   1437 
   1438 // Count the number of user functions in the weak list of optimized
   1439 // functions attached to a native context causing a GC after the
   1440 // specified number of elements.
   1441 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
   1442                                              int n) {
   1443   int count = 0;
   1444   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
   1445   Isolate* isolate = icontext->GetIsolate();
   1446   Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
   1447                         isolate);
   1448   while (object->IsJSFunction() &&
   1449          !Handle<JSFunction>::cast(object)->IsBuiltin()) {
   1450     count++;
   1451     if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
   1452     object = Handle<Object>(
   1453         Object::cast(JSFunction::cast(*object)->next_function_link()),
   1454         isolate);
   1455   }
   1456   return count;
   1457 }
   1458 
   1459 
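        // Test that the weak lists above can be traversed correctly even when GCs
        // are triggered while iterating them.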
   1460 TEST(TestInternalWeakListsTraverseWithGC) {
   1461   v8::V8::Initialize();
   1462   Isolate* isolate = Isolate::Current();
   1463 
   1464   static const int kNumTestContexts = 10;
   1465 
   1466   HandleScope scope(isolate);
   1467   v8::Handle<v8::Context> ctx[kNumTestContexts];
   1468 
   1469   CHECK_EQ(0, CountNativeContexts());
   1470 
   1471   // Create a number of contexts and check the length of the weak list both
   1472   // with and without GCs while iterating the list.
   1473   for (int i = 0; i < kNumTestContexts; i++) {
   1474     ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
   1475     CHECK_EQ(i + 1, CountNativeContexts());
   1476     CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
   1477   }
   1478 
   1479   bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
   1480 
   1481   // Compile a number of functions and check the length of the weak list of
   1482   // optimized functions both with and without GCs while iterating the list.
   1483   ctx[0]->Enter();
   1484   const char* source = "function f1() { };"
   1485                        "function f2() { };"
   1486                        "function f3() { };"
   1487                        "function f4() { };"
   1488                        "function f5() { };";
   1489   CompileRun(source);
   1490   CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
   1491   CompileRun("f1()");
   1492   CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
   1493   CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
   1494   CompileRun("f2()");
   1495   CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
   1496   CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
   1497   CompileRun("f3()");
   1498   CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
   1499   CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
   1500   CompileRun("f4()");
   1501   CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
   1502   CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
   1503   CompileRun("f5()");
   1504   CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
   1505   CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
   1506 
   1507   ctx[0]->Exit();
   1508 }
   1509 
   1510 
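        // Test that Heap::SizeOfObjects stays accurate while lazy sweeping of the
        // old pointer space is still in progress.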
   1511 TEST(TestSizeOfObjects) {
   1512   v8::V8::Initialize();
   1513 
   1514   // Get initial heap size after several full GCs, which will stabilize
   1515   // the heap size and return with sweeping finished completely.
   1516   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1517   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1518   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1519   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1520   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1521   CHECK(HEAP->old_pointer_space()->IsLazySweepingComplete());
   1522   int initial_size = static_cast<int>(HEAP->SizeOfObjects());
   1523 
   1524   {
   1525     // Allocate objects on several different old-space pages so that
   1526     // lazy sweeping kicks in for subsequent GC runs.
   1527     AlwaysAllocateScope always_allocate;
   1528     int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
   1529     for (int i = 1; i <= 100; i++) {
   1530       HEAP->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
   1531       CHECK_EQ(initial_size + i * filler_size,
   1532                static_cast<int>(HEAP->SizeOfObjects()));
   1533     }
   1534   }
   1535 
   1536   // The heap size should go back to the initial size after a full GC, even
   1537   // though sweeping has not finished yet.
   1538   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1539 
   1540   // Normally sweeping would not be complete here, but no guarantees.
   1541 
   1542   CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
   1543 
   1544   // Advancing the sweeper step-wise should not change the heap size.
   1545   while (!HEAP->old_pointer_space()->IsLazySweepingComplete()) {
   1546     HEAP->old_pointer_space()->AdvanceSweeper(KB);
   1547     CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
   1548   }
   1549 }
   1550 
   1551 
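        // Compare Heap::SizeOfObjects with the sum of object sizes reported by a
        // HeapIterator; the two results must agree to within 5%.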
   1552 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
   1553   CcTest::InitializeVM();
   1554   HEAP->EnsureHeapIsIterable();
   1555   intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
   1556   HeapIterator iterator(HEAP);
   1557   intptr_t size_of_objects_2 = 0;
   1558   for (HeapObject* obj = iterator.next();
   1559        obj != NULL;
   1560        obj = iterator.next()) {
   1561     if (!obj->IsFreeSpace()) {
   1562       size_of_objects_2 += obj->Size();
   1563     }
   1564   }
   1565   // Delta must be within 5% of the larger result.
   1566   // TODO(gc): Tighten this up by distinguishing between byte
   1567   // arrays that are real and those that merely mark free space
   1568   // on the heap.
   1569   if (size_of_objects_1 > size_of_objects_2) {
   1570     intptr_t delta = size_of_objects_1 - size_of_objects_2;
   1571     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
   1572            "Iterator: %" V8_PTR_PREFIX "d, "
   1573            "delta: %" V8_PTR_PREFIX "d\n",
   1574            size_of_objects_1, size_of_objects_2, delta);
   1575     CHECK_GT(size_of_objects_1 / 20, delta);
   1576   } else {
   1577     intptr_t delta = size_of_objects_2 - size_of_objects_1;
   1578     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
   1579            "Iterator: %" V8_PTR_PREFIX "d, "
   1580            "delta: %" V8_PTR_PREFIX "d\n",
   1581            size_of_objects_1, size_of_objects_2, delta);
   1582     CHECK_GT(size_of_objects_2 / 20, delta);
   1583   }
   1584 }
   1585 
   1586 
   1587 static void FillUpNewSpace(NewSpace* new_space) {
   1588   // Fill up the new space until it is completely full. Make sure
   1589   // that the scavenger does not undo the filling.
   1590   Heap* heap = new_space->heap();
   1591   Isolate* isolate = heap->isolate();
   1592   Factory* factory = isolate->factory();
   1593   HandleScope scope(isolate);
   1594   AlwaysAllocateScope always_allocate;
   1595   intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
   1596   intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
   1597   for (intptr_t i = 0; i < number_of_fillers; i++) {
   1598     CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
   1599   }
   1600 }
   1601 
   1602 
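        // Test that NewSpace::Grow() doubles the new space capacity and that
        // NewSpace::Shrink() halves it again once a scavenge has emptied the space.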
   1603 TEST(GrowAndShrinkNewSpace) {
   1604   CcTest::InitializeVM();
   1605   NewSpace* new_space = HEAP->new_space();
   1606 
   1607   if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
   1608       HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
   1609     // The max size cannot exceed the reserved size, since semispaces must
   1610     // always be within the reserved space.  We can't test new space growing and
   1611     // shrinking if the reserved size is the same as the minimum (initial) size.
   1612     return;
   1613   }
   1614 
   1615   // Explicitly growing should double the space capacity.
   1616   intptr_t old_capacity, new_capacity;
   1617   old_capacity = new_space->Capacity();
   1618   new_space->Grow();
   1619   new_capacity = new_space->Capacity();
   1620   CHECK(2 * old_capacity == new_capacity);
   1621 
   1622   old_capacity = new_space->Capacity();
   1623   FillUpNewSpace(new_space);
   1624   new_capacity = new_space->Capacity();
   1625   CHECK(old_capacity == new_capacity);
   1626 
   1627   // Explicitly shrinking should not affect space capacity.
   1628   old_capacity = new_space->Capacity();
   1629   new_space->Shrink();
   1630   new_capacity = new_space->Capacity();
   1631   CHECK(old_capacity == new_capacity);
   1632 
   1633   // Let the scavenger empty the new space.
   1634   HEAP->CollectGarbage(NEW_SPACE);
   1635   CHECK_LE(new_space->Size(), old_capacity);
   1636 
   1637   // Explicitly shrinking should halve the space capacity.
   1638   old_capacity = new_space->Capacity();
   1639   new_space->Shrink();
   1640   new_capacity = new_space->Capacity();
   1641   CHECK(old_capacity == 2 * new_capacity);
   1642 
   1643   // Consecutive shrinking should not affect space capacity.
   1644   old_capacity = new_space->Capacity();
   1645   new_space->Shrink();
   1646   new_space->Shrink();
   1647   new_space->Shrink();
   1648   new_capacity = new_space->Capacity();
   1649   CHECK(old_capacity == new_capacity);
   1650 }
   1651 
   1652 
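        // Test that collecting all available garbage shrinks a previously grown
        // new space back to its old capacity.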
   1653 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
   1654   CcTest::InitializeVM();
   1655 
   1656   if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
   1657       HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
   1658     // The max size cannot exceed the reserved size, since semispaces must
   1659     // always be within the reserved space.  We can't test new space growing and
   1660     // shrinking if the reserved size is the same as the minimum (initial) size.
   1661     return;
   1662   }
   1663 
   1664   v8::HandleScope scope(CcTest::isolate());
   1665   NewSpace* new_space = HEAP->new_space();
   1666   intptr_t old_capacity, new_capacity;
   1667   old_capacity = new_space->Capacity();
   1668   new_space->Grow();
   1669   new_capacity = new_space->Capacity();
   1670   CHECK(2 * old_capacity == new_capacity);
   1671   FillUpNewSpace(new_space);
   1672   HEAP->CollectAllAvailableGarbage();
   1673   new_capacity = new_space->Capacity();
   1674   CHECK(old_capacity == new_capacity);
   1675 }
   1676 
   1677 
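        // Count the global objects currently on the heap.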
   1678 static int NumberOfGlobalObjects() {
   1679   int count = 0;
   1680   HeapIterator iterator(HEAP);
   1681   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
   1682     if (obj->IsGlobalObject()) count++;
   1683   }
   1684   return count;
   1685 }
   1686 
   1687 
   1688 // Test that we don't embed maps from foreign contexts into
   1689 // optimized code.
   1690 TEST(LeakNativeContextViaMap) {
   1691   i::FLAG_allow_natives_syntax = true;
   1692   v8::Isolate* isolate = v8::Isolate::GetCurrent();
   1693   v8::HandleScope outer_scope(isolate);
   1694   v8::Persistent<v8::Context> ctx1p;
   1695   v8::Persistent<v8::Context> ctx2p;
   1696   {
   1697     v8::HandleScope scope(isolate);
   1698     ctx1p.Reset(isolate, v8::Context::New(isolate));
   1699     ctx2p.Reset(isolate, v8::Context::New(isolate));
   1700     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   1701   }
   1702 
   1703   HEAP->CollectAllAvailableGarbage();
   1704   CHECK_EQ(4, NumberOfGlobalObjects());
   1705 
   1706   {
   1707     v8::HandleScope inner_scope(isolate);
   1708     CompileRun("var v = {x: 42}");
   1709     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   1710     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   1711     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
   1712     ctx2->Enter();
   1713     ctx2->Global()->Set(v8_str("o"), v);
   1714     v8::Local<v8::Value> res = CompileRun(
   1715         "function f() { return o.x; }"
   1716         "for (var i = 0; i < 10; ++i) f();"
   1717         "%OptimizeFunctionOnNextCall(f);"
   1718         "f();");
   1719     CHECK_EQ(42, res->Int32Value());
   1720     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
   1721     ctx2->Exit();
   1722     v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
   1723     ctx1p.Dispose(isolate);
   1724     v8::V8::ContextDisposedNotification();
   1725   }
   1726   HEAP->CollectAllAvailableGarbage();
   1727   CHECK_EQ(2, NumberOfGlobalObjects());
   1728   ctx2p.Dispose(isolate);
   1729   HEAP->CollectAllAvailableGarbage();
   1730   CHECK_EQ(0, NumberOfGlobalObjects());
   1731 }
   1732 
   1733 
   1734 // Test that we don't embed functions from foreign contexts into
   1735 // optimized code.
   1736 TEST(LeakNativeContextViaFunction) {
   1737   i::FLAG_allow_natives_syntax = true;
   1738   v8::Isolate* isolate = v8::Isolate::GetCurrent();
   1739   v8::HandleScope outer_scope(isolate);
   1740   v8::Persistent<v8::Context> ctx1p;
   1741   v8::Persistent<v8::Context> ctx2p;
   1742   {
   1743     v8::HandleScope scope(isolate);
   1744     ctx1p.Reset(isolate, v8::Context::New(isolate));
   1745     ctx2p.Reset(isolate, v8::Context::New(isolate));
   1746     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   1747   }
   1748 
   1749   HEAP->CollectAllAvailableGarbage();
   1750   CHECK_EQ(4, NumberOfGlobalObjects());
   1751 
   1752   {
   1753     v8::HandleScope inner_scope(isolate);
   1754     CompileRun("var v = function() { return 42; }");
   1755     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   1756     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   1757     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
   1758     ctx2->Enter();
   1759     ctx2->Global()->Set(v8_str("o"), v);
   1760     v8::Local<v8::Value> res = CompileRun(
   1761         "function f(x) { return x(); }"
   1762         "for (var i = 0; i < 10; ++i) f(o);"
   1763         "%OptimizeFunctionOnNextCall(f);"
   1764         "f(o);");
   1765     CHECK_EQ(42, res->Int32Value());
   1766     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
   1767     ctx2->Exit();
   1768     ctx1->Exit();
   1769     ctx1p.Dispose(ctx1->GetIsolate());
   1770     v8::V8::ContextDisposedNotification();
   1771   }
   1772   HEAP->CollectAllAvailableGarbage();
   1773   CHECK_EQ(2, NumberOfGlobalObjects());
   1774   ctx2p.Dispose(isolate);
   1775   HEAP->CollectAllAvailableGarbage();
   1776   CHECK_EQ(0, NumberOfGlobalObjects());
   1777 }
   1778 
   1779 
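        // Test that we don't embed maps from foreign contexts into optimized code
        // via keyed element loads.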
   1780 TEST(LeakNativeContextViaMapKeyed) {
   1781   i::FLAG_allow_natives_syntax = true;
   1782   v8::Isolate* isolate = v8::Isolate::GetCurrent();
   1783   v8::HandleScope outer_scope(isolate);
   1784   v8::Persistent<v8::Context> ctx1p;
   1785   v8::Persistent<v8::Context> ctx2p;
   1786   {
   1787     v8::HandleScope scope(isolate);
   1788     ctx1p.Reset(isolate, v8::Context::New(isolate));
   1789     ctx2p.Reset(isolate, v8::Context::New(isolate));
   1790     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   1791   }
   1792 
   1793   HEAP->CollectAllAvailableGarbage();
   1794   CHECK_EQ(4, NumberOfGlobalObjects());
   1795 
   1796   {
   1797     v8::HandleScope inner_scope(isolate);
   1798     CompileRun("var v = [42, 43]");
   1799     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   1800     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   1801     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
   1802     ctx2->Enter();
   1803     ctx2->Global()->Set(v8_str("o"), v);
   1804     v8::Local<v8::Value> res = CompileRun(
   1805         "function f() { return o[0]; }"
   1806         "for (var i = 0; i < 10; ++i) f();"
   1807         "%OptimizeFunctionOnNextCall(f);"
   1808         "f();");
   1809     CHECK_EQ(42, res->Int32Value());
   1810     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
   1811     ctx2->Exit();
   1812     ctx1->Exit();
   1813     ctx1p.Dispose(ctx1->GetIsolate());
   1814     v8::V8::ContextDisposedNotification();
   1815   }
   1816   HEAP->CollectAllAvailableGarbage();
   1817   CHECK_EQ(2, NumberOfGlobalObjects());
   1818   ctx2p.Dispose(isolate);
   1819   HEAP->CollectAllAvailableGarbage();
   1820   CHECK_EQ(0, NumberOfGlobalObjects());
   1821 }
   1822 
   1823 
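        // Test that we don't embed maps from foreign contexts into optimized code
        // via prototype chain lookups.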
   1824 TEST(LeakNativeContextViaMapProto) {
   1825   i::FLAG_allow_natives_syntax = true;
   1826   v8::Isolate* isolate = v8::Isolate::GetCurrent();
   1827   v8::HandleScope outer_scope(isolate);
   1828   v8::Persistent<v8::Context> ctx1p;
   1829   v8::Persistent<v8::Context> ctx2p;
   1830   {
   1831     v8::HandleScope scope(isolate);
   1832     ctx1p.Reset(isolate, v8::Context::New(isolate));
   1833     ctx2p.Reset(isolate, v8::Context::New(isolate));
   1834     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
   1835   }
   1836 
   1837   HEAP->CollectAllAvailableGarbage();
   1838   CHECK_EQ(4, NumberOfGlobalObjects());
   1839 
   1840   {
   1841     v8::HandleScope inner_scope(isolate);
   1842     CompileRun("var v = { y: 42}");
   1843     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
   1844     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
   1845     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
   1846     ctx2->Enter();
   1847     ctx2->Global()->Set(v8_str("o"), v);
   1848     v8::Local<v8::Value> res = CompileRun(
   1849         "function f() {"
   1850         "  var p = {x: 42};"
   1851         "  p.__proto__ = o;"
   1852         "  return p.x;"
   1853         "}"
   1854         "for (var i = 0; i < 10; ++i) f();"
   1855         "%OptimizeFunctionOnNextCall(f);"
   1856         "f();");
   1857     CHECK_EQ(42, res->Int32Value());
   1858     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
   1859     ctx2->Exit();
   1860     ctx1->Exit();
   1861     ctx1p.Dispose(isolate);
   1862     v8::V8::ContextDisposedNotification();
   1863   }
   1864   HEAP->CollectAllAvailableGarbage();
   1865   CHECK_EQ(2, NumberOfGlobalObjects());
   1866   ctx2p.Dispose(isolate);
   1867   HEAP->CollectAllAvailableGarbage();
   1868   CHECK_EQ(0, NumberOfGlobalObjects());
   1869 }
   1870 
   1871 
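        // Test that calling optimized code containing an instanceof check while
        // incremental marking is in progress keeps marking consistent (exercises
        // the write barrier of the InstanceofStub).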
   1872 TEST(InstanceOfStubWriteBarrier) {
   1873   i::FLAG_allow_natives_syntax = true;
   1874 #ifdef VERIFY_HEAP
   1875   i::FLAG_verify_heap = true;
   1876 #endif
   1877 
   1878   CcTest::InitializeVM();
   1879   if (!i::V8::UseCrankshaft()) return;
   1880   if (i::FLAG_force_marking_deque_overflows) return;
   1881   v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
   1882 
   1883   {
   1884     v8::HandleScope scope(v8::Isolate::GetCurrent());
   1885     CompileRun(
   1886         "function foo () { }"
   1887         "function mkbar () { return new (new Function(\"\")) (); }"
   1888         "function f (x) { return (x instanceof foo); }"
   1889         "function g () { f(mkbar()); }"
   1890         "f(new foo()); f(new foo());"
   1891         "%OptimizeFunctionOnNextCall(f);"
   1892         "f(new foo()); g();");
   1893   }
   1894 
   1895   IncrementalMarking* marking = HEAP->incremental_marking();
   1896   marking->Abort();
   1897   marking->Start();
   1898 
   1899   Handle<JSFunction> f =
   1900       v8::Utils::OpenHandle(
   1901           *v8::Handle<v8::Function>::Cast(
   1902               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   1903 
   1904   CHECK(f->IsOptimized());
   1905 
   1906   while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
   1907          !marking->IsStopped()) {
   1908     // Discard any pending GC requests, otherwise we will get a GC when we
   1909     // enter the code below.
   1910     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
   1911   }
   1912 
   1913   CHECK(marking->IsMarking());
   1914 
   1915   {
   1916     v8::HandleScope scope(v8::Isolate::GetCurrent());
   1917     v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
   1918     v8::Handle<v8::Function> g =
   1919         v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
   1920     g->Call(global, 0, NULL);
   1921   }
   1922 
   1923   HEAP->incremental_marking()->set_should_hurry(true);
   1924   HEAP->CollectGarbage(OLD_POINTER_SPACE);
   1925 }
   1926 
   1927 
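        // Test that dead prototype transitions are cleared from a map during GC,
        // that the transition array is compacted, and that slots in an array on an
        // evacuation candidate are recorded correctly.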
   1928 TEST(PrototypeTransitionClearing) {
   1929   CcTest::InitializeVM();
   1930   Isolate* isolate = Isolate::Current();
   1931   Factory* factory = isolate->factory();
   1932   v8::HandleScope scope(CcTest::isolate());
   1933 
   1934   CompileRun(
   1935       "var base = {};"
   1936       "var live = [];"
   1937       "for (var i = 0; i < 10; i++) {"
   1938       "  var object = {};"
   1939       "  var prototype = {};"
   1940       "  object.__proto__ = prototype;"
   1941       "  if (i >= 3) live.push(object, prototype);"
   1942       "}");
   1943 
   1944   Handle<JSObject> baseObject =
   1945       v8::Utils::OpenHandle(
   1946           *v8::Handle<v8::Object>::Cast(
   1947               v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));
   1948 
   1949   // Verify that only dead prototype transitions are cleared.
   1950   CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
   1951   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   1952   const int transitions = 10 - 3;
   1953   CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
   1954 
   1955   // Verify that prototype transitions array was compacted.
   1956   FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
   1957   for (int i = 0; i < transitions; i++) {
   1958     int j = Map::kProtoTransitionHeaderSize +
   1959         i * Map::kProtoTransitionElementsPerEntry;
   1960     CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
   1961     Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
   1962     CHECK(proto->IsTheHole() || proto->IsJSObject());
   1963   }
   1964 
   1965   // Place the next prototype on an old-space evacuation candidate.
   1966   Handle<JSObject> prototype;
   1967   PagedSpace* space = HEAP->old_pointer_space();
   1968   {
   1969     AlwaysAllocateScope always_allocate;
   1970     SimulateFullSpace(space);
   1971     prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
   1972   }
   1973 
   1974   // Add a prototype on an evacuation candidate and verify that transition
   1975   // clearing correctly records slots in prototype transition array.
   1976   i::FLAG_always_compact = true;
   1977   Handle<Map> map(baseObject->map());
   1978   CHECK(!space->LastPage()->Contains(
   1979       map->GetPrototypeTransitions()->address()));
   1980   CHECK(space->LastPage()->Contains(prototype->address()));
   1981   JSObject::SetPrototype(baseObject, prototype, false);
   1982   CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
   1983   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   1984   CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
   1985 }
   1986 
   1987 
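        // Test that a SharedFunctionInfo's optimization counters and profiler ticks
        // are reset when the global IC age changes during incremental marking.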
   1988 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
   1989   i::FLAG_stress_compaction = false;
   1990   i::FLAG_allow_natives_syntax = true;
   1991 #ifdef VERIFY_HEAP
   1992   i::FLAG_verify_heap = true;
   1993 #endif
   1994 
   1995   CcTest::InitializeVM();
   1996   if (!i::V8::UseCrankshaft()) return;
   1997   v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
   1998 
   1999   {
   2000     v8::HandleScope scope(v8::Isolate::GetCurrent());
   2001     CompileRun(
   2002         "function f () {"
   2003         "  var s = 0;"
   2004         "  for (var i = 0; i < 100; i++)  s += i;"
   2005         "  return s;"
   2006         "}"
   2007         "f(); f();"
   2008         "%OptimizeFunctionOnNextCall(f);"
   2009         "f();");
   2010   }
   2011   Handle<JSFunction> f =
   2012       v8::Utils::OpenHandle(
   2013           *v8::Handle<v8::Function>::Cast(
   2014               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   2015   CHECK(f->IsOptimized());
   2016 
   2017   IncrementalMarking* marking = HEAP->incremental_marking();
   2018   marking->Abort();
   2019   marking->Start();
   2020 
   2021   // The following two calls will increment HEAP->global_ic_age().
   2022   const int kLongIdlePauseInMs = 1000;
   2023   v8::V8::ContextDisposedNotification();
   2024   v8::V8::IdleNotification(kLongIdlePauseInMs);
   2025 
   2026   while (!marking->IsStopped() && !marking->IsComplete()) {
   2027     marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
   2028   }
   2029   if (!marking->IsStopped() || marking->should_hurry()) {
   2030     // We don't normally finish a GC via Step(), we normally finish by
   2031     // setting the stack guard and then do the final steps in the stack
   2032     // guard interrupt.  But here we didn't ask for that, and there is no
   2033     // JS code running to trigger the interrupt, so we explicitly finalize
   2034     // here.
   2035     HEAP->CollectAllGarbage(Heap::kNoGCFlags,
   2036                             "Test finalizing incremental mark-sweep");
   2037   }
   2038 
   2039   CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
   2040   CHECK_EQ(0, f->shared()->opt_count());
   2041   CHECK_EQ(0, f->shared()->code()->profiler_ticks());
   2042 }
   2043 
   2044 
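        // Same as above, but with incremental marking aborted so that the counters
        // are reset by the full mark-sweep GC triggered by the idle notification.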
   2045 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
   2046   i::FLAG_stress_compaction = false;
   2047   i::FLAG_allow_natives_syntax = true;
   2048 #ifdef VERIFY_HEAP
   2049   i::FLAG_verify_heap = true;
   2050 #endif
   2051 
   2052   CcTest::InitializeVM();
   2053   if (!i::V8::UseCrankshaft()) return;
   2054   v8::HandleScope outer_scope(CcTest::isolate());
   2055 
   2056   {
   2057     v8::HandleScope scope(CcTest::isolate());
   2058     CompileRun(
   2059         "function f () {"
   2060         "  var s = 0;"
   2061         "  for (var i = 0; i < 100; i++)  s += i;"
   2062         "  return s;"
   2063         "}"
   2064         "f(); f();"
   2065         "%OptimizeFunctionOnNextCall(f);"
   2066         "f();");
   2067   }
   2068   Handle<JSFunction> f =
   2069       v8::Utils::OpenHandle(
   2070           *v8::Handle<v8::Function>::Cast(
   2071               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   2072   CHECK(f->IsOptimized());
   2073 
   2074   HEAP->incremental_marking()->Abort();
   2075 
   2076   // The following two calls will increment HEAP->global_ic_age().
   2077   // Since incremental marking is off, IdleNotification will do full GC.
   2078   const int kLongIdlePauseInMs = 1000;
   2079   v8::V8::ContextDisposedNotification();
   2080   v8::V8::IdleNotification(kLongIdlePauseInMs);
   2081 
   2082   CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
   2083   CHECK_EQ(0, f->shared()->opt_count());
   2084   CHECK_EQ(0, f->shared()->code()->profiler_ticks());
   2085 }
   2086 
   2087 
   2088 // Test that HAllocateObject will always return an object in new-space.
   2089 TEST(OptimizedAllocationAlwaysInNewSpace) {
   2090   i::FLAG_allow_natives_syntax = true;
   2091   CcTest::InitializeVM();
   2092   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2093   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2094   v8::HandleScope scope(CcTest::isolate());
   2095 
   2096   SimulateFullSpace(HEAP->new_space());
   2097   AlwaysAllocateScope always_allocate;
   2098   v8::Local<v8::Value> res = CompileRun(
   2099       "function c(x) {"
   2100       "  this.x = x;"
   2101       "  for (var i = 0; i < 32; i++) {"
   2102       "    this['x' + i] = x;"
   2103       "  }"
   2104       "}"
   2105       "function f(x) { return new c(x); };"
   2106       "f(1); f(2); f(3);"
   2107       "%OptimizeFunctionOnNextCall(f);"
   2108       "f(4);");
   2109   CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
   2110 
   2111   Handle<JSObject> o =
   2112       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2113 
   2114   CHECK(HEAP->InNewSpace(*o));
   2115 }
   2116 
   2117 
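        // Test that folded allocations in optimized code are pretenured: double
        // fields end up in old data space and object fields in old pointer space.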
   2118 TEST(OptimizedPretenuringAllocationFolding) {
   2119   i::FLAG_allow_natives_syntax = true;
   2120   CcTest::InitializeVM();
   2121   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2122   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2123   v8::HandleScope scope(CcTest::isolate());
   2124   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2125 
   2126   v8::Local<v8::Value> res = CompileRun(
   2127       "function DataObject() {"
   2128       "  this.a = 1.1;"
   2129       "  this.b = [{}];"
   2130       "  this.c = 1.2;"
   2131       "  this.d = [{}];"
   2132       "  this.e = 1.3;"
   2133       "  this.f = [{}];"
   2134       "}"
   2135       "function f() {"
   2136       "  return new DataObject();"
   2137       "};"
   2138       "f(); f(); f();"
   2139       "%OptimizeFunctionOnNextCall(f);"
   2140       "f();");
   2141 
   2142   Handle<JSObject> o =
   2143       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2144 
   2145   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(0)));
   2146   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(1)));
   2147   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(2)));
   2148   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(3)));
   2149   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(4)));
   2150   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(5)));
   2151 }
   2152 
   2153 
   2154 TEST(OptimizedPretenuringAllocationFoldingBlocks) {
   2155   i::FLAG_allow_natives_syntax = true;
   2156   CcTest::InitializeVM();
   2157   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2158   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2159   v8::HandleScope scope(CcTest::isolate());
   2160   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2161 
   2162   v8::Local<v8::Value> res = CompileRun(
   2163       "function DataObject() {"
   2164       "  this.a = [{}];"
   2165       "  this.b = [{}];"
   2166       "  this.c = 1.1;"
   2167       "  this.d = 1.2;"
   2168       "  this.e = [{}];"
   2169       "  this.f = 1.3;"
   2170       "}"
   2171       "function f() {"
   2172       "  return new DataObject();"
   2173       "};"
   2174       "f(); f(); f();"
   2175       "%OptimizeFunctionOnNextCall(f);"
   2176       "f();");
   2177 
   2178   Handle<JSObject> o =
   2179       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2180 
   2181   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(0)));
   2182   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(1)));
   2183   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(2)));
   2184   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(3)));
   2185   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(4)));
   2186   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(5)));
   2187 }
   2188 
   2189 
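        // Test that object array literals created in optimized code are pretenured
        // into old pointer space.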
   2190 TEST(OptimizedPretenuringObjectArrayLiterals) {
   2191   i::FLAG_allow_natives_syntax = true;
   2192   CcTest::InitializeVM();
   2193   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2194   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2195   v8::HandleScope scope(CcTest::isolate());
   2196   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2197 
   2198   v8::Local<v8::Value> res = CompileRun(
   2199       "function f() {"
   2200       "  var numbers = [{}, {}, {}];"
   2201       "  return numbers;"
   2202       "};"
   2203       "f(); f(); f();"
   2204       "%OptimizeFunctionOnNextCall(f);"
   2205       "f();");
   2206 
   2207   Handle<JSObject> o =
   2208       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2209 
   2210   CHECK(HEAP->InOldPointerSpace(o->elements()));
   2211   CHECK(HEAP->InOldPointerSpace(*o));
   2212 }
   2213 
   2214 
   2215 TEST(OptimizedPretenuringMixedInObjectProperties) {
   2216   i::FLAG_allow_natives_syntax = true;
   2217   CcTest::InitializeVM();
   2218   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2219   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2220   v8::HandleScope scope(CcTest::isolate());
   2221   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2222 
   2223   v8::Local<v8::Value> res = CompileRun(
   2224       "function f() {"
   2225       "  var numbers = {a: {c: 2.2, d: {}}, b: 1.1};"
   2226       "  return numbers;"
   2227       "};"
   2228       "f(); f(); f();"
   2229       "%OptimizeFunctionOnNextCall(f);"
   2230       "f();");
   2231 
   2232   Handle<JSObject> o =
   2233       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2234 
   2235   CHECK(HEAP->InOldPointerSpace(*o));
   2236   CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(0)));
   2237   CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(1)));
   2238 
   2239   JSObject* inner_object = reinterpret_cast<JSObject*>(o->RawFastPropertyAt(0));
   2240   CHECK(HEAP->InOldPointerSpace(inner_object));
   2241   CHECK(HEAP->InOldDataSpace(inner_object->RawFastPropertyAt(0)));
   2242   CHECK(HEAP->InOldPointerSpace(inner_object->RawFastPropertyAt(1)));
   2243 }
   2244 
   2245 
   2246 TEST(OptimizedPretenuringDoubleArrayProperties) {
   2247   i::FLAG_allow_natives_syntax = true;
   2248   CcTest::InitializeVM();
   2249   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2250   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2251   v8::HandleScope scope(CcTest::isolate());
   2252   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2253 
   2254   v8::Local<v8::Value> res = CompileRun(
   2255       "function f() {"
   2256       "  var numbers = {a: 1.1, b: 2.2};"
   2257       "  return numbers;"
   2258       "};"
   2259       "f(); f(); f();"
   2260       "%OptimizeFunctionOnNextCall(f);"
   2261       "f();");
   2262 
   2263   Handle<JSObject> o =
   2264       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2265 
   2266   CHECK(HEAP->InOldPointerSpace(*o));
   2267   CHECK(HEAP->InOldDataSpace(o->properties()));
   2268 }
   2269 
   2270 
   2271 TEST(OptimizedPretenuringdoubleArrayLiterals) {
   2272   i::FLAG_allow_natives_syntax = true;
   2273   CcTest::InitializeVM();
   2274   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2275   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2276   v8::HandleScope scope(CcTest::isolate());
   2277   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2278 
   2279   v8::Local<v8::Value> res = CompileRun(
   2280       "function f() {"
   2281       "  var numbers = [1.1, 2.2, 3.3];"
   2282       "  return numbers;"
   2283       "};"
   2284       "f(); f(); f();"
   2285       "%OptimizeFunctionOnNextCall(f);"
   2286       "f();");
   2287 
   2288   Handle<JSObject> o =
   2289       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2290 
   2291   CHECK(HEAP->InOldDataSpace(o->elements()));
   2292   CHECK(HEAP->InOldPointerSpace(*o));
   2293 }
   2294 
   2295 
   2296 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
   2297   i::FLAG_allow_natives_syntax = true;
   2298   CcTest::InitializeVM();
   2299   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2300   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2301   v8::HandleScope scope(CcTest::isolate());
   2302   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2303 
   2304   v8::Local<v8::Value> res = CompileRun(
   2305       "function f() {"
   2306       "  var numbers = [[{}, {}, {}],[1.1, 2.2, 3.3]];"
   2307       "  return numbers;"
   2308       "};"
   2309       "f(); f(); f();"
   2310       "%OptimizeFunctionOnNextCall(f);"
   2311       "f();");
   2312 
   2313   v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
   2314   Handle<JSObject> int_array_handle =
   2315       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
   2316   v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
   2317   Handle<JSObject> double_array_handle =
   2318       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
   2319 
   2320   Handle<JSObject> o =
   2321       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2322   CHECK(HEAP->InOldPointerSpace(*o));
   2323   CHECK(HEAP->InOldPointerSpace(*int_array_handle));
   2324   CHECK(HEAP->InOldPointerSpace(int_array_handle->elements()));
   2325   CHECK(HEAP->InOldPointerSpace(*double_array_handle));
   2326   CHECK(HEAP->InOldDataSpace(double_array_handle->elements()));
   2327 }
   2328 
   2329 
   2330 TEST(OptimizedPretenuringNestedObjectLiterals) {
   2331   i::FLAG_allow_natives_syntax = true;
   2332   CcTest::InitializeVM();
   2333   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2334   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2335   v8::HandleScope scope(CcTest::isolate());
   2336   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2337 
   2338   v8::Local<v8::Value> res = CompileRun(
   2339       "function f() {"
   2340       "  var numbers = [[{}, {}, {}],[{}, {}, {}]];"
   2341       "  return numbers;"
   2342       "};"
   2343       "f(); f(); f();"
   2344       "%OptimizeFunctionOnNextCall(f);"
   2345       "f();");
   2346 
   2347   v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
   2348   Handle<JSObject> int_array_handle_1 =
   2349       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
   2350   v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
   2351   Handle<JSObject> int_array_handle_2 =
   2352       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
   2353 
   2354   Handle<JSObject> o =
   2355       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2356   CHECK(HEAP->InOldPointerSpace(*o));
   2357   CHECK(HEAP->InOldPointerSpace(*int_array_handle_1));
   2358   CHECK(HEAP->InOldPointerSpace(int_array_handle_1->elements()));
   2359   CHECK(HEAP->InOldPointerSpace(*int_array_handle_2));
   2360   CHECK(HEAP->InOldPointerSpace(int_array_handle_2->elements()));
   2361 }
   2362 
   2363 
   2364 TEST(OptimizedPretenuringNestedDoubleLiterals) {
   2365   i::FLAG_allow_natives_syntax = true;
   2366   CcTest::InitializeVM();
   2367   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2368   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2369   v8::HandleScope scope(CcTest::isolate());
   2370   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2371 
   2372   v8::Local<v8::Value> res = CompileRun(
   2373       "function f() {"
   2374       "  var numbers = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
   2375       "  return numbers;"
   2376       "};"
   2377       "f(); f(); f();"
   2378       "%OptimizeFunctionOnNextCall(f);"
   2379       "f();");
   2380 
   2381   v8::Local<v8::Value> double_array_1 =
   2382       v8::Object::Cast(*res)->Get(v8_str("0"));
   2383   Handle<JSObject> double_array_handle_1 =
   2384       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
   2385   v8::Local<v8::Value> double_array_2 =
   2386       v8::Object::Cast(*res)->Get(v8_str("1"));
   2387   Handle<JSObject> double_array_handle_2 =
   2388       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
   2389 
   2390   Handle<JSObject> o =
   2391       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2392   CHECK(HEAP->InOldPointerSpace(*o));
   2393   CHECK(HEAP->InOldPointerSpace(*double_array_handle_1));
   2394   CHECK(HEAP->InOldDataSpace(double_array_handle_1->elements()));
   2395   CHECK(HEAP->InOldPointerSpace(*double_array_handle_2));
   2396   CHECK(HEAP->InOldDataSpace(double_array_handle_2->elements()));
   2397 }
   2398 
   2399 
   2400 // Test regular array literals allocation.
   2401 TEST(OptimizedAllocationArrayLiterals) {
   2402   i::FLAG_allow_natives_syntax = true;
   2403   CcTest::InitializeVM();
   2404   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2405   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2406   v8::HandleScope scope(CcTest::isolate());
   2407 
   2408   v8::Local<v8::Value> res = CompileRun(
   2409       "function f() {"
   2410       "  var numbers = new Array(1, 2, 3);"
   2411       "  numbers[0] = 3.14;"
   2412       "  return numbers;"
   2413       "};"
   2414       "f(); f(); f();"
   2415       "%OptimizeFunctionOnNextCall(f);"
   2416       "f();");
   2417   CHECK_EQ(static_cast<int>(3.14),
   2418            v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
   2419 
   2420   Handle<JSObject> o =
   2421       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2422 
   2423   CHECK(HEAP->InNewSpace(o->elements()));
   2424 }
   2425 
   2426 
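        // Test that objects allocated with 'new' in optimized code are pretenured
        // into old pointer space when --pretenuring-call-new is enabled.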
   2427 TEST(OptimizedPretenuringCallNew) {
   2428   i::FLAG_allow_natives_syntax = true;
   2429   i::FLAG_pretenuring_call_new = true;
   2430   CcTest::InitializeVM();
   2431   if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
   2432   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   2433   v8::HandleScope scope(CcTest::isolate());
   2434   HEAP->SetNewSpaceHighPromotionModeActive(true);
   2435 
   2436   AlwaysAllocateScope always_allocate;
   2437   v8::Local<v8::Value> res = CompileRun(
   2438       "function g() { this.a = 0; }"
   2439       "function f() {"
   2440       "  return new g();"
   2441       "};"
   2442       "f(); f(); f();"
   2443       "%OptimizeFunctionOnNextCall(f);"
   2444       "f();");
   2445 
   2446   Handle<JSObject> o =
   2447       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
   2448   CHECK(HEAP->InOldPointerSpace(*o));
   2449 }
   2450 
   2451 
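        // Count the number of outgoing transitions in the given map's transition
        // array.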
   2452 static int CountMapTransitions(Map* map) {
   2453   return map->transitions()->number_of_transitions();
   2454 }
   2455 
   2456 
   2457 // Test that map transitions are cleared and maps are collected with
   2458 // incremental marking as well.
   2459 TEST(Regress1465) {
   2460   i::FLAG_stress_compaction = false;
   2461   i::FLAG_allow_natives_syntax = true;
   2462   i::FLAG_trace_incremental_marking = true;
   2463   CcTest::InitializeVM();
   2464   v8::HandleScope scope(CcTest::isolate());
   2465   static const int transitions_count = 256;
   2466 
   2467   {
   2468     AlwaysAllocateScope always_allocate;
   2469     for (int i = 0; i < transitions_count; i++) {
   2470       EmbeddedVector<char, 64> buffer;
   2471       OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
   2472       CompileRun(buffer.start());
   2473     }
   2474     CompileRun("var root = new Object;");
   2475   }
   2476 
   2477   Handle<JSObject> root =
   2478       v8::Utils::OpenHandle(
   2479           *v8::Handle<v8::Object>::Cast(
   2480               v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
   2481 
   2482   // Count number of live transitions before marking.
   2483   int transitions_before = CountMapTransitions(root->map());
   2484   CompileRun("%DebugPrint(root);");
   2485   CHECK_EQ(transitions_count, transitions_before);
   2486 
   2487   SimulateIncrementalMarking();
   2488   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2489 
   2490   // Count number of live transitions after marking.  Note that one transition
   2491   // is left, because 'o' still holds an instance of one transition target.
   2492   int transitions_after = CountMapTransitions(root->map());
   2493   CompileRun("%DebugPrint(root);");
   2494   CHECK_EQ(1, transitions_after);
   2495 }
   2496 
   2497 
   2498 TEST(Regress2143a) {
   2499   i::FLAG_collect_maps = true;
   2500   i::FLAG_incremental_marking = true;
   2501   CcTest::InitializeVM();
   2502   v8::HandleScope scope(CcTest::isolate());
   2503 
   2504   // Prepare a map transition from the root object together with a yet
   2505   // untransitioned root object.
   2506   CompileRun("var root = new Object;"
   2507              "root.foo = 0;"
   2508              "root = new Object;");
   2509 
   2510   SimulateIncrementalMarking();
   2511 
   2512   // Compile a StoreIC that performs the prepared map transition. This
   2513   // will restart incremental marking and should make sure the root is
   2514   // marked grey again.
   2515   CompileRun("function f(o) {"
   2516              "  o.foo = 0;"
   2517              "}"
   2518              "f(new Object);"
   2519              "f(root);");
   2520 
   2521   // This bug only triggers with aggressive IC clearing.
   2522   HEAP->AgeInlineCaches();
   2523 
   2524   // Explicitly request GC to perform final marking step and sweeping.
   2525   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2526 
   2527   Handle<JSObject> root =
   2528       v8::Utils::OpenHandle(
   2529           *v8::Handle<v8::Object>::Cast(
   2530               v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
   2531 
   2532   // The root object should be in a sane state.
   2533   CHECK(root->IsJSObject());
   2534   CHECK(root->map()->IsMap());
   2535 }
   2536 
   2537 
   2538 TEST(Regress2143b) {
   2539   i::FLAG_collect_maps = true;
   2540   i::FLAG_incremental_marking = true;
   2541   i::FLAG_allow_natives_syntax = true;
   2542   CcTest::InitializeVM();
   2543   v8::HandleScope scope(CcTest::isolate());
   2544 
   2545   // Prepare a map transition from the root object together with a yet
   2546   // untransitioned root object.
   2547   CompileRun("var root = new Object;"
   2548              "root.foo = 0;"
   2549              "root = new Object;");
   2550 
   2551   SimulateIncrementalMarking();
   2552 
   2553   // Compile an optimized LStoreNamedField that performs the prepared
   2554   // map transition. This will restart incremental marking and should
   2555   // make sure the root is marked grey again.
   2556   CompileRun("function f(o) {"
   2557              "  o.foo = 0;"
   2558              "}"
   2559              "f(new Object);"
   2560              "f(new Object);"
   2561              "%OptimizeFunctionOnNextCall(f);"
   2562              "f(root);"
   2563              "%DeoptimizeFunction(f);");
   2564 
   2565   // This bug only triggers with aggressive IC clearing.
   2566   HEAP->AgeInlineCaches();
   2567 
   2568   // Explicitly request GC to perform final marking step and sweeping.
   2569   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2570 
   2571   Handle<JSObject> root =
   2572       v8::Utils::OpenHandle(
   2573           *v8::Handle<v8::Object>::Cast(
   2574               v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
   2575 
   2576   // The root object should be in a sane state.
   2577   CHECK(root->IsJSObject());
   2578   CHECK(root->map()->IsMap());
   2579 }
   2580 
   2581 
   2582 TEST(ReleaseOverReservedPages) {
   2583   i::FLAG_trace_gc = true;
   2584   // The optimizer can allocate stuff, messing up the test.
   2585   i::FLAG_crankshaft = false;
   2586   i::FLAG_always_opt = false;
   2587   CcTest::InitializeVM();
   2588   Isolate* isolate = Isolate::Current();
   2589   Factory* factory = isolate->factory();
   2590   v8::HandleScope scope(CcTest::isolate());
   2591   static const int number_of_test_pages = 20;
   2592 
   2593   // Prepare many pages with low live-bytes count.
   2594   PagedSpace* old_pointer_space = HEAP->old_pointer_space();
   2595   CHECK_EQ(1, old_pointer_space->CountTotalPages());
   2596   for (int i = 0; i < number_of_test_pages; i++) {
   2597     AlwaysAllocateScope always_allocate;
   2598     SimulateFullSpace(old_pointer_space);
   2599     factory->NewFixedArray(1, TENURED);
   2600   }
   2601   CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
   2602 
   2603   // Triggering one GC will cause a lot of garbage to be discovered but
   2604   // evenly spread across all allocated pages.
   2605   HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
   2606   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
   2607 
   2608   // Triggering subsequent GCs should cause at least half of the pages
   2609   // to be released to the OS after at most two cycles.
   2610   HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
   2611   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
   2612   HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
   2613   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
   2614 
   2615   // Triggering a last-resort GC should cause all pages to be released to the
   2616   // OS so that other processes can seize the memory.  If we get a failure here
   2617   // where there are 2 pages left instead of 1, then we should increase the
   2618   // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
   2619   // first page should be small in order to reduce memory used when the VM
   2620   // boots, but if the 20 small arrays don't fit on the first page then that's
   2621   // an indication that it is too small.
   2622   HEAP->CollectAllAvailableGarbage("triggered really hard");
   2623   CHECK_EQ(1, old_pointer_space->CountTotalPages());
   2624 }
   2625 
   2626 
   2627 TEST(Regress2237) {
   2628   i::FLAG_stress_compaction = false;
   2629   CcTest::InitializeVM();
   2630   Isolate* isolate = Isolate::Current();
   2631   Factory* factory = isolate->factory();
   2632   v8::HandleScope scope(CcTest::isolate());
   2633   Handle<String> slice(HEAP->empty_string());
   2634 
   2635   {
   2636     // Generate a parent that lives in new-space.
   2637     v8::HandleScope inner_scope(CcTest::isolate());
   2638     const char* c = "This text is long enough to trigger sliced strings.";
   2639     Handle<String> s = factory->NewStringFromAscii(CStrVector(c));
   2640     CHECK(s->IsSeqOneByteString());
   2641     CHECK(HEAP->InNewSpace(*s));
   2642 
   2643     // Generate a sliced string that is based on the above parent and
   2644     // lives in old-space.
   2645     SimulateFullSpace(HEAP->new_space());
   2646     AlwaysAllocateScope always_allocate;
   2647     Handle<String> t = factory->NewProperSubString(s, 5, 35);
   2648     CHECK(t->IsSlicedString());
   2649     CHECK(!HEAP->InNewSpace(*t));
   2650     *slice.location() = *t.location();
   2651   }
   2652 
   2653   CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
   2654   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2655   CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
   2656 }
   2657 
   2658 
   2659 #ifdef OBJECT_PRINT
   2660 TEST(PrintSharedFunctionInfo) {
   2661   CcTest::InitializeVM();
   2662   v8::HandleScope scope(CcTest::isolate());
   2663   const char* source = "f = function() { return 987654321; }\n"
   2664                        "g = function() { return 123456789; }\n";
   2665   CompileRun(source);
   2666   Handle<JSFunction> g =
   2667       v8::Utils::OpenHandle(
   2668           *v8::Handle<v8::Function>::Cast(
   2669               v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
   2670 
   2671   DisallowHeapAllocation no_allocation;
   2672   g->shared()->PrintLn();
   2673 }
   2674 #endif  // OBJECT_PRINT
   2675 
   2676 
   2677 TEST(Regress2211) {
   2678   CcTest::InitializeVM();
   2679   v8::HandleScope scope(CcTest::isolate());
   2680 
   2681   v8::Handle<v8::String> value = v8_str("val string");
   2682   Smi* hash = Smi::FromInt(321);
   2683   Heap* heap = Isolate::Current()->heap();
   2684 
   2685   for (int i = 0; i < 2; i++) {
   2686     // Set the identity hash and a hidden property on a fresh object.
   2687     v8::Handle<v8::Object> obj = v8::Object::New();
   2688     Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
   2689     CHECK(internal_obj->HasFastProperties());
   2690 
   2691     // In the first iteration, set hidden value first and identity hash second.
   2692     // In the second iteration, reverse the order.
   2693     if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
   2694     MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
   2695                                                            ALLOW_CREATION);
   2696     CHECK(!maybe_obj->IsFailure());
   2697     if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
   2698 
   2699     // Check values.
   2700     CHECK_EQ(hash,
   2701              internal_obj->GetHiddenProperty(heap->identity_hash_string()));
   2702     CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
   2703 
   2704     // Check size.
   2705     DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
   2706     ObjectHashTable* hashtable = ObjectHashTable::cast(
   2707         internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0)));
   2708     // HashTable header (5) and 4 initial entries (8).
   2709     CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
   2710   }
   2711 }
   2712 
   2713 
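        // Test that type feedback cells holding closures from other native contexts
        // are cleared by incremental marking followed by a full GC.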
   2714 TEST(IncrementalMarkingClearsTypeFeedbackCells) {
   2715   if (i::FLAG_always_opt) return;
   2716   CcTest::InitializeVM();
   2717   v8::HandleScope scope(CcTest::isolate());
   2718   v8::Local<v8::Value> fun1, fun2;
   2719 
   2720   {
   2721     LocalContext env;
   2722     CompileRun("function fun() {};");
   2723     fun1 = env->Global()->Get(v8_str("fun"));
   2724   }
   2725 
   2726   {
   2727     LocalContext env;
   2728     CompileRun("function fun() {};");
   2729     fun2 = env->Global()->Get(v8_str("fun"));
   2730   }
   2731 
    2732   // Prepare a function f that contains type feedback for closures
   2733   // originating from two different native contexts.
   2734   v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
   2735   v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
   2736   CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
   2737   Handle<JSFunction> f =
   2738       v8::Utils::OpenHandle(
   2739           *v8::Handle<v8::Function>::Cast(
   2740               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   2741   Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
   2742       f->shared()->code()->type_feedback_info())->type_feedback_cells());
   2743 
   2744   CHECK_EQ(2, cells->CellCount());
   2745   CHECK(cells->GetCell(0)->value()->IsJSFunction());
   2746   CHECK(cells->GetCell(1)->value()->IsJSFunction());
   2747 
   2748   SimulateIncrementalMarking();
   2749   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2750 
   2751   CHECK_EQ(2, cells->CellCount());
   2752   CHECK(cells->GetCell(0)->value()->IsTheHole());
   2753   CHECK(cells->GetCell(1)->value()->IsTheHole());
   2754 }
   2755 
   2756 
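         // Walks the relocation information of |code| and returns the first code
         // target that is an inline cache stub of the given |kind|, or NULL if no
         // such stub is found.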
   2757 static Code* FindFirstIC(Code* code, Code::Kind kind) {
   2758   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
   2759              RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
   2760              RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
   2761              RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
   2762   for (RelocIterator it(code, mask); !it.done(); it.next()) {
   2763     RelocInfo* info = it.rinfo();
   2764     Code* target = Code::GetCodeFromTargetAddress(info->target_address());
   2765     if (target->is_inline_cache_stub() && target->kind() == kind) {
   2766       return target;
   2767     }
   2768   }
   2769   return NULL;
   2770 }
   2771 
   2772 
    2773 TEST(IncrementalMarkingPreservesMonomorphicIC) {
   2774   if (i::FLAG_always_opt) return;
   2775   CcTest::InitializeVM();
   2776   v8::HandleScope scope(CcTest::isolate());
   2777 
    2778   // Prepare a function f that contains a monomorphic IC for an object
    2779   // originating from the same native context.
   2780   CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
   2781              "function f(o) { return o.x; } f(obj); f(obj);");
   2782   Handle<JSFunction> f =
   2783       v8::Utils::OpenHandle(
   2784           *v8::Handle<v8::Function>::Cast(
   2785               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   2786 
   2787   Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
   2788   CHECK(ic_before->ic_state() == MONOMORPHIC);
   2789 
   2790   SimulateIncrementalMarking();
   2791   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2792 
   2793   Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
   2794   CHECK(ic_after->ic_state() == MONOMORPHIC);
   2795 }
   2796 
   2797 
    2798 TEST(IncrementalMarkingClearsMonomorphicIC) {
   2799   if (i::FLAG_always_opt) return;
   2800   CcTest::InitializeVM();
   2801   v8::HandleScope scope(CcTest::isolate());
   2802   v8::Local<v8::Value> obj1;
   2803 
   2804   {
   2805     LocalContext env;
   2806     CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
   2807     obj1 = env->Global()->Get(v8_str("obj"));
   2808   }
   2809 
    2810   // Prepare a function f that contains a monomorphic IC for an object
    2811   // originating from a different native context.
   2812   v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
   2813   CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
   2814   Handle<JSFunction> f =
   2815       v8::Utils::OpenHandle(
   2816           *v8::Handle<v8::Function>::Cast(
   2817               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   2818 
   2819   Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
   2820   CHECK(ic_before->ic_state() == MONOMORPHIC);
   2821 
   2822   // Fire context dispose notification.
   2823   v8::V8::ContextDisposedNotification();
   2824   SimulateIncrementalMarking();
   2825   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2826 
   2827   Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
   2828   CHECK(ic_after->ic_state() == UNINITIALIZED);
   2829 }
   2830 
   2831 
    2832 TEST(IncrementalMarkingClearsPolymorphicIC) {
   2833   if (i::FLAG_always_opt) return;
   2834   CcTest::InitializeVM();
   2835   v8::HandleScope scope(CcTest::isolate());
   2836   v8::Local<v8::Value> obj1, obj2;
   2837 
   2838   {
   2839     LocalContext env;
   2840     CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
   2841     obj1 = env->Global()->Get(v8_str("obj"));
   2842   }
   2843 
   2844   {
   2845     LocalContext env;
   2846     CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
   2847     obj2 = env->Global()->Get(v8_str("obj"));
   2848   }
   2849 
    2850   // Prepare a function f that contains a polymorphic IC for objects
   2851   // originating from two different native contexts.
   2852   v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
   2853   v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
   2854   CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
   2855   Handle<JSFunction> f =
   2856       v8::Utils::OpenHandle(
   2857           *v8::Handle<v8::Function>::Cast(
   2858               v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   2859 
   2860   Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
   2861   CHECK(ic_before->ic_state() == POLYMORPHIC);
   2862 
   2863   // Fire context dispose notification.
   2864   v8::V8::ContextDisposedNotification();
   2865   SimulateIncrementalMarking();
   2866   HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   2867 
   2868   Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
   2869   CHECK(ic_after->ic_state() == UNINITIALIZED);
   2870 }
   2871 
   2872 
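         // External string resource that remembers whether it has been disposed,
         // so tests can observe when the external string it backs is collected.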
   2873 class SourceResource: public v8::String::ExternalAsciiStringResource {
   2874  public:
   2875   explicit SourceResource(const char* data)
   2876     : data_(data), length_(strlen(data)) { }
   2877 
   2878   virtual void Dispose() {
   2879     i::DeleteArray(data_);
   2880     data_ = NULL;
   2881   }
   2882 
   2883   const char* data() const { return data_; }
   2884 
   2885   size_t length() const { return length_; }
   2886 
   2887   bool IsDisposed() { return data_ == NULL; }
   2888 
   2889  private:
   2890   const char* data_;
   2891   size_t length_;
   2892 };
   2893 
   2894 
   2895 void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
   2896   // Test that the data retained by the Error.stack accessor is released
    2897   // after the first time the accessor is fired.  We use an external string
    2898   // to check whether the data is released, since the external string
    2899   // resource's Dispose callback is fired when the external string is GC'ed.
   2900   FLAG_use_ic = false;  // ICs retain objects.
   2901   FLAG_parallel_recompilation = false;
   2902   CcTest::InitializeVM();
   2903   v8::HandleScope scope(CcTest::isolate());
   2904   SourceResource* resource = new SourceResource(i::StrDup(source));
   2905   {
   2906     v8::HandleScope scope(CcTest::isolate());
   2907     v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
   2908     HEAP->CollectAllAvailableGarbage();
   2909     v8::Script::Compile(source_string)->Run();
   2910     CHECK(!resource->IsDisposed());
   2911   }
   2912   // HEAP->CollectAllAvailableGarbage();
   2913   CHECK(!resource->IsDisposed());
   2914 
   2915   CompileRun(accessor);
   2916   HEAP->CollectAllAvailableGarbage();
   2917 
   2918   // External source has been released.
   2919   CHECK(resource->IsDisposed());
   2920   delete resource;
   2921 }
   2922 
   2923 
   2924 TEST(ReleaseStackTraceData) {
   2925   static const char* source1 = "var error = null;            "
   2926   /* Normal Error */           "try {                        "
   2927                                "  throw new Error();         "
   2928                                "} catch (e) {                "
   2929                                "  error = e;                 "
   2930                                "}                            ";
   2931   static const char* source2 = "var error = null;            "
   2932   /* Stack overflow */         "try {                        "
   2933                                "  (function f() { f(); })(); "
   2934                                "} catch (e) {                "
   2935                                "  error = e;                 "
   2936                                "}                            ";
   2937   static const char* source3 = "var error = null;            "
   2938   /* Normal Error */           "try {                        "
   2939   /* as prototype */           "  throw new Error();         "
   2940                                "} catch (e) {                "
   2941                                "  error = {};                "
   2942                                "  error.__proto__ = e;       "
   2943                                "}                            ";
   2944   static const char* source4 = "var error = null;            "
   2945   /* Stack overflow */         "try {                        "
   2946   /* as prototype   */         "  (function f() { f(); })(); "
   2947                                "} catch (e) {                "
   2948                                "  error = {};                "
   2949                                "  error.__proto__ = e;       "
   2950                                "}                            ";
   2951   static const char* getter = "error.stack";
   2952   static const char* setter = "error.stack = 0";
   2953 
   2954   ReleaseStackTraceDataTest(source1, setter);
   2955   ReleaseStackTraceDataTest(source2, setter);
    2956   // We do not test source3 and source4 with the setter, since the setter is
    2957   // supposed to (atypically) write to the receiver, not the holder.  This is
   2958   // to emulate the behavior of a data property.
   2959 
   2960   ReleaseStackTraceDataTest(source1, getter);
   2961   ReleaseStackTraceDataTest(source2, getter);
   2962   ReleaseStackTraceDataTest(source3, getter);
   2963   ReleaseStackTraceDataTest(source4, getter);
   2964 }
   2965 
   2966 
   2967 TEST(Regression144230) {
   2968   i::FLAG_stress_compaction = false;
   2969   CcTest::InitializeVM();
   2970   Isolate* isolate = Isolate::Current();
   2971   Heap* heap = isolate->heap();
   2972   HandleScope scope(isolate);
   2973 
    2974   // First, make sure that the uninitialized CallIC stub is on a single page
   2975   // that will later be selected as an evacuation candidate.
   2976   {
   2977     HandleScope inner_scope(isolate);
   2978     AlwaysAllocateScope always_allocate;
   2979     SimulateFullSpace(heap->code_space());
   2980     isolate->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
   2981   }
   2982 
    2983   // Second, compile a CallIC and execute it once so that it gets patched to
   2984   // the pre-monomorphic stub. These code objects are on yet another page.
   2985   {
   2986     HandleScope inner_scope(isolate);
   2987     AlwaysAllocateScope always_allocate;
   2988     SimulateFullSpace(heap->code_space());
   2989     CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
   2990                "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
   2991                "call();");
   2992   }
   2993 
    2994   // Third, we fill up the last page of the code space so that it does not get
   2995   // chosen as an evacuation candidate.
   2996   {
   2997     HandleScope inner_scope(isolate);
   2998     AlwaysAllocateScope always_allocate;
   2999     CompileRun("for (var i = 0; i < 2000; i++) {"
   3000                "  eval('function f' + i + '() { return ' + i +'; };' +"
   3001                "       'f' + i + '();');"
   3002                "}");
   3003   }
   3004   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3005 
   3006   // Fourth is the tricky part. Make sure the code containing the CallIC is
   3007   // visited first without clearing the IC. The shared function info is then
   3008   // visited later, causing the CallIC to be cleared.
   3009   Handle<String> name = isolate->factory()->InternalizeUtf8String("call");
   3010   Handle<GlobalObject> global(isolate->context()->global_object());
   3011   MaybeObject* maybe_call = global->GetProperty(*name);
   3012   JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
   3013   USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
   3014   isolate->compilation_cache()->Clear();
   3015   call->shared()->set_ic_age(heap->global_ic_age() + 1);
   3016   Handle<Object> call_code(call->code(), isolate);
   3017   Handle<Object> call_function(call, isolate);
   3018 
   3019   // Now we are ready to mess up the heap.
   3020   heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
   3021 
   3022   // Either heap verification caught the problem already or we go kaboom once
   3023   // the CallIC is executed the next time.
   3024   USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
   3025   CompileRun("call();");
   3026 }
   3027 
   3028 
   3029 TEST(Regress159140) {
   3030   i::FLAG_allow_natives_syntax = true;
   3031   i::FLAG_flush_code_incrementally = true;
   3032   CcTest::InitializeVM();
   3033   Isolate* isolate = Isolate::Current();
   3034   Heap* heap = isolate->heap();
   3035   HandleScope scope(isolate);
   3036 
   3037   // Perform one initial GC to enable code flushing.
   3038   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   3039 
    3040   // Prepare several closures that are all eligible for code flushing
    3041   // because none of the reachable closures is optimized. Make sure that the
   3042   // optimized code object is directly reachable through a handle so
   3043   // that it is marked black during incremental marking.
   3044   Handle<Code> code;
   3045   {
   3046     HandleScope inner_scope(isolate);
   3047     CompileRun("function h(x) {}"
   3048                "function mkClosure() {"
   3049                "  return function(x) { return x + 1; };"
   3050                "}"
   3051                "var f = mkClosure();"
   3052                "var g = mkClosure();"
   3053                "f(1); f(2);"
   3054                "g(1); g(2);"
   3055                "h(1); h(2);"
   3056                "%OptimizeFunctionOnNextCall(f); f(3);"
   3057                "%OptimizeFunctionOnNextCall(h); h(3);");
   3058 
   3059     Handle<JSFunction> f =
   3060         v8::Utils::OpenHandle(
   3061             *v8::Handle<v8::Function>::Cast(
   3062                 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   3063     CHECK(f->is_compiled());
   3064     CompileRun("f = null;");
   3065 
   3066     Handle<JSFunction> g =
   3067         v8::Utils::OpenHandle(
   3068             *v8::Handle<v8::Function>::Cast(
   3069                 v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
   3070     CHECK(g->is_compiled());
   3071     const int kAgingThreshold = 6;
   3072     for (int i = 0; i < kAgingThreshold; i++) {
   3073       g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   3074     }
   3075 
   3076     code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
   3077   }
   3078 
   3079   // Simulate incremental marking so that the functions are enqueued as
   3080   // code flushing candidates. Then optimize one function. Finally
   3081   // finish the GC to complete code flushing.
   3082   SimulateIncrementalMarking();
   3083   CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
   3084   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3085 
   3086   // Unoptimized code is missing and the deoptimizer will go ballistic.
   3087   CompileRun("g('bozo');");
   3088 }
   3089 
   3090 
   3091 TEST(Regress165495) {
   3092   i::FLAG_allow_natives_syntax = true;
   3093   i::FLAG_flush_code_incrementally = true;
   3094   CcTest::InitializeVM();
   3095   Isolate* isolate = Isolate::Current();
   3096   Heap* heap = isolate->heap();
   3097   HandleScope scope(isolate);
   3098 
   3099   // Perform one initial GC to enable code flushing.
   3100   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   3101 
    3102   // Prepare an optimized closure so that the optimized code map gets
    3103   // populated. Then age the unoptimized code to trigger code flushing,
   3104   // but make sure the optimized code is unreachable.
   3105   {
   3106     HandleScope inner_scope(isolate);
   3107     CompileRun("function mkClosure() {"
   3108                "  return function(x) { return x + 1; };"
   3109                "}"
   3110                "var f = mkClosure();"
   3111                "f(1); f(2);"
   3112                "%OptimizeFunctionOnNextCall(f); f(3);");
   3113 
   3114     Handle<JSFunction> f =
   3115         v8::Utils::OpenHandle(
   3116             *v8::Handle<v8::Function>::Cast(
   3117                 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   3118     CHECK(f->is_compiled());
   3119     const int kAgingThreshold = 6;
   3120     for (int i = 0; i < kAgingThreshold; i++) {
   3121       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   3122     }
   3123 
   3124     CompileRun("f = null;");
   3125   }
   3126 
    3127   // Simulate incremental marking so that the unoptimized code is flushed
    3128   // even though it is still cached in the optimized code map.
   3129   SimulateIncrementalMarking();
   3130   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3131 
   3132   // Make a new closure that will get code installed from the code map.
   3133   // Unoptimized code is missing and the deoptimizer will go ballistic.
   3134   CompileRun("var g = mkClosure(); g('bozo');");
   3135 }
   3136 
   3137 
   3138 TEST(Regress169209) {
   3139   i::FLAG_stress_compaction = false;
   3140   i::FLAG_allow_natives_syntax = true;
   3141   i::FLAG_flush_code_incrementally = true;
   3142 
   3143   // Experimental natives are compiled during snapshot deserialization.
    3144   // This test breaks because the heap layout changes in a way that the
    3145   // closure is visited before the shared function info.
   3146   i::FLAG_harmony_typed_arrays = false;
   3147   i::FLAG_harmony_array_buffer = false;
   3148 
   3149   // Disable loading the i18n extension which breaks the assumptions of this
   3150   // test about the heap layout.
   3151   i::FLAG_enable_i18n = false;
   3152 
   3153   CcTest::InitializeVM();
   3154   Isolate* isolate = Isolate::Current();
   3155   Heap* heap = isolate->heap();
   3156   HandleScope scope(isolate);
   3157 
   3158   // Perform one initial GC to enable code flushing.
   3159   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   3160 
   3161   // Prepare a shared function info eligible for code flushing for which
   3162   // the unoptimized code will be replaced during optimization.
   3163   Handle<SharedFunctionInfo> shared1;
   3164   {
   3165     HandleScope inner_scope(isolate);
   3166     CompileRun("function f() { return 'foobar'; }"
   3167                "function g(x) { if (x) f(); }"
   3168                "f();"
   3169                "g(false);"
   3170                "g(false);");
   3171 
   3172     Handle<JSFunction> f =
   3173         v8::Utils::OpenHandle(
   3174             *v8::Handle<v8::Function>::Cast(
   3175                 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   3176     CHECK(f->is_compiled());
   3177     const int kAgingThreshold = 6;
   3178     for (int i = 0; i < kAgingThreshold; i++) {
   3179       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   3180     }
   3181 
   3182     shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
   3183   }
   3184 
   3185   // Prepare a shared function info eligible for code flushing that will
   3186   // represent the dangling tail of the candidate list.
   3187   Handle<SharedFunctionInfo> shared2;
   3188   {
   3189     HandleScope inner_scope(isolate);
   3190     CompileRun("function flushMe() { return 0; }"
   3191                "flushMe(1);");
   3192 
   3193     Handle<JSFunction> f =
   3194         v8::Utils::OpenHandle(
   3195             *v8::Handle<v8::Function>::Cast(
   3196                 v8::Context::GetCurrent()->Global()->Get(v8_str("flushMe"))));
   3197     CHECK(f->is_compiled());
   3198     const int kAgingThreshold = 6;
   3199     for (int i = 0; i < kAgingThreshold; i++) {
   3200       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   3201     }
   3202 
   3203     shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
   3204   }
   3205 
   3206   // Simulate incremental marking and collect code flushing candidates.
   3207   SimulateIncrementalMarking();
   3208   CHECK(shared1->code()->gc_metadata() != NULL);
   3209 
    3210   // Optimize the function and make sure the unoptimized code is replaced.
   3211 #ifdef DEBUG
   3212   FLAG_stop_at = "f";
   3213 #endif
   3214   CompileRun("%OptimizeFunctionOnNextCall(g);"
   3215              "g(false);");
   3216 
   3217   // Finish garbage collection cycle.
   3218   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3219   CHECK(shared1->code()->gc_metadata() == NULL);
   3220 }
   3221 
   3222 
    3223 // Helper function that fills new space, leaving only |extra_bytes| free.
   3224 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
   3225                                         int extra_bytes) {
   3226   int space_remaining = static_cast<int>(
   3227       *space->allocation_limit_address() - *space->allocation_top_address());
   3228   CHECK(space_remaining >= extra_bytes);
   3229   int new_linear_size = space_remaining - extra_bytes;
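           // Allocate one raw chunk covering the rest of the linear allocation area
           // and turn it into a free-list node so that the space remains iterable.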
   3230   v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size);
   3231   v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe);
   3232   node->set_size(space->heap(), new_linear_size);
   3233 }
   3234 
   3235 
   3236 TEST(Regress169928) {
   3237   i::FLAG_allow_natives_syntax = true;
   3238   i::FLAG_crankshaft = false;
   3239   CcTest::InitializeVM();
   3240   Isolate* isolate = Isolate::Current();
   3241   Factory* factory = isolate->factory();
   3242   v8::HandleScope scope(CcTest::isolate());
   3243 
   3244   // Some flags turn Scavenge collections into Mark-sweep collections
   3245   // and hence are incompatible with this test case.
   3246   if (FLAG_gc_global || FLAG_stress_compaction) return;
   3247 
   3248   // Prepare the environment
   3249   CompileRun("function fastliteralcase(literal, value) {"
   3250              "    literal[0] = value;"
   3251              "    return literal;"
   3252              "}"
   3253              "function get_standard_literal() {"
   3254              "    var literal = [1, 2, 3];"
   3255              "    return literal;"
   3256              "}"
   3257              "obj = fastliteralcase(get_standard_literal(), 1);"
   3258              "obj = fastliteralcase(get_standard_literal(), 1.5);"
   3259              "obj = fastliteralcase(get_standard_literal(), 2);");
   3260 
    3261   // Prepare the heap.
   3262   v8::Local<v8::String> mote_code_string =
   3263       v8_str("fastliteralcase(mote, 2.5);");
   3264 
   3265   v8::Local<v8::String> array_name = v8_str("mote");
   3266   v8::Context::GetCurrent()->Global()->Set(array_name, v8::Int32::New(0));
   3267 
    3268   // First, make sure we flip the new-space semi-spaces.
   3269   HEAP->CollectGarbage(NEW_SPACE);
   3270 
   3271   // Allocate the object.
   3272   Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
   3273   array_data->set(0, Smi::FromInt(1));
   3274   array_data->set(1, Smi::FromInt(2));
   3275 
   3276   AllocateAllButNBytes(HEAP->new_space(),
   3277                        JSArray::kSize + AllocationMemento::kSize +
   3278                        kPointerSize);
   3279 
   3280   Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
   3281                                                           FAST_SMI_ELEMENTS,
   3282                                                           NOT_TENURED);
   3283 
   3284   CHECK_EQ(Smi::FromInt(2), array->length());
   3285   CHECK(array->HasFastSmiOrObjectElements());
   3286 
    3287   // We need a filler the size of an AllocationMemento object, plus an
    3288   // extra pointer-sized filler value.
   3289   MaybeObject* maybe_object = HEAP->AllocateRaw(
   3290       AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
   3291   Object* obj = NULL;
   3292   CHECK(maybe_object->ToObject(&obj));
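           // Compute the untagged start address of the allocation; |obj| is a tagged
           // Object*, so kHeapObjectTag is subtracted before the cast.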
   3293   Address addr_obj = reinterpret_cast<Address>(
   3294       reinterpret_cast<byte*>(obj - kHeapObjectTag));
   3295   HEAP->CreateFillerObjectAt(addr_obj,
   3296                              AllocationMemento::kSize + kPointerSize);
   3297 
   3298   // Give the array a name, making sure not to allocate strings.
   3299   v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
   3300   v8::Context::GetCurrent()->Global()->Set(array_name, array_obj);
   3301 
   3302   // This should crash with a protection violation if we are running a build
   3303   // with the bug.
   3304   AlwaysAllocateScope aa_scope;
   3305   v8::Script::Compile(mote_code_string)->Run();
   3306 }
   3307 
   3308 
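         // Regression test: optimizing a function that is already enqueued as a code
         // flushing candidate (while its old code sits on an evacuation candidate)
         // must not corrupt the candidate list.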
   3309 TEST(Regress168801) {
   3310   i::FLAG_always_compact = true;
   3311   i::FLAG_cache_optimized_code = false;
   3312   i::FLAG_allow_natives_syntax = true;
   3313   i::FLAG_flush_code_incrementally = true;
   3314   CcTest::InitializeVM();
   3315   Isolate* isolate = Isolate::Current();
   3316   Heap* heap = isolate->heap();
   3317   HandleScope scope(isolate);
   3318 
   3319   // Perform one initial GC to enable code flushing.
   3320   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   3321 
   3322   // Ensure the code ends up on an evacuation candidate.
   3323   SimulateFullSpace(heap->code_space());
   3324 
   3325   // Prepare an unoptimized function that is eligible for code flushing.
   3326   Handle<JSFunction> function;
   3327   {
   3328     HandleScope inner_scope(isolate);
   3329     CompileRun("function mkClosure() {"
   3330                "  return function(x) { return x + 1; };"
   3331                "}"
   3332                "var f = mkClosure();"
   3333                "f(1); f(2);");
   3334 
   3335     Handle<JSFunction> f =
   3336         v8::Utils::OpenHandle(
   3337             *v8::Handle<v8::Function>::Cast(
   3338                 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   3339     CHECK(f->is_compiled());
   3340     const int kAgingThreshold = 6;
   3341     for (int i = 0; i < kAgingThreshold; i++) {
   3342       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   3343     }
   3344 
   3345     function = inner_scope.CloseAndEscape(handle(*f, isolate));
   3346   }
   3347 
    3348   // Simulate incremental marking so that the unoptimized function is enqueued
    3349   // as a candidate for code flushing. The shared function info, however, will
    3350   // not be explicitly enqueued.
   3351   SimulateIncrementalMarking();
   3352 
   3353   // Now optimize the function so that it is taken off the candidate list.
   3354   {
   3355     HandleScope inner_scope(isolate);
   3356     CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
   3357   }
   3358 
   3359   // This cycle will bust the heap and subsequent cycles will go ballistic.
   3360   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3361   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3362 }
   3363 
   3364 
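         // Same setup as Regress168801 above, except that the candidate list is
         // invalidated by loading the debugger (which disables code flushing)
         // instead of by optimizing the function.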
   3365 TEST(Regress173458) {
   3366   i::FLAG_always_compact = true;
   3367   i::FLAG_cache_optimized_code = false;
   3368   i::FLAG_allow_natives_syntax = true;
   3369   i::FLAG_flush_code_incrementally = true;
   3370   CcTest::InitializeVM();
   3371   Isolate* isolate = Isolate::Current();
   3372   Heap* heap = isolate->heap();
   3373   HandleScope scope(isolate);
   3374 
   3375   // Perform one initial GC to enable code flushing.
   3376   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   3377 
   3378   // Ensure the code ends up on an evacuation candidate.
   3379   SimulateFullSpace(heap->code_space());
   3380 
   3381   // Prepare an unoptimized function that is eligible for code flushing.
   3382   Handle<JSFunction> function;
   3383   {
   3384     HandleScope inner_scope(isolate);
   3385     CompileRun("function mkClosure() {"
   3386                "  return function(x) { return x + 1; };"
   3387                "}"
   3388                "var f = mkClosure();"
   3389                "f(1); f(2);");
   3390 
   3391     Handle<JSFunction> f =
   3392         v8::Utils::OpenHandle(
   3393             *v8::Handle<v8::Function>::Cast(
   3394                 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
   3395     CHECK(f->is_compiled());
   3396     const int kAgingThreshold = 6;
   3397     for (int i = 0; i < kAgingThreshold; i++) {
   3398       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
   3399     }
   3400 
   3401     function = inner_scope.CloseAndEscape(handle(*f, isolate));
   3402   }
   3403 
    3404   // Simulate incremental marking so that the unoptimized function is enqueued
    3405   // as a candidate for code flushing. The shared function info, however, will
    3406   // not be explicitly enqueued.
   3407   SimulateIncrementalMarking();
   3408 
   3409 #ifdef ENABLE_DEBUGGER_SUPPORT
   3410   // Now enable the debugger which in turn will disable code flushing.
   3411   CHECK(isolate->debug()->Load());
   3412 #endif  // ENABLE_DEBUGGER_SUPPORT
   3413 
   3414   // This cycle will bust the heap and subsequent cycles will go ballistic.
   3415   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3416   heap->CollectAllGarbage(Heap::kNoGCFlags);
   3417 }
   3418 
   3419 
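         // Visitor that ignores all pointers; used below only to exercise
         // HandleScopeImplementer::Iterate without touching the visited handles.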
   3420 class DummyVisitor : public ObjectVisitor {
   3421  public:
   3422   void VisitPointers(Object** start, Object** end) { }
   3423 };
   3424 
   3425 
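         // Fill the current handle block exactly to its limit, then check that
         // iterating the handle scopes while a DeferredHandleScope is open works.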
   3426 TEST(DeferredHandles) {
   3427   CcTest::InitializeVM();
   3428   Isolate* isolate = Isolate::Current();
   3429   Heap* heap = isolate->heap();
   3430   v8::HandleScope scope;
   3431   v8::ImplementationUtilities::HandleScopeData* data =
   3432       isolate->handle_scope_data();
   3433   Handle<Object> init(heap->empty_string(), isolate);
   3434   while (data->next < data->limit) {
   3435     Handle<Object> obj(heap->empty_string(), isolate);
   3436   }
   3437   // An entire block of handles has been filled.
    3438   // The next handle would require a new block.
   3439   ASSERT(data->next == data->limit);
   3440 
   3441   DeferredHandleScope deferred(isolate);
   3442   DummyVisitor visitor;
   3443   isolate->handle_scope_implementer()->Iterate(&visitor);
   3444   deferred.Detach();
   3445 }
   3446 
   3447 
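         // A single 100 MB incremental marking step should be enough to finish
         // marking even when the heap contains a large (10M element) array.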
   3448 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
   3449   CcTest::InitializeVM();
   3450   v8::HandleScope scope(CcTest::isolate());
   3451   CompileRun("function f(n) {"
   3452              "    var a = new Array(n);"
   3453              "    for (var i = 0; i < n; i += 100) a[i] = i;"
   3454              "};"
   3455              "f(10 * 1024 * 1024);");
   3456   IncrementalMarking* marking = HEAP->incremental_marking();
   3457   if (marking->IsStopped()) marking->Start();
   3458   // This big step should be sufficient to mark the whole array.
   3459   marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
   3460   ASSERT(marking->IsComplete());
   3461 }
   3462