// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "heap.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

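// Editorial note: the promotion queue is stored in the otherwise unused
// words at the top of to-space and grows downward. Each insert() pushes a
// two-word entry (the target pointer, then its size) below rear_; once the
// queue is in danger of colliding with the allocation area (guard_ is set),
// the queue head is relocated and subsequent entries go to emergency_stack_.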
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}

MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  if (String::IsAscii(str.start(), str.length())) {
    // If the string is ASCII, we do not need to convert the characters,
    // since UTF-8 is backward compatible with ASCII.
    return AllocateStringFromAscii(str, pretenure);
  }
  // Non-ASCII, so we need to decode.
  return AllocateStringFromUtf8Slow(str, pretenure);
}


MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
                                  int chars,
                                  uint32_t hash_field) {
  unibrow::Utf8InputBuffer<> buffer(str.start(),
                                    static_cast<unsigned>(str.length()));
  return AllocateInternalSymbol(&buffer, chars, hash_field);
}


MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
                                       uint32_t hash_field) {
  if (str.length() > SeqAsciiString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = ascii_symbol_map();
  int size = SeqAsciiString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // String maps are all immortal immovable objects.
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
         str.start(), str.length());

  return answer;
}


MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
                                         uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = symbol_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
         str.start(), str.length() * kUC16Size);

  return answer;
}


MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


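// Editorial note on AllocateRaw below: retry_space only matters inside an
// AlwaysAllocateScope. If a new-space allocation fails under such a scope,
// the allocation is retried in retry_space; otherwise the failure is
// returned to the caller so that a garbage collection can be scheduled.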
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


MaybeObject* Heap::NumberFromInt32(
    int32_t value, PretenureFlag pretenure) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value), pretenure);
}


MaybeObject* Heap::NumberFromUint32(
    uint32_t value, PretenureFlag pretenure) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value), pretenure);
}


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


MaybeObject* Heap::AllocateRawMap() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
          static_cast<intptr_t>(kHeapObjectTag));
  }
#endif
  return result;
}


MaybeObject* Heap::AllocateRawCell() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address addr) {
  return new_space_.Contains(addr);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation, or
  // - to-space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                        (new_space_.EffectiveCapacity() >> 2);
}

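// Worked example (editorial): if new space's effective capacity is 2 MB,
// EffectiveCapacity() >> 2 is 512 KB, so once the bytes already allocated
// in to-space plus the candidate object reach 512 KB, every surviving
// object is promoted regardless of whether it is below the age mark.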

void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);

  if (type < FIRST_NONSTRING_TYPE) {
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            byte_size / kPointerSize);
}


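// Editorial note: MoveBlock copies forward word-by-word when the regions
// cannot clobber each other (dst < src, or dst at or beyond
// src + byte_size); in the remaining, overlapping case it defers to
// memmove.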
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    memmove(dst, src, byte_size);
  }
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(HEAP->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(HEAP->InFromSpace(*p));
    *p = dest;
    return;
  }

  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}

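// Editorial sketch of the fast path above: from-space contents are dead
// after a scavenge, so the map word of an evacuated object is reused as a
// forwarding slot. Revisiting an already-copied object reduces to:
//
//   MapWord w = object->map_word();
//   if (w.IsForwardingAddress()) *p = w.ToForwardingAddress();
//
// Only the slow path (ScavengeObjectSlow) copies the object and installs
// the forwarding address.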

bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
}


MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings, and force flattening of long strings
  // only after we have accumulated a certain amount of string length
  // that we failed to flatten.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}

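// Editorial note on the heuristic above: TryFlatten() is always attempted.
// For strings longer than kMaxAlwaysFlattenLength, an allocation failure
// during flattening is tolerated (the unflattened string is returned) until
// roughly 16 KB of such failed-to-flatten length has accumulated; after
// that, the failure is propagated so the caller's retry machinery forces
// the flattening to succeed.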

int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
  ASSERT(HasBeenSetUp());
  int amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    }
    int amount_since_last_global_gc =
        amount_of_external_allocated_memory_ -
        amount_of_external_allocated_memory_at_last_global_gc_;
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    }
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}

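// Editorial note (assumption): this accounting is normally driven from the
// public v8::V8::AdjustAmountOfExternalAllocatedMemory API, by which
// embedders report memory kept alive on behalf of JavaScript objects;
// exceeding external_allocation_limit_ since the last full GC forces one.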

void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}


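// Editorial note on the pointer arithmetic below: the Heap is embedded by
// value in its Isolate, so the Isolate is recovered by subtracting the
// offset of the heap_ field from `this`. The subexpression
// reinterpret_cast<Isolate*>(4)->heap() evaluates to 4 + the field offset
// without dereferencing anything, making this a hand-rolled offsetof.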
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif

// Calls the FUNCTION_CALL function and retries it up to two more times
// (three attempts in total) to guarantee that any allocations performed
// during the call will succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                            \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                    allocation_space(),                   \
                                    "allocation failure");                \
    __maybe_object__ = FUNCTION_CALL;                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc");        \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __maybe_object__ = FUNCTION_CALL;                                   \
    }                                                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory() ||                              \
        __maybe_object__->IsRetryAfterGC()) {                             \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)


#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)       \
  CALL_AND_RETRY(ISOLATE,                                      \
                 FUNCTION_CALL,                                \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),  \
                 return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)

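// Illustrative use (editorial; the wrapper name is hypothetical): handle-
// returning helpers are typically written as
//
//   Handle<String> NewAsciiSymbol(Isolate* isolate,
//                                 Vector<const char> str, uint32_t hash) {
//     CALL_HEAP_FUNCTION(
//         isolate, isolate->heap()->AllocateAsciiSymbol(str, hash), String);
//   }
//
// so a RetryAfterGC failure transparently triggers the collections above.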

#ifdef DEBUG

inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid #ifdefs around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    ASSERT(heap_->InNewSpace(new_space_strings_[i]));
    ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
    ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
  if (FLAG_verify_heap) {
    Verify();
  }
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);
}


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}


double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return fast_cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return fast_log(input);
    case SIN:
      return fast_sin(input);
    case TAN:
      return fast_tan(input);
    default:
      return 0.0;  // Never happens.
  }
}


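// Editorial note on SubCache::Get below: the Converter union reinterprets
// the double input as two 32-bit halves, which are hashed to pick a bucket.
// Each bucket stores the raw input bits together with the cached HeapNumber
// result, so a hit requires a bit-for-bit match of the input.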
MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}


AlwaysAllocateScope::AlwaysAllocateScope() {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
  HEAP->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  HEAP->always_allocate_scope_depth_--;
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
}


LinearAllocationScope::LinearAllocationScope() {
  HEAP->linear_allocation_scope_depth_++;
}


LinearAllocationScope::~LinearAllocationScope() {
  HEAP->linear_allocation_scope_depth_--;
  ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
}


#ifdef DEBUG
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      ASSERT(HEAP->Contains(object));
      ASSERT(object->map()->IsMap());
    }
  }
}
#endif


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
}


#ifdef DEBUG
DisallowAllocationFailure::DisallowAllocationFailure() {
  old_state_ = HEAP->disallow_allocation_failure_;
  HEAP->disallow_allocation_failure_ = true;
}


DisallowAllocationFailure::~DisallowAllocationFailure() {
  HEAP->disallow_allocation_failure_ = old_state_;
}
#endif


#ifdef DEBUG
AssertNoAllocation::AssertNoAllocation() {
  old_state_ = HEAP->allow_allocation(false);
}


AssertNoAllocation::~AssertNoAllocation() {
  HEAP->allow_allocation(old_state_);
}


DisableAssertNoAllocation::DisableAssertNoAllocation() {
  old_state_ = HEAP->allow_allocation(true);
}


DisableAssertNoAllocation::~DisableAssertNoAllocation() {
  HEAP->allow_allocation(old_state_);
}

#else

AssertNoAllocation::AssertNoAllocation() { }
AssertNoAllocation::~AssertNoAllocation() { }
DisableAssertNoAllocation::DisableAssertNoAllocation() { }
DisableAssertNoAllocation::~DisableAssertNoAllocation() { }

#endif


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_