// Copyright 2006-2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "heap.h"
#include "objects.h"
#include "isolate.h"
#include "v8-counters.h"

namespace v8 {
namespace internal {

void PromotionQueue::insert(HeapObject* target, int size) {
  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
  ASSERT(reinterpret_cast<Address>(rear_) >= HEAP->new_space()->top());
}


int Heap::MaxObjectSizeInPagedSpace() {
  return Page::kMaxHeapObjectSize;
}


MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  if (String::IsAscii(str.start(), str.length())) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return AllocateStringFromAscii(str, pretenure);
  }
  // Non-ASCII and we need to decode.
  return AllocateStringFromUtf8Slow(str, pretenure);
}


MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
                                  int chars,
                                  uint32_t hash_field) {
  unibrow::Utf8InputBuffer<> buffer(str.start(),
                                    static_cast<unsigned>(str.length()));
  return AllocateInternalSymbol(&buffer, chars, hash_field);
}


MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
                                       uint32_t hash_field) {
  if (str.length() > SeqAsciiString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = ascii_symbol_map();
  int size = SeqAsciiString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
                   ? lo_space_->AllocateRaw(size)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
         str.start(), str.length());

  return answer;
}
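
// Illustrative sketch (added comment, not part of the original source): the
// allocation idiom used above and throughout this file.  A MaybeObject is
// either a real Object or a Failure (e.g. retry-after-GC); ToObject() only
// succeeds for the former, so failures are propagated to the caller, which
// is expected to trigger a GC and retry:
//
//   Object* result;
//   { MaybeObject* maybe_result = some_space->AllocateRaw(size);  // hypothetical space
//     if (!maybe_result->ToObject(&result)) return maybe_result;
//   }
//   // Here `result` is a valid but still uninitialized heap object.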


MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
                                         uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = symbol_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
                   ? lo_space_->AllocateRaw(size)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
         str.start(), str.length() * kUC16Size);

  return answer;
}

MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}
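
// Illustrative sketch (added comment, not part of the original source): how
// a caller typically uses AllocateRaw.  `retry_space` names the space to
// fall back to when new-space allocation fails while always_allocate() is
// in effect; the sizes and spaces below are made-up examples:
//
//   MaybeObject* maybe = AllocateRaw(size_in_bytes, NEW_SPACE, OLD_DATA_SPACE);
//   Object* obj;
//   if (!maybe->ToObject(&obj)) return maybe;  // e.g. Failure::RetryAfterGC.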


MaybeObject* Heap::NumberFromInt32(int32_t value) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value));
}


MaybeObject* Heap::NumberFromUint32(uint32_t value) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value));
}
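
// Illustrative examples (added comment, not part of the original source),
// assuming a 32-bit build where Smi::IsValid() covers roughly a 31-bit
// signed range:
//
//   NumberFromInt32(42);            // Fits in a Smi: no allocation needed.
//   NumberFromUint32(3000000000u);  // Too large for a Smi: allocates a
//                                   // HeapNumber, so the result may be a
//                                   // retry-after-GC Failure.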


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
  }

  // Clear the resource pointer in the string.
  *resource_addr = NULL;
}


MaybeObject* Heap::AllocateRawMap() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
          static_cast<intptr_t>(kHeapObjectTag));
  }
#endif
  return result;
}


MaybeObject* Heap::AllocateRawCell() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  return old_address < new_space_.age_mark()
      || (new_space_.Size() + object_size) >= (new_space_.Capacity() >> 2);
}
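
// Worked example (added comment, not part of the original source): with a
// new-space capacity of 8 MB, the second condition promotes an object once
// allocating it would push the used size to 8 MB >> 2 = 2 MB, i.e. past the
// "25% full" mark mentioned above; objects below the age mark (survivors of
// a previous scavenge) are promoted regardless of how full to-space is.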


void Heap::RecordWrite(Address address, int offset) {
  if (new_space_.Contains(address)) return;
  ASSERT(!new_space_.FromSpaceContains(address));
  SLOW_ASSERT(Contains(address + offset));
  Page::FromAddress(address)->MarkRegionDirty(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (new_space_.Contains(address)) return;
  ASSERT(!new_space_.FromSpaceContains(address));
  Page* page = Page::FromAddress(address);
  page->SetRegionMarks(page->GetRegionMarks() |
      page->GetRegionMaskForSpan(address + start, len * kPointerSize));
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space;
  // all other object types are promoted to old pointer space.  We do not
  // use object->IsHeapNumber() and object->IsSeqString() because we
  // already know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);

  if (type < FIRST_NONSTRING_TYPE) {
    // There are three string representations: sequential strings, cons
    // strings, and external strings.  Only cons strings contain
    // non-map-word pointers to heap objects.
    return ((type & kStringRepresentationMask) == kConsStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}
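
// Illustrative examples (added comment, not part of the original source) of
// the classification performed above:
//
//   TargetSpaceId(HEAP_NUMBER_TYPE);  // -> OLD_DATA_SPACE (no pointers to scan).
//   TargetSpaceId(CONS_STRING_TYPE);  // -> OLD_POINTER_SPACE (points at its halves).
//   TargetSpaceId(JS_OBJECT_TYPE);    // -> OLD_POINTER_SPACE.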


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            byte_size / kPointerSize);
}


void Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                   Address src,
                                                   int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  Page* page = Page::FromAddress(dst);
  uint32_t marks = page->GetRegionMarks();

  for (int remaining = byte_size / kPointerSize;
       remaining > 0;
       remaining--) {
    Memory::Object_at(dst) = Memory::Object_at(src);

    if (InNewSpace(Memory::Object_at(dst))) {
      marks |= page->GetRegionMaskForAddress(dst);
    }

    dst += kPointerSize;
    src += kPointerSize;
  }

  page->SetRegionMarks(marks);
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    memmove(dst, src, byte_size);
  }
}
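
// Note (added comment, not part of the original source): the guard above
// checks that dst does not lie inside [src, src + byte_size).  In that case
// a forward word-by-word copy would read slots it has already overwritten,
// so the overlapping case is delegated to memmove, which handles overlap in
// either direction correctly.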


void Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
                                                   Address src,
                                                   int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));
  ASSERT((dst < src) || (dst >= (src + byte_size)));

  CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, byte_size);
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(HEAP->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    *p = first_word.ToForwardingAddress();
    return;
  }

  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}
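
// Note (added comment, not part of the original source, and a sketch of
// behavior implemented elsewhere): on the slow path ScavengeObjectSlow is
// expected to copy the object into to-space or promote it into old space,
// then store a forwarding address in the from-space copy's map word, so
// that later visits to other slots referencing the old copy take the fast
// path above.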


bool Heap::CollectGarbage(AllocationSpace space) {
  return CollectGarbage(space, SelectGarbageCollector(space));
}


MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings, and force flattening of long strings
  // once the total length of strings we failed to flatten exceeds a
  // threshold.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16 * KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}
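
// Worked example (added comment, not part of the original source): for a
// 10-character string the TryFlatten() result is returned directly, so a
// retry-after-GC failure propagates and the string always ends up flat.
// For a 1 MB string a failed flattening attempt is swallowed: its length is
// added to unflattened_strings_length_ and the unflattened string is
// returned.  Once that running total reaches kFlattenLongThreshold (16 KB),
// failures for long strings propagate again and flattening is forced.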


int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
  ASSERT(HasBeenSetup());
  int amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    }
    int amount_since_last_global_gc =
        amount_of_external_allocated_memory_ -
        amount_of_external_allocated_memory_at_last_global_gc_;
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(false);
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    }
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}
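
// Illustrative example (added comment, not part of the original source): an
// embedder that hands V8 an external string backed by a 4 MB buffer would
// report the allocation with a positive delta and the release with a
// negative one, typically reaching this method through the public
// v8::V8::AdjustAmountOfExternalAllocatedMemory API.  Once the bytes added
// since the last full GC exceed external_allocation_limit_,
// CollectAllGarbage(false) is triggered.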


void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}

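// Note (added comment, not part of the original source): the expression
// below recovers the owning Isolate without a stored back-pointer, assuming
// Isolate::heap() returns the address of the Isolate's embedded Heap
// member.  Pretending an Isolate lives at address 4,
// reinterpret_cast<Isolate*>(4)->heap() evaluates to 4 + offset-of-heap;
// subtracting that from `this` and adding the fake base 4 back yields
// `this` minus the offset, i.e. the address of the enclosing Isolate.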
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif


// Calls the FUNCTION_CALL function, collecting garbage and retrying if
// necessary (up to three attempts in total), to guarantee that any
// allocations performed during the call will succeed if there is enough
// memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                            \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                    allocation_space());                  \
    __maybe_object__ = FUNCTION_CALL;                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
    ISOLATE->heap()->CollectAllAvailableGarbage();                        \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __maybe_object__ = FUNCTION_CALL;                                   \
    }                                                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory() ||                              \
        __maybe_object__->IsRetryAfterGC()) {                             \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)


// TODO(isolates): cache isolate: either accept as a parameter or
//                 set to some known symbol (__CUR_ISOLATE__?)
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)       \
  CALL_AND_RETRY(ISOLATE,                                      \
                 FUNCTION_CALL,                                \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),  \
                 return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
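
// Illustrative usage sketch (added comment, not part of the original
// source), following the pattern of the handle-allocating helpers elsewhere
// in V8; the wrapper name is made up and AllocateFixedArray stands in for
// any heap allocation function:
//
//   Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int size) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(size),
//                        FixedArray);
//   }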


#ifdef DEBUG

inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    ASSERT(heap_->InNewSpace(new_space_strings_[i]));
    ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_null_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
    ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_null_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
  Verify();
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);
}


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}


double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return log(input);
    case SIN:
      return sin(input);
    case TAN:
      return tan(input);
    default:
      return 0.0;  // Never happens.
  }
}


MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}
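
// Note (added comment, not part of the original source): the Converter
// union reinterprets the double input as two 32-bit integers, so the hit
// test above compares raw bit patterns rather than using ==.  This lets NaN
// inputs hit previously cached NaN entries and distinguishes +0.0 from
// -0.0, at the cost of treating bitwise-different NaNs as distinct inputs.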


Heap* _inline_get_heap_() {
  return HEAP;
}


void MarkCompactCollector::SetMark(HeapObject* obj) {
  tracer_->increment_marked_count();
#ifdef DEBUG
  UpdateLiveObjectCount(obj);
#endif
  obj->SetMark();
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_