// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "heap.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}

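// Note on the queue layout: insert() above pushes each (target, size) pair
// with rear_ growing downwards, so an entry occupies two words with the size
// stored below the target pointer. A minimal sketch of the matching pop (the
// real remove() is declared in heap.h; this mirrors its shape, it is not a
// definition from this file):
//
//   *target = reinterpret_cast<HeapObject*>(*(--front_));
//   *size = static_cast<int>(*(--front_));
//
// front_ walks down from the original top of the queue, so entries come out
// in FIFO order relative to insert().
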
MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  const char* start = str.start();
  int length = str.length();
  int non_ascii_start = String::NonAsciiStart(start, length);
  if (non_ascii_start >= length) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return AllocateStringFromOneByte(str, pretenure);
  }
  // Non-ASCII and we need to decode.
  return AllocateStringFromUtf8Slow(str, non_ascii_start, pretenure);
}


template<>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template<>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


MaybeObject* Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(
        Vector<const uint8_t>::cast(str), hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template<typename T>
MaybeObject* Heap::AllocateInternalizedStringImpl(
    T t, int chars, uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str,
                                                     uint32_t hash_field) {
  if (str.length() > SeqOneByteString::kMaxLength) {
    return Failure::OutOfMemoryException(0x2);
  }
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // String maps are all immortal immovable objects.
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqOneByteString::kHeaderSize,
              str.start(), str.length());

  return answer;
}

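// The block-scoped unwrap above is the standard MaybeObject idiom of this
// code base: a failed allocation yields a Failure (e.g. retry-after-GC) that
// must be returned to the caller unchanged:
//
//   { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
//     if (!maybe_result->ToObject(&result)) return maybe_result;
//   }
//
// Callers higher up the stack (see CALL_AND_RETRY below) react to such a
// failure by collecting garbage and retrying the allocation.
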
MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                     uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException(0x3);
  }
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqTwoByteString::kHeaderSize,
              str.start(), str.length() * kUC16Size);

  return answer;
}


MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  return CopyConstantPoolArrayWithMap(src, src->map());
}


MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(AllowHandleAllocation::IsAllowed());
  ASSERT(AllowHeapAllocation::IsAllowed());
  ASSERT(gc_state_ == NOT_IN_GC);
  HeapProfiler* profiler = isolate_->heap_profiler();
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (profiler->is_tracking_allocations() && result->To(&object)) {
        profiler->AllocationEvent(object->address(), size_in_bytes);
      }
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    result = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  if (profiler->is_tracking_allocations() && result->To(&object)) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }
  return result;
}


MaybeObject* Heap::NumberFromInt32(
    int32_t value, PretenureFlag pretenure) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value), pretenure);
}

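// Example: with 31-bit Smis (the 32-bit tagging scheme) Smi::IsValid covers
// [-2^30, 2^30 - 1], so NumberFromInt32(1 << 29) yields a Smi while
// NumberFromInt32(1 << 30) falls through and boxes a HeapNumber. On 64-bit
// targets the Smi payload is wider, every int32_t fits, and NumberFromUint32
// below only allocates for values above kMaxInt.
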
MaybeObject* Heap::NumberFromUint32(
    uint32_t value, PretenureFlag pretenure) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value), pretenure);
}


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation, or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                       (new_space_.EffectiveCapacity() >> 2);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}

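// RecordWrite/RecordWrites feed the store buffer, which remembers
// old-to-new pointers so a scavenge need not scan the whole old generation.
// An illustrative call site after storing a pointer field (the field and
// offset names are hypothetical):
//
//   object->set_foo(value);                            // hypothetical field
//   heap->RecordWrite(object->address(), kFooOffset);  // hypothetical offset
//
// In practice these calls are emitted by the write-barrier machinery rather
// than written by hand.
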
OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != CELL_TYPE);
  ASSERT(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    ASSERT(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* object, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (object->map() == one_pointer_filler_map()) return false;
  InstanceType type = object->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || object->IsFiller());
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  // A word-by-word forward copy is safe when the ranges do not overlap, or
  // overlap with dst below src; otherwise fall back to memmove semantics.
  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    OS::MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::UpdateAllocationSiteFeedback(HeapObject* object) {
  if (FLAG_allocation_site_pretenuring && object->IsJSObject()) {
    AllocationMemento* memento = AllocationMemento::FindForJSObject(
        JSObject::cast(object), true);
    if (memento != NULL) {
      ASSERT(memento->IsValid());
      memento->GetAllocationSite()->IncrementMementoFoundCount();
    }
  }
}

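// ScavengePointer above and ScavengeObject below are the per-slot entry
// points of the scavenger. An illustrative sketch of how a root-iterating
// visitor would drive them (the real visitors live in heap.cc):
//
//   void VisitPointers(Object** start, Object** end) {
//     for (Object** p = start; p < end; p++) {
//       if (!heap->InFromSpace(*p)) continue;
//       heap->ScavengePointer(reinterpret_cast<HeapObject**>(p));
//     }
//   }
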
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
}


MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings and force flattening of long strings
  // after we have accumulated a certain amount of string length that we
  // failed to flatten.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}

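// AdjustAmountOfExternalAllocatedMemory below backs the public
// v8::Isolate::AdjustAmountOfExternalAllocatedMemory API. Illustrative
// embedder usage (assuming an embedder-managed backing store):
//
//   isolate->AdjustAmountOfExternalAllocatedMemory(backing_store_size);
//   // ... later, when the buffer is freed:
//   isolate->AdjustAmountOfExternalAllocatedMemory(-backing_store_size);
//
// Positive deltas may trigger a full GC once the external allocation limit
// is exceeded.
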
int64_t Heap::AdjustAmountOfExternalAllocatedMemory(
    int64_t change_in_bytes) {
  ASSERT(HasBeenSetUp());
  int64_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes > 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an overflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
    int64_t amount_since_last_global_gc = PromotedExternalMemorySize();
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an underflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
  }
  if (FLAG_trace_external_memory) {
    PrintPID("%8.0f ms: ", isolate()->time_millis_since_init());
    PrintF("Adjust amount of external memory: delta=%6" V8_PTR_PREFIX "d KB, "
           "amount=%6" V8_PTR_PREFIX "d KB, since_gc=%6" V8_PTR_PREFIX "d KB, "
           "isolate=0x%08" V8PRIxPTR ".\n",
           static_cast<intptr_t>(change_in_bytes / KB),
           static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB),
           static_cast<intptr_t>(PromotedExternalMemorySize() / KB),
           reinterpret_cast<intptr_t>(isolate()));
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}


Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}

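// The computation above recovers the Isolate from the Heap's own address:
// reinterpret_cast<Isolate*>(4)->heap() evaluates the address of the heap_
// member relative to a dummy base of 4, i.e. 4 + the offset of heap_ inside
// Isolate, without dereferencing anything. Subtracting that offset from
// 'this' yields the enclosing Isolate. A plainer (hypothetical) spelling of
// the same idea:
//
//   const intptr_t kHeapOffset =
//       reinterpret_cast<intptr_t>(reinterpret_cast<Isolate*>(4)->heap()) - 4;
//   return reinterpret_cast<Isolate*>(
//       reinterpret_cast<intptr_t>(this) - kHeapOffset);
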
#ifdef DEBUG
#define GC_GREEDY_CHECK(ISOLATE) \
  if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK(ISOLATE) { }
#endif

// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\
  do {                                                                        \
    GC_GREEDY_CHECK(ISOLATE);                                                 \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                            \
    Object* __object__ = NULL;                                                \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
    if (__maybe_object__->IsOutOfMemory()) {                                  \
      OOM;                                                                    \
    }                                                                         \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                    \
    (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)->       \
                                          allocation_space(),                 \
                                      "allocation failure");                  \
    __maybe_object__ = FUNCTION_CALL;                                         \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
    if (__maybe_object__->IsOutOfMemory()) {                                  \
      OOM;                                                                    \
    }                                                                         \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                    \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__;                                          \
      __maybe_object__ = FUNCTION_CALL;                                       \
    }                                                                         \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
    if (__maybe_object__->IsOutOfMemory()) {                                  \
      OOM;                                                                    \
    }                                                                         \
    if (__maybe_object__->IsRetryAfterGC()) {                                 \
      /* TODO(1181417): Fix this. */                                          \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    }                                                                         \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(                                                \
    ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)                       \
  CALL_AND_RETRY(                                                             \
      ISOLATE,                                                                \
      FUNCTION_CALL,                                                          \
      RETURN_VALUE,                                                           \
      RETURN_EMPTY,                                                           \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY", true))

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE,                                              \
                        FUNCTION_CALL,                                        \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL)                       \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


#define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL)             \
  CALL_AND_RETRY(ISOLATE,                                                     \
                 FUNCTION_CALL,                                               \
                 return __object__,                                           \
                 return __maybe_object__,                                     \
                 return __maybe_object__)

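// Typical use of CALL_HEAP_FUNCTION from handle-based code (an illustrative
// sketch in the style of factory.cc, not a definition from this file):
//
//   Handle<FixedArray> Factory::NewFixedArray(int size, PretenureFlag p) {
//     CALL_HEAP_FUNCTION(
//         isolate(),
//         isolate()->heap()->AllocateFixedArray(size, p),
//         FixedArray);
//   }
//
// On a retry-after-GC failure the macro collects the failing space and
// retries; as a last resort it collects all available garbage, retries once
// more inside an AlwaysAllocateScope, and aborts the process on OOM.
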
void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    ASSERT(heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    ASSERT(!heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}

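// The TranscendentalCache below memoizes unary math functions (sin, cos,
// log, and so on). Each SubCache hashes the 64-bit bit pattern of the input
// double to an element slot and stores the boxed result, so repeated calls
// like
//
//   MaybeObject* first = cache->Get(TranscendentalCache::SIN, 0.5);
//   MaybeObject* again = cache->Get(TranscendentalCache::SIN, 0.5);
//
// hand back the cached HeapNumber on the second call, assuming no
// intervening eviction of that slot (illustrative; the Type values are the
// ones handled in Calculate() below).
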
MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(isolate_, type);
  }
  return cache->Get(input);
}


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}


double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return fast_cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return fast_log(input);
    case SIN:
      return fast_sin(input);
    case TAN:
      return fast_tan(input);
    default:
      return 0.0;  // Never happens.
  }
}


MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}


AlwaysAllocateScope::AlwaysAllocateScope() {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  Isolate* isolate = Isolate::Current();
  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
  isolate->heap()->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->always_allocate_scope_depth_--;
  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
}


DisallowAllocationFailure::DisallowAllocationFailure() {
#ifdef DEBUG
  Isolate* isolate = Isolate::Current();
  old_state_ = isolate->heap()->disallow_allocation_failure_;
  isolate->heap()->disallow_allocation_failure_ = true;
#endif
}


DisallowAllocationFailure::~DisallowAllocationFailure() {
#ifdef DEBUG
  Isolate* isolate = Isolate::Current();
  isolate->heap()->disallow_allocation_failure_ = old_state_;
#endif
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_