// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "log.h"
#include "v8-counters.h"

namespace v8 {
namespace internal {

int Heap::MaxObjectSizeInPagedSpace() {
  return Page::kMaxHeapObjectSize;
}


Object* Heap::AllocateSymbol(Vector<const char> str,
                             int chars,
                             uint32_t hash_field) {
  unibrow::Utf8InputBuffer<> buffer(str.start(),
                                    static_cast<unsigned>(str.length()));
  return AllocateInternalSymbol(&buffer, chars, hash_field);
}


Object* Heap::AllocateRaw(int size_in_bytes,
                          AllocationSpace space,
                          AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  // In debug mode, periodically simulate an allocation failure to
  // exercise the GC and retry paths (controlled by --gc-interval).
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(size_in_bytes, space);
  }
  Counters::objs_since_last_full.Increment();
  Counters::objs_since_last_young.Increment();
#endif
  Object* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}
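

// A minimal usage sketch for AllocateRaw() above (hypothetical call
// site, for illustration only): allocation is attempted in new space
// first; when new space is full and always_allocate() is in effect, the
// request falls back to retry_space, otherwise the RetryAfterGC failure
// is propagated to the caller.
//
//   Object* result = Heap::AllocateRaw(FixedArray::SizeFor(length),
//                                      NEW_SPACE,
//                                      OLD_POINTER_SPACE);
//   if (result->IsFailure()) return result;  // Let the caller retry/GC.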


Object* Heap::NumberFromInt32(int32_t value) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value));
}


Object* Heap::NumberFromUint32(uint32_t value) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value));
}


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);
  delete *resource_addr;
  // Clear the resource pointer in the string.
  *resource_addr = NULL;
}


Object* Heap::AllocateRawMap() {
#ifdef DEBUG
  Counters::objs_since_last_full.Increment();
  Counters::objs_since_last_young.Increment();
#endif
  Object* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((OffsetFrom(result) & kMapAlignmentMask) == kHeapObjectTag);
  }
#endif
  return result;
}


Object* Heap::AllocateRawCell() {
#ifdef DEBUG
  Counters::objs_since_last_full.Increment();
  Counters::objs_since_last_young.Increment();
#endif
  Object* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation, or
  // - to-space is already 25% full.
  return old_address < new_space_.age_mark()
      || (new_space_.Size() + object_size) >= (new_space_.Capacity() >> 2);
}


void Heap::RecordWrite(Address address, int offset) {
  if (new_space_.Contains(address)) return;
  ASSERT(!new_space_.FromSpaceContains(address));
  SLOW_ASSERT(Contains(address + offset));
  Page::SetRSet(address, offset);
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}
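

// Illustrative classification for TargetSpaceId() below (hypothetical
// assertions, not part of the original file): pointer-free objects such
// as heap numbers are promoted to old data space, while objects the GC
// must trace through, such as JS objects, go to old pointer space.
//
//   ASSERT(Heap::TargetSpaceId(HEAP_NUMBER_TYPE) == OLD_DATA_SPACE);
//   ASSERT(Heap::TargetSpaceId(JS_OBJECT_TYPE) == OLD_POINTER_SPACE);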


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space;
  // all other object types are promoted to old pointer space.  We do not
  // use object->IsHeapNumber() and object->IsSeqString() because we
  // already know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);

  if (type < FIRST_NONSTRING_TYPE) {
    // There are three string representations: sequential strings, cons
    // strings, and external strings.  Only cons strings contain
    // non-map-word pointers to heap objects.
    return ((type & kStringRepresentationMask) == kConsStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


void Heap::CopyBlock(Object** dst, Object** src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  // Use block-copying memcpy if the segment we're copying is large
  // enough to justify the extra call/setup overhead.
  static const int kBlockCopyLimit = 16 * kPointerSize;

  if (byte_size >= kBlockCopyLimit) {
    memcpy(dst, src, byte_size);
  } else {
    int remaining = byte_size / kPointerSize;
    do {
      remaining--;
      *dst++ = *src++;
    } while (remaining > 0);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to-space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already
  // been copied.
  if (first_word.IsForwardingAddress()) {
    *p = first_word.ToForwardingAddress();
    return;
  }

  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}
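

// Illustrative bookkeeping sketch for AdjustAmountOfExternalAllocatedMemory()
// below (hypothetical embedder call sites): externally allocated backing
// stores report their size so the heap can trigger a full GC once external
// growth since the last global GC exceeds external_allocation_limit_.
//
//   Heap::AdjustAmountOfExternalAllocatedMemory(payload_size);   // acquire
//   ...
//   Heap::AdjustAmountOfExternalAllocatedMemory(-payload_size);  // release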


int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
  ASSERT(HasBeenSetup());
  int amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    }
    int amount_since_last_global_gc =
        amount_of_external_allocated_memory_ -
        amount_of_external_allocated_memory_at_last_global_gc_;
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(false);
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    }
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}


void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}


#define GC_GREEDY_CHECK() \
  ASSERT(!FLAG_gc_greedy || v8::internal::Heap::GarbageCollectionGreedyCheck())


// Calls the FUNCTION_CALL function up to three times, collecting garbage
// between attempts, to guarantee that any allocations performed during
// the call will succeed if there is enough memory.
//
// Warning: Do not use the identifiers __object__ or __scope__ in a
// call to this macro.

#define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)         \
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    Object* __object__ = FUNCTION_CALL;                                   \
    if (!__object__->IsFailure()) RETURN_VALUE;                           \
    if (__object__->IsOutOfMemoryFailure()) {                             \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0");      \
    }                                                                     \
    if (!__object__->IsRetryAfterGC()) RETURN_EMPTY;                      \
    Heap::CollectGarbage(Failure::cast(__object__)->requested(),          \
                         Failure::cast(__object__)->allocation_space());  \
    __object__ = FUNCTION_CALL;                                           \
    if (!__object__->IsFailure()) RETURN_VALUE;                           \
    if (__object__->IsOutOfMemoryFailure()) {                             \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1");      \
    }                                                                     \
    if (!__object__->IsRetryAfterGC()) RETURN_EMPTY;                      \
    Counters::gc_last_resort_from_handles.Increment();                    \
    Heap::CollectAllGarbage(false);                                       \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __object__ = FUNCTION_CALL;                                         \
    }                                                                     \
    if (!__object__->IsFailure()) RETURN_VALUE;                           \
    if (__object__->IsOutOfMemoryFailure() ||                             \
        __object__->IsRetryAfterGC()) {                                   \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2");      \
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)


#define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE)                \
  CALL_AND_RETRY(FUNCTION_CALL,                                \
                 return Handle<TYPE>(TYPE::cast(__object__)),  \
                 return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(FUNCTION_CALL) \
  CALL_AND_RETRY(FUNCTION_CALL, return, return)


#ifdef DEBUG

inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (Heap::InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid #ifdefs around its calls in release mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    ASSERT(Heap::InNewSpace(new_space_strings_[i]));
    ASSERT(new_space_strings_[i] != Heap::raw_unchecked_null_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
    ASSERT(old_space_strings_[i] != Heap::raw_unchecked_null_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!Heap::InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
  Verify();
}

} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_