
Lines matching refs: Heap (excerpts from V8's src/heap.cc; the leading number on each entry is that line's position in the source file)

39 #include "heap-profiler.h"
68 Heap::Heap()
126 previous_survival_rate_trend_(Heap::STABLE),
127 survival_rate_trend_(Heap::STABLE),
188 intptr_t Heap::Capacity() {
201 intptr_t Heap::CommittedMemory() {
215 size_t Heap::CommittedPhysicalMemory() {
229 intptr_t Heap::CommittedMemoryExecutable() {
236 void Heap::UpdateMaximumCommitted() {
246 intptr_t Heap::Available() {
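The accounting methods at lines 188-246 (Capacity, CommittedMemory, Available) each reduce to summing the corresponding figure over all of the heap's spaces. A minimal standalone model of that shape, using a hypothetical Space/HeapModel pair rather than V8's real classes:

#include <cstdint>
#include <vector>

// Hypothetical stand-ins; the real methods iterate new space, the old
// spaces, code space, map space, cell space and large-object space.
struct Space {
  intptr_t capacity;
  intptr_t available;
};

struct HeapModel {
  std::vector<Space> spaces;

  intptr_t Capacity() const {
    intptr_t sum = 0;
    for (const Space& s : spaces) sum += s.capacity;
    return sum;
  }

  intptr_t Available() const {
    intptr_t sum = 0;
    for (const Space& s : spaces) sum += s.available;
    return sum;
  }
};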
259 bool Heap::HasBeenSetUp() {
270 int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
278 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
329 // TODO(1238405): Combine the infrastructure for --heap-stats and
331 void Heap::ReportStatisticsBeforeGC() {
332 // Heap::ReportHeapStatistics will also log NewSpace statistics when
353 void Heap::PrintShortHeapStatistics() {
419 // TODO(1238405): Combine the infrastructure for --heap-stats and
421 void Heap::ReportStatisticsAfterGC() {
437 void Heap::GarbageCollectionPrologue() {
473 intptr_t Heap::SizeOfObjects() {
483 void Heap::ClearAllICsByKind(Code::Kind kind) {
497 void Heap::RepairFreeListsAfterBoot() {
507 void Heap::GarbageCollectionEpilogue() {
544 // In release mode, we only zap the from space under heap verification.
545 if (Heap::ShouldZapGarbage()) {
676 void Heap::CollectAllGarbage(int flags, const char* gc_reason) {
686 void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
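CollectAllGarbage (line 676) runs one full collection; CollectAllAvailableGarbage (line 686) keeps running full mark-compacts until a pass stops freeing memory, bounded by a fixed attempt cap. A hedged sketch of that loop shape; the cap of 7 and the freed-bytes return value are stand-ins for the real flag plumbing:

#include <cstddef>

// Stand-in for one full mark-compact pass; returns bytes freed.
// (Trivial body here; the real pass is Heap::CollectGarbage.)
static size_t CollectGarbageOnce() { return 0; }

// Retry full GCs while they keep making progress, bounded so a
// pathological heap still terminates.
void CollectAllAvailableGarbage() {
  const int kMaxNumberOfAttempts = 7;  // assumption: V8 used a small fixed cap
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; ++attempt) {
    if (CollectGarbageOnce() == 0) break;  // pass freed nothing: stop early
  }
}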
721 bool Heap::CollectGarbage(AllocationSpace space,
785 // Start incremental marking for the next cycle. The heap snapshot
798 int Heap::NotifyContextDisposed() {
809 void Heap::PerformScavenge() {
819 void Heap::MoveElements(FixedArray* array,
859 static void VerifyStringTable(Heap* heap) {
861 heap->string_table()->IterateElements(&verifier);
867 Heap* heap,
870 heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
871 bool result = heap->CollectGarbage(space, gc_reason);
872 heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
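The helper excerpted around lines 867-872 brackets a forced collection with collector flags: an in-flight incremental marking cycle is aborted before the synchronous GC runs, and the flags are reset afterwards. A simplified sketch of that bracket, with a hypothetical Collector type standing in for the mark-compact collector:

// Hypothetical collector exposing the two flag values the excerpt uses.
struct Collector {
  enum Flags { kNoGCFlags = 0, kAbortIncrementalMarkingMask = 1 };
  void SetFlags(int flags) { flags_ = flags; }
  bool CollectGarbage() { return true; }  // stand-in for the real GC
  int flags_ = kNoGCFlags;
};

// Mirror of the excerpted pattern: abort any incremental cycle,
// collect synchronously, then restore the default flags.
bool AbortIncrementalMarkingAndCollectGarbage(Collector* collector) {
  collector->SetFlags(Collector::kAbortIncrementalMarkingMask);
  bool result = collector->CollectGarbage();
  collector->SetFlags(Collector::kNoGCFlags);
  return result;
}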
877 void Heap::ReserveSpace(int *sizes, Address *locations_out) {
895 Heap::CollectGarbage(NEW_SPACE,
917 V8::FatalProcessOutOfMemory("Heap::ReserveSpace");
922 void Heap::EnsureFromSpaceIsCommitted() {
931 void Heap::ClearJSFunctionResultCaches() {
954 void Heap::ClearNormalizedMapCaches() {
974 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
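UpdateSurvivalRateTrend (line 974) feeds the STABLE trend fields initialized at lines 126-127: the survival rate is the fraction of new space that outlived the scavenge, and the trend compares it against the previous cycle. A standalone sketch under that reading; the 10-point deviation band is illustrative, not V8's exact constant:

enum SurvivalRateTrend { DECREASING, STABLE, INCREASING };

// Classify this cycle's survival rate against the last one.
SurvivalRateTrend UpdateSurvivalRateTrend(double survived_bytes,
                                          double start_new_space_size,
                                          double previous_rate,
                                          double* rate_out) {
  double rate = 100.0 * survived_bytes / start_new_space_size;
  *rate_out = rate;
  const double kAllowedDeviation = 10.0;  // assumption, for illustration
  if (rate > previous_rate + kAllowedDeviation) return INCREASING;
  if (rate < previous_rate - kAllowedDeviation) return DECREASING;
  return STABLE;
}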
1006 bool Heap::PerformGarbageCollection(GarbageCollector collector,
1032 int start_new_space_size = Heap::new_space()->SizeAsInt();
1148 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
1165 void Heap::CallGCEpilogueCallbacks(GCType gc_type) {
1183 void Heap::MarkCompact(GCTracer* tracer) {
1206 void Heap::MarkCompactPrologue() {
1231 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
1244 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1248 Heap* heap_;
1257 explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
1267 Heap* heap_;
1271 static void VerifyNonPointerSpacePointers(Heap* heap) {
1274 VerifyNonPointerSpacePointersVisitor v(heap);
1275 HeapObjectIterator code_it(heap->code_space());
1282 if (!heap->old_data_space()->was_swept_conservatively()) {
1283 HeapObjectIterator data_it(heap->old_data_space());
1292 void Heap::CheckNewSpaceExpansionCriteria() {
1305 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
1306 return heap->InNewSpace(*p) &&
1311 void Heap::ScavengeStoreBufferCallback(
1312 Heap* heap,
1315 heap->store_buffer_rebuilder_.Callback(page, event);
1407 explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
1422 Heap* heap_;
1426 void Heap::Scavenge() {
1579 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1585 heap->FinalizeExternalString(String::cast(*p));
1594 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
1631 void Heap::UpdateReferencesInExternalStringTable(
1650 static Object* VisitWeakList(Heap* heap,
1654 Object* undefined = heap->undefined_value();
1657 MarkCompactCollector* collector = heap->mark_compact_collector();
1684 heap, tail, retainer, record_slots);
1686 WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
1715 static void VisitLiveObject(Heap*, JSFunction*,
1719 static void VisitPhantomObject(Heap*, JSFunction*) {
1738 static void VisitLiveObject(Heap*, Code*,
1742 static void VisitPhantomObject(Heap*, Code*) {
1759 static void VisitLiveObject(Heap* heap,
1764 DoWeakList<JSFunction>(heap, context, retainer, record_slots,
1766 DoWeakList<Code>(heap, context, retainer, record_slots,
1768 DoWeakList<Code>(heap, context, retainer, record_slots,
1773 static void DoWeakList(Heap* heap,
1779 Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer,
1789 heap->mark_compact_collector()->RecordSlot(
1794 static void VisitPhantomObject(Heap*, Context*) {
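Lines 1650-1794 excerpt the weak-list machinery: VisitWeakList walks an intrusive list whose next pointers are weak, asks a retainer whether each element survived the GC, splices dead elements out, and gives WeakListVisitor<T> its VisitLiveObject/VisitPhantomObject hooks. A simplified standalone rendering of the algorithm; tagged pointers become plain nodes, and the record_slots bookkeeping for incremental marking is omitted:

#include <functional>

// Simplified node: the real code threads the list through a slot
// inside each heap object (WeakListVisitor<T>'s weak-next offset).
struct Node {
  Node* next = nullptr;
  bool alive = false;  // stand-in for "the retainer kept this object"
};

// Walk the weak list, unlink dead nodes, and return the new head.
// on_live / on_phantom mirror VisitLiveObject / VisitPhantomObject.
Node* VisitWeakList(Node* head,
                    const std::function<void(Node*)>& on_live,
                    const std::function<void(Node*)>& on_phantom) {
  Node* new_head = nullptr;
  Node** tail_next = &new_head;        // where the next live node is spliced
  for (Node* candidate = head; candidate != nullptr;) {
    Node* next = candidate->next;
    if (candidate->alive) {
      *tail_next = candidate;          // keep: splice after the last live node
      tail_next = &candidate->next;
      on_live(candidate);
    } else {
      on_phantom(candidate);           // drop: e.g. free its backing store
    }
    candidate = next;
  }
  *tail_next = nullptr;                // terminate the rebuilt list
  return new_head;
}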
1803 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
1818 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer,
1838 static void VisitLiveObject(Heap*,
1843 static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
1861 static void VisitLiveObject(Heap* heap,
1867 heap,
1871 if (typed_array_obj != heap->undefined_value() && record_slots) {
1874 heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
1878 static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
1879 Runtime::FreeArrayBuffer(heap->isolate(), phantom);
1888 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
1898 void Heap::TearDownArrayBuffers() {
1919 static void VisitLiveObject(Heap* heap,
1924 static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {}
1932 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
1942 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
1995 static inline void VisitPointer(Heap* heap, Object** p) {
1997 if (!heap->InNewSpace(object)) return;
1998 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
2004 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
2055 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
2059 static HeapObject* EnsureDoubleAligned(Heap* heap,
2063 heap->CreateFillerObjectAt(object->address(), kPointerSize);
2066 heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
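EnsureDoubleAligned (lines 2055-2066) requests one extra pointer of space and then plants a one-word filler object on whichever side leaves the payload 8-byte aligned. A sketch of the address arithmetic over a raw buffer; kPointerSize = 4 models the 32-bit case, where the alignment actually matters:

#include <cstdint>
#include <cstring>

const int kPointerSize = 4;      // 32-bit V8; 64-bit doubles are aligned anyway
const int kDoubleAlignment = 8;

// Stand-in for CreateFillerObjectAt: the real code writes a filler map
// so the heap stays iterable; here we just zero the word.
static void CreateFillerObjectAt(uint8_t* addr, int size) {
  std::memset(addr, 0, size);
}

// `allocation_size` includes one spare word beyond the payload. Return
// an 8-byte aligned payload, filling whichever word goes unused.
uint8_t* EnsureDoubleAligned(uint8_t* object, int allocation_size) {
  if (reinterpret_cast<uintptr_t>(object) & (kDoubleAlignment - 1)) {
    CreateFillerObjectAt(object, kPointerSize);  // pad in front; payload aligned
    return object + kPointerSize;
  }
  // Already aligned: the spare word goes after the payload instead.
  CreateFillerObjectAt(object + allocation_size - kPointerSize, kPointerSize);
  return object;
}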
2166 static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
2173 if (heap->new_space()->Contains(obj)) {
2174 heap->new_space()->RecordAllocation(obj);
2176 heap->new_space()->RecordPromotion(obj);
2184 INLINE(static void MigrateObject(Heap* heap,
2189 heap->CopyBlock(target->address(), source->address(), size);
2196 RecordCopiedObject(heap, target);
2197 Isolate* isolate = heap->isolate();
2234 Heap* heap = map->GetHeap();
2235 if (heap->ShouldBePromoted(object->address(), object_size)) {
2239 ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
2240 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
2242 ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
2243 maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
2251 target = EnsureDoubleAligned(heap, target, allocation_size);
2258 MigrateObject(heap, object, target, object_size);
2262 heap->promotion_queue()->insert(
2265 heap->promotion_queue()->insert(target, object_size);
2269 heap->tracer()->increment_promoted_objects_size(object_size);
2273 ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE));
2274 MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
2275 heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
2280 target = EnsureDoubleAligned(heap, target, allocation_size);
2287 MigrateObject(heap, object, target, object_size);
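Lines 2234-2287 excerpt the scavenger's copy path: an object that has survived long enough (ShouldBePromoted) is moved into old space, pointer-bearing objects into old-pointer space and raw data into old-data space, and queued for re-scanning; otherwise it is copied within new space. A compact model of that decision tree; the types and queue are stand-ins, and MigrateObject's block copy plus forwarding-pointer bookkeeping is reduced to field writes:

#include <deque>

struct HeapObject {
  HeapObject* forwarding = nullptr;  // set once the object has moved
  bool has_pointers = false;
  int size = 0;
};

struct ScavengeHeap {
  std::deque<HeapObject*> promotion_queue;  // promoted objects to re-scan
  bool ShouldBePromoted(HeapObject*) { return false; }  // stand-in policy
  HeapObject* AllocateInOldPointerSpace(int) { return new HeapObject(); }
  HeapObject* AllocateInOldDataSpace(int) { return new HeapObject(); }
  HeapObject* AllocateInNewSpace(int) { return new HeapObject(); }
};

// Move `object`, record the forwarding pointer, and return the copy.
HeapObject* EvacuateObject(ScavengeHeap* heap, HeapObject* object) {
  HeapObject* target;
  if (heap->ShouldBePromoted(object)) {
    // Promote: pointer-bearing objects go where old-to-new references
    // are tracked; raw data skips that bookkeeping.
    target = object->has_pointers
                 ? heap->AllocateInOldPointerSpace(object->size)
                 : heap->AllocateInOldDataSpace(object->size);
    heap->promotion_queue.push_back(target);  // re-scan for new-space refs
  } else {
    target = heap->AllocateInNewSpace(object->size);  // stays in new space
  }
  target->size = object->size;               // MigrateObject, reduced
  target->has_pointers = object->has_pointers;
  object->forwarding = target;               // forwarding pointer for visitors
  return target;
}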
2371 Heap* heap = map->GetHeap();
2375 heap->empty_string()) {
2381 if (!heap->InNewSpace(first)) {
2395 heap->DoScavengeObject(first->map(), slot, first);
2445 void Heap::SelectScavengingVisitorsTable() {
2487 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
2488 SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
2496 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
2520 MaybeObject* Heap::AllocateMap(InstanceType instance_type,
2554 MaybeObject* Heap::AllocateCodeCache() {
2565 MaybeObject* Heap::AllocatePolymorphicCodeCache() {
2570 MaybeObject* Heap::AllocateAccessorPair() {
2582 MaybeObject* Heap::AllocateTypeFeedbackInfo() {
2594 MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) {
2604 const Heap::StringTypeTable Heap::string_type_table[] = {
2612 const Heap::ConstantStringTable Heap::constant_string_table[] = {
2620 const Heap::StructTable Heap::struct_table[] = {
2628 bool Heap::CreateInitialMaps() {
2997 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
2998 // Statically ensure that it is safe to allocate heap numbers in paged
3015 MaybeObject* Heap::AllocateCell(Object* value) {
3029 MaybeObject* Heap::AllocatePropertyCell() {
3049 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) {
3058 MaybeObject* Heap::AllocateAllocationSite() {
3072 MaybeObject* Heap::CreateOddball(const char* to_string,
3083 bool Heap::CreateApiObjects() {
3112 void Heap::CreateJSEntryStub() {
3118 void Heap::CreateJSConstructEntryStub() {
3124 void Heap::CreateFixedStubs() {
3138 Heap::CreateJSEntryStub();
3139 Heap::CreateJSConstructEntryStub();
3149 void Heap::CreateStubsRequiringBuiltins() {
3155 bool Heap::CreateInitialObjects() {
3391 bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
3420 bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
3426 Object* RegExpResultsCache::Lookup(Heap* heap,
3435 cache = heap->string_split_cache();
3439 cache = heap->regexp_multiple_cache();
3459 void RegExpResultsCache::Enter(Heap* heap,
3469 cache = heap->string_split_cache();
3473 cache = heap->regexp_multiple_cache();
3505 MaybeObject* maybe_string = heap->InternalizeString(str);
3512 value_array->set_map_no_write_barrier(heap->fixed_cow_array_map());
3523 MaybeObject* Heap::AllocateInitialNumberStringCache() {
3530 int Heap::FullSizeNumberStringCacheLength() {
3543 void Heap::AllocateFullSizeNumberStringCache() {
3561 void Heap::FlushNumberStringCache() {
3581 Object* Heap::GetNumberStringCache(Object* number) {
3601 void Heap::SetNumberStringCache(Object* number, String* string) {
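GetNumberStringCache and SetNumberStringCache (lines 3581-3601) implement a direct-mapped cache from numbers to their string form, used by NumberToString: the number hashes to a slot, a hit must match the key exactly, and a store simply overwrites the slot. A standalone model with std::string values; V8's cache is a FixedArray of key/value pairs that starts small and grows to full size on demand (lines 3523-3543):

#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

// Direct-mapped number -> string cache. The slot count must be a power
// of two so the hash can be masked; 128 is an arbitrary illustrative size.
class NumberStringCache {
 public:
  explicit NumberStringCache(size_t slots = 128) : entries_(slots) {}

  bool Lookup(double number, std::string* out) const {
    const Entry& e = entries_[Hash(number)];
    if (e.used && e.key == number) { *out = e.value; return true; }
    return false;
  }

  void Insert(double number, const std::string& str) {
    Entry& e = entries_[Hash(number)];  // overwrite: direct-mapped, no chains
    e.used = true;
    e.key = number;
    e.value = str;
  }

 private:
  struct Entry { bool used = false; double key = 0; std::string value; };

  size_t Hash(double number) const {
    uint64_t bits;  // mix the raw bits, as the real cache does for doubles
    std::memcpy(&bits, &number, sizeof(bits));
    return static_cast<size_t>(bits ^ (bits >> 32)) & (entries_.size() - 1);
  }

  std::vector<Entry> entries_;
};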
3621 MaybeObject* Heap::NumberToString(Object* number,
3653 MaybeObject* Heap::Uint32ToString(uint32_t value,
3662 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
3667 Heap::RootListIndex Heap::RootIndexForExternalArrayType(
3694 Heap::RootListIndex Heap::RootIndexForEmptyExternalArray(
3722 ExternalArray* Heap::EmptyExternalArrayForMap(Map* map) {
3730 MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
3746 // Materialize the value in the heap.
3751 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
3763 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
3802 MaybeObject* Heap::AllocateJSMessageObject(String* type,
3814 message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
3816 message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
3837 Heap* heap,
3844 heap->string_table()->LookupTwoCharsStringIfExists(c1, c2, &result)) {
3852 { MaybeObject* maybe_result = heap->AllocateRawOneByteString(2);
3861 { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
3872 MaybeObject* Heap::AllocateConsString(String* first, String* second) {
3990 MaybeObject* Heap::AllocateSubString(String* buffer,
4086 MaybeObject* Heap::AllocateExternalStringFromAscii(
4109 MaybeObject* Heap::AllocateExternalStringFromTwoByte(
4138 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
4163 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
4181 void Heap::CreateFillerObjectAt(Address addr, int size) {
4195 MaybeObject* Heap::AllocateExternalArray(int length,
4216 MaybeObject* Heap::CreateCode(const CodeDesc& desc,
4223 // leaving uninitialized Code object (and breaking the heap).
4292 // that are dereferenced during the copy to point directly to the actual heap
4306 MaybeObject* Heap::CopyCode(Code* code) {
4332 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
4334 // leaving uninitialized Code object (and breaking the heap).
4390 void Heap::InitializeAllocationMemento(AllocationMemento* memento,
4401 MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
4422 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
4439 void Heap::InitializeFunction(JSFunction* function,
4454 MaybeObject* Heap::AllocateFunction(Map* function_map,
4469 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
4525 void Heap::InitializeJSObjectFromMap(JSObject* obj,
4534 // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
4549 filler = Heap::one_pointer_filler_map();
4551 filler = Heap::undefined_value();
4553 obj->InitializeBody(map, Heap::undefined_value(), filler);
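Lines 4525-4553 excerpt InitializeJSObjectFromMap: fresh in-object fields are pre-filled, and the filler depends on whether the map is still tracking in-object slack. While tracking is active, untouched tail fields get the one-pointer filler so the unused slack can later be trimmed; afterwards everything is plain undefined. A condensed model of the InitializeBody call at line 4553, with field values reduced to an enum:

#include <vector>

enum FieldValue { UNDEFINED_VALUE, ONE_POINTER_FILLER };

// Condensed model: fields the constructor is expected to write get
// undefined; the slack beyond them gets the chosen filler so it can be
// reclaimed if slack tracking is still in progress (an assumption based
// on the filler choice visible at lines 4549-4551).
void InitializeBody(std::vector<FieldValue>* fields, int pre_allocated,
                    bool slack_tracking_in_progress) {
  FieldValue filler =
      slack_tracking_in_progress ? ONE_POINTER_FILLER : UNDEFINED_VALUE;
  for (int i = 0; i < static_cast<int>(fields->size()); ++i) {
    (*fields)[i] = (i < pre_allocated) ? UNDEFINED_VALUE : filler;
  }
}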
4557 MaybeObject* Heap::AllocateJSObjectFromMap(
4595 MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(
4629 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
4644 MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
4677 MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
4692 MaybeObject* Heap::AllocateJSArrayAndStorage(
4738 MaybeObject* Heap::AllocateJSArrayStorage(
4778 MaybeObject* Heap::AllocateJSArrayWithElements(
4794 MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
4814 MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
4840 MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
4925 MaybeObject* Heap::ReinitializeJSReceiver(
4986 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
5012 MaybeObject* Heap::AllocateStringFromOneByte(Vector<const uint8_t> string,
5016 return Heap::LookupSingleCharacterStringFromCode(string[0]);
5032 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
5066 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
5086 Map* Heap::InternalizedStringMapForString(String* string) {
5161 MaybeObject* Heap::AllocateInternalizedStringImpl(
5208 MaybeObject* Heap::AllocateInternalizedStringImpl<true>(String*, int, uint32_t);
5210 MaybeObject* Heap::AllocateInternalizedStringImpl<false>(
5213 MaybeObject* Heap::AllocateInternalizedStringImpl<false>(
5217 MaybeObject* Heap::AllocateRawOneByteString(int length,
5241 MaybeObject* Heap::AllocateRawTwoByteString(int length,
5264 MaybeObject* Heap::AllocateJSArray(
5276 MaybeObject* Heap::AllocateEmptyFixedArray() {
5291 MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) {
5296 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
5322 MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
5339 MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
5360 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
5371 MaybeObject* Heap::AllocateFixedArrayWithFiller(int length,
5392 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
5397 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
5403 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
5418 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
5433 MaybeObject* Heap::AllocateUninitializedFixedDoubleArray(
5450 MaybeObject* Heap::AllocateFixedDoubleArrayWithHoles(
5471 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
5491 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries,
5526 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
5538 MaybeObject* Heap::AllocateSymbol() {
5568 MaybeObject* Heap::AllocatePrivateSymbol() {
5577 MaybeObject* Heap::AllocateNativeContext() {
5592 MaybeObject* Heap::AllocateGlobalContext(JSFunction* function,
5611 MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) {
5625 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
5641 MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
5662 MaybeObject* Heap::AllocateWithContext(JSFunction* function,
5679 MaybeObject* Heap::AllocateBlockContext(JSFunction* function,
5697 MaybeObject* Heap::AllocateScopeInfo(int length) {
5706 MaybeObject* Heap::AllocateExternal(void* value) {
5720 MaybeObject* Heap::AllocateStruct(InstanceType type) {
5742 bool Heap::IsHeapIterable() {
5748 void Heap::EnsureHeapIsIterable() {
5751 CollectAllGarbage(kMakeHeapIterableMask, "Heap::EnsureHeapIsIterable");
5757 void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
5779 bool Heap::IdleNotification(int hint) {
5872 bool Heap::IdleGlobalGC() {
5934 void Heap::Print() {
5944 void Heap::ReportCodeStatistics(const char* title) {
5958 void Heap::ReportHeapStatistics(const char* title) {
5970 PrintF("Heap statistics : ");
5993 bool Heap::Contains(HeapObject* value) {
5998 bool Heap::Contains(Address addr) {
6012 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
6017 bool Heap::InSpace(Address addr, AllocationSpace space) {
6045 void Heap::Verify() {
6069 MaybeObject* Heap::InternalizeUtf8String(Vector<const char> string) {
6084 MaybeObject* Heap::InternalizeOneByteString(Vector<const uint8_t> string) {
6099 MaybeObject* Heap::InternalizeOneByteString(Handle<SeqOneByteString> string,
6119 MaybeObject* Heap::InternalizeTwoByteString(Vector<const uc16> string) {
6134 MaybeObject* Heap::InternalizeString(String* string) {
6150 bool Heap::InternalizeStringIfExists(String* string, String** result) {
6159 void Heap::ZapFromSpace() {
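ZapFromSpace (line 6159) backs the verification comment at lines 544-545: after a scavenge, from-space is dead memory, and overwriting it with a recognizable zap pattern makes any stale pointer into it fail loudly. A sketch of the fill; the 0xdeadbeef-style constant is illustrative, as V8 defines its own per-architecture zap values:

#include <cstddef>
#include <cstdint>

// Illustrative zap constant; V8 has its own kZapValue-style constants.
const uint32_t kZapValue = 0xdeadbeef;

// Overwrite a dead semispace word by word so that any surviving
// pointer into it dereferences an obviously bogus value.
void ZapFromSpace(void* start, size_t size_in_bytes) {
  uint32_t* cursor = static_cast<uint32_t*>(start);
  uint32_t* end = cursor + size_in_bytes / sizeof(uint32_t);
  while (cursor != end) *cursor++ = kZapValue;
}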
6173 void Heap::IterateAndMarkPointersToFromSpace(Address start,
6198 if (Heap::InFromSpace(object)) {
6203 SLOW_ASSERT(Heap::InToSpace(new_object));
6236 static void CheckStoreBuffer(Heap* heap,
6244 Map* free_space_map = heap->free_space_map();
6273 // without trying to cast it to a heap object since the hash field of
6276 if (!heap->InNewSpace(o)) continue;
6294 void Heap::OldPointerSpaceCheckStoreBuffer() {
6322 void Heap::MapSpaceCheckStoreBuffer() {
6350 void Heap::LargeObjectSpaceCheckStoreBuffer() {
6376 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
6382 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
6394 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
6424 // heap. Note that it is not necessary to iterate over code objects
6474 // TODO(1236194): Since the heap size is configurable on the command line
6475 // and through the API, we should gracefully handle the case that the heap
6477 bool Heap::ConfigureHeap(int max_semispace_size,
6558 bool Heap::ConfigureHeapDefault() {
6565 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
6604 intptr_t Heap::PromotedSpaceSizeOfObjects() {
6615 bool Heap::AdvanceSweepers(int step_size) {
6623 int64_t Heap::PromotedExternalMemorySize() {
6631 void Heap::EnableInlineAllocation() {
6640 void Heap::DisableInlineAllocation() {
6666 bool Heap::SetUp() {
6671 // Initialize heap spaces and initial maps and objects. Whenever something
6673 // call Heap::TearDown() to release allocated memory.
6675 // If the heap is not yet configured (e.g. through the API), configure it.
6763 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6764 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6774 bool Heap::CreateHeapObjects() {
6790 void Heap::SetStackLimits() {
6807 void Heap::TearDown() {
6917 void Heap::AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
6927 void Heap::RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback) {
6939 void Heap::AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
6949 void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) {
6961 MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj,
6978 DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) {
6985 void Heap::EnsureWeakObjectToCodeTable() {
7005 void Heap::PrintHandles() {
7073 SpaceIterator::SpaceIterator(Heap* heap)
7074 : heap_(heap),
7081 SpaceIterator::SpaceIterator(Heap* heap, HeapObjectCallback size_func)
7082 : heap_(heap),
7165 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) {
7212 Heap* heap_;
7217 HeapIterator::HeapIterator(Heap* heap)
7218 : heap_(heap),
7225 HeapIterator::HeapIterator(Heap* heap,
7227 : heap_(heap),
7256 // objects. Otherwise, heap will be left in an inconsistent state.
7478 // given root object and finds a path to a specific heap object and
7480 void Heap::TracePathToObjectFrom(Object* target, Object* root) {
7487 // and finds a path to a specific heap object and prints it.
7488 void Heap::TracePathToObject(Object* target) {
7497 void Heap::TracePathToGlobal() {
7506 static intptr_t CountTotalHolesSize(Heap* heap) {
7508 OldSpaces spaces(heap);
7518 GCTracer::GCTracer(Heap* heap,
7532 heap_(heap),
7544 in_free_list_or_wasted_before_gc_ = CountTotalHolesSize(heap);
7729 if (!map->GetIsolate()->heap()->InternalizeStringIfExists(
7737 ASSERT(!map->GetIsolate()->heap()->InNewSpace(name));
7780 void Heap::GarbageCollectionGreedyCheck() {
7859 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
7865 void Heap::FreeQueuedChunks() {
7903 isolate_->heap()->store_buffer()->Compact();
7904 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
7913 void Heap::RememberUnmappedPage(Address page, bool compacted) {
7928 void Heap::ClearObjectStats(bool clear_last_time_stats) {
7941 void Heap::CheckpointObjectStats() {