
Lines Matching refs:heap_

43     : heap_(heap),
74 heap_->mark_compact_collector()->RecordSlot(
135 Code* host = heap_->isolate()->inner_pointer_to_code_cache()->
148 heap_->mark_compact_collector()->
172 heap_->mark_compact_collector()->RecordRelocSlot(rinfo,
442 DeactivateIncrementalWriteBarrierForSpace(heap_->old_pointer_space());
443 DeactivateIncrementalWriteBarrierForSpace(heap_->old_data_space());
444 DeactivateIncrementalWriteBarrierForSpace(heap_->cell_space());
445 DeactivateIncrementalWriteBarrierForSpace(heap_->property_cell_space());
446 DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
447 DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
448 DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
450 LargePage* lop = heap_->lo_space()->first_page();
477 ActivateIncrementalWriteBarrier(heap_->old_pointer_space());
478 ActivateIncrementalWriteBarrier(heap_->old_data_space());
479 ActivateIncrementalWriteBarrier(heap_->cell_space());
480 ActivateIncrementalWriteBarrier(heap_->property_cell_space());
481 ActivateIncrementalWriteBarrier(heap_->map_space());
482 ActivateIncrementalWriteBarrier(heap_->code_space());
483 ActivateIncrementalWriteBarrier(heap_->new_space());
485 LargePage* lop = heap_->lo_space()->first_page();
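
The mirrored hit groups at 442-448 and 477-483 walk the same list of spaces, which suggests a pair of helpers that toggle the incremental write barrier space by space, page by page, before handling the large-object space separately (the first_page() hits at 450 and 485). A minimal sketch of the per-space helper this implies; the PageIterator walk and the SetOldSpacePageFlags name are assumptions, not taken from these hits:

    void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
      // Assumed shape: visit every page of the space and raise the
      // flags the incremental write barrier checks.
      PageIterator it(space);
      while (it.has_next()) {
        Page* p = it.next();
        SetOldSpacePageFlags(p, true);  // hypothetical flag setter
      }
    }

The deactivate path at 442-450 would presumably be the same walk with the flags cleared.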
508 heap_->gc_state() == Heap::NOT_IN_GC &&
510 heap_->isolate()->IsInitialized() &&
511 heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold;
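
Hits 508-511 read as the conjuncts of a single "is it worth starting incremental marking?" predicate: not already inside a GC, isolate initialized, and enough memory promoted. A hedged reconstruction; the FLAG guard and the threshold value are assumptions:

    bool IncrementalMarking::WorthActivating() {
      static const intptr_t kActivationThreshold = 8 * MB;  // assumed value
      return FLAG_incremental_marking &&                    // assumed guard
             heap_->gc_state() == Heap::NOT_IN_GC &&
             heap_->isolate()->IsInitialized() &&
             heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold;
    }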
586 ASSERT(heap_->gc_state() == Heap::NOT_IN_GC);
588 ASSERT(heap_->isolate()->IsInitialized());
592 if (heap_->IsSweepingComplete()) {
601 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold);
611 heap_->mark_compact_collector()->StartCompaction(
619 PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
634 heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
638 heap_->CompletelyClearInstanceofCache();
639 heap_->isolate()->compilation_cache()->MarkCompactPrologue();
644 MarkObjectGreyDoNotEnqueue(heap_->polymorphic_code_cache());
649 heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
660 NewSpacePageIterator it(heap_->new_space()->FromSpaceStart(),
661 heap_->new_space()->FromSpaceEnd());
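
Hits 660-661 construct a NewSpacePageIterator over from-space. In this code base the usual reason to walk from-space pages around a marking phase is to clear their mark bitmaps; a sketch under that assumption:

    NewSpacePageIterator it(heap_->new_space()->FromSpaceStart(),
                            heap_->new_space()->FromSpaceEnd());
    while (it.has_next()) {
      Bitmap::Clear(it.next());  // assumed purpose of the walk
    }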
677 Map* filler_map = heap_->one_pointer_filler_map();
683 if (heap_->InNewSpace(obj)) {
741 Map* filler_map = heap_->one_pointer_filler_map();
759 Map* filler_map = heap_->one_pointer_filler_map();
789 heap_->AddMarkingTime(delta);
798 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
804 Object* context = heap_->native_contexts_list();
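
Hits 798-804 touch the polymorphic code cache and then load heap_->native_contexts_list(), the head of V8's weak list of native contexts. The idiomatic walk of that list follows the NEXT_CONTEXT_LINK slot; a hedged sketch, with the per-context work left as an assumption:

    Object* context = heap_->native_contexts_list();
    while (!context->IsUndefined()) {
      // ... per-context cleanup would go here (assumed) ...
      context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
    }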
827 heap_->new_space()->LowerInlineAllocationLimit(0);
831 PatchIncrementalMarkingRecordWriteStubs(heap_,
836 LargeObjectIterator it(heap_->lo_space());
845 heap_->isolate()->stack_guard()->Continue(GC_REQUEST);
855 heap_->new_space()->LowerInlineAllocationLimit(0);
858 PatchIncrementalMarkingRecordWriteStubs(heap_,
862 heap_->isolate()->stack_guard()->Continue(GC_REQUEST);
878 heap_->isolate()->stack_guard()->RequestGC();
884 if (IsStopped() && WorthActivating() && heap_->NextGCIsLikelyToBeFull()) {
896 if (heap_->gc_state() != Heap::NOT_IN_GC ||
934 if (heap_->EnsureSweepersProgressed(static_cast<int>(bytes_to_process))) {
970 (heap_->PromotedTotalSize() >
980 int64_t promoted_during_marking = heap_->PromotedTotalSize()
983 intptr_t scavenge_slack = heap_->MaxSemiSpaceSize();
1016 heap_->AddMarkingTime(delta);
1028 heap_->PromotedTotalSize();
1039 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
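
The closing hit at 1039 is a complete expression for the remaining old-generation headroom; as a standalone helper it would read as below (only the wrapper signature is assumed):

    intptr_t IncrementalMarking::SpaceLeftInOldSpace() {
      return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
    }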