Lines Matching full:heap
32 #include "heap-profiler.h"
89 heap()->MarkMapPointersAsEncoded(true);
91 heap()->MarkMapPointersAsEncoded(false);
92 heap()->isolate()->pc_to_code_cache()->Flush();
97 heap()->isolate()->pc_to_code_cache()->Flush();
126 if (!heap()->map_space()->MapPointersEncodable())
164 heap()->isolate()->stub_cache()->Clear();
166 heap()->external_string_table_.CleanUp();
200 // Marking all live objects in the heap as part of mark-sweep or mark-compact
219 // the heap looking for objects marked as overflowed, push them on the stack,
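
The two comments above describe the marking scheme: live objects are traced through an explicit marking stack, and when that stack fills up the collector flags objects as overflowed and later rescans the heap for them. A minimal, self-contained sketch of that idea follows; ToyObject and ToyMarkingStack are invented names, not V8's classes.

#include <cstddef>
#include <vector>

struct ToyObject {
  bool marked = false;
  bool overflowed = false;            // set when the stack was full at push time
  std::vector<ToyObject*> children;   // outgoing pointers
};

class ToyMarkingStack {
 public:
  explicit ToyMarkingStack(std::size_t capacity) : capacity_(capacity) {}

  bool overflowed() const { return overflowed_; }
  void clear_overflowed() { overflowed_ = false; }

  // Mark an object and push it, or flag it as overflowed if the stack is full.
  void MarkAndPush(ToyObject* obj) {
    if (obj->marked) return;
    obj->marked = true;
    if (stack_.size() < capacity_) {
      stack_.push_back(obj);
    } else {
      obj->overflowed = true;
      overflowed_ = true;
    }
  }

  // Drain the stack, marking everything transitively reachable from it.
  void ProcessStack() {
    while (!stack_.empty()) {
      ToyObject* obj = stack_.back();
      stack_.pop_back();
      for (ToyObject* child : obj->children) MarkAndPush(child);
    }
  }

 private:
  std::size_t capacity_;
  bool overflowed_ = false;
  std::vector<ToyObject*> stack_;
};
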
343 // Optimization: If the heap object pointed to by p is a non-symbol
344 // cons string whose right substring is HEAP->empty_string, update
347 // Here we assume that if we change *p, we replace it with a heap object
348       // (i.e., the left substring of a cons string is always a heap object).
352 // (ConsString::cast(object)->second() == HEAP->empty_string())
362 Heap* heap = map_word.ToMap()->heap();
363 if (second != heap->raw_unchecked_empty_string()) {
371 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
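
The fragments above come from the cons-string short-circuiting pass: a cons string whose right part is the empty string can be replaced, at the slot being visited, by its left part, except when that would create an unrecorded old-to-new pointer. A simplified stand-in with invented ToyString fields, not V8's string layout:

struct ToyString {
  bool is_cons = false;
  bool in_new_space = false;
  ToyString* first = nullptr;    // left part (only meaningful for cons strings)
  ToyString* second = nullptr;   // right part (only meaningful for cons strings)
};

// Returns the object a visited slot should point to after the shortcut.
ToyString* ShortCircuitCons(ToyString* object, const ToyString* empty_string) {
  if (!object->is_cons) return object;
  if (object->second != empty_string) return object;
  ToyString* first = object->first;
  // Mirrors the guard in the fragment above: the slot's dirty marks cannot be
  // updated here, so skip the shortcut whenever it could turn an old-space
  // reference into an unrecorded pointer into new space.
  if (!object->in_new_space && first->in_new_space) return object;
  return first;
}
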
445 INLINE(static void VisitPointer(Heap* heap, Object** p)) {
446 MarkObjectByPointer(heap, p);
449 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
453 if (VisitUnmarkedObjects(heap, start, end)) return;
456 for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
459 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
465 // marked since they are contained in HEAP->non_monomorphic_cache().
467 heap->mark_compact_collector()->MarkObject(code);
471 static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
475 VisitPointer(heap, &cell);
481 static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
487 heap->mark_compact_collector()->MarkObject(code);
491 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
495 heap->mark_compact_collector()->MarkUnmarkedObject(object);
504 ASSERT(Isolate::Current()->heap()->Contains(obj));
516 static inline bool VisitUnmarkedObjects(Heap* heap,
520 StackLimitCheck check(heap->isolate());
523 MarkCompactCollector* collector = heap->mark_compact_collector();
558 map->heap());
567 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
568 Object* undefined = heap->raw_unchecked_undefined_value();
584 inline static bool IsFlushable(Heap* heap, JSFunction* function) {
599 return IsFlushable(heap, shared_info);
602 inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
612 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
643 static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
644 if (!IsFlushable(heap, function)) return false;
651 heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
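
HasSourceCode, IsFlushable, and FlushCodeForFunction above implement the code-flushing heuristic: compiled code may be discarded only if the function can be lazily recompiled later and nothing pins its code. A hedged sketch of that decision, using invented field names (is_compiled, has_source, is_active) rather than SharedFunctionInfo's real layout:

#include <vector>

struct ToySharedInfo {
  bool is_compiled = false;
  bool has_source = false;   // code can only be regenerated if the source survives
  bool is_active = false;    // e.g. currently on the stack or otherwise pinned
};

struct ToyFunction {
  ToySharedInfo* shared = nullptr;
};

class ToyCodeFlusher {
 public:
  void AddCandidate(ToyFunction* f) { candidates_.push_back(f); }
  const std::vector<ToyFunction*>& candidates() const { return candidates_; }
 private:
  std::vector<ToyFunction*> candidates_;
};

bool IsFlushable(const ToySharedInfo* shared) {
  // Code may only be thrown away if it can be recompiled later: the function
  // must be compiled, its source must still be around, and nothing pins it.
  if (!shared->is_compiled || !shared->has_source) return false;
  if (shared->is_active) return false;
  return true;
}

bool MaybeFlush(ToyFunction* function, ToyCodeFlusher* flusher) {
  if (!IsFlushable(function->shared)) return false;
  flusher->AddCandidate(function);   // flushed lazily after marking completes
  return true;
}
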
674 Heap* heap = map->heap();
675 if (!(map == heap->raw_unchecked_context_map() ||
676 map == heap->raw_unchecked_catch_context_map() ||
677 map == heap->raw_unchecked_global_context_map())) {
704 MarkCompactCollector* collector = map->heap()->mark_compact_collector();
715 Heap* heap = map->heap();
721 known_flush_code_candidate = IsFlushable(heap, shared);
723 heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
727 VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
731 static void VisitCodeEntry(Heap* heap, Address entry_address) {
734 VisitPointer(heap, &code);
743 Heap* heap = map->heap();
744 MarkCompactCollector* collector = heap->mark_compact_collector();
754 flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
799 Heap* heap = map->heap();
800 MarkCompactCollector* collector = heap->mark_compact_collector();
802 VisitPointers(heap,
807 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
824 VisitPointers(heap,
833 static void VisitSharedFunctionInfoFields(Heap* heap,
836 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
839 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
842 VisitPointers(heap,
861 explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
871 void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
872 StaticMarkingVisitor::VisitCodeTarget(heap, rinfo);
875 void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
876 StaticMarkingVisitor::VisitGlobalPropertyCell(heap, rinfo);
879 void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
880 StaticMarkingVisitor::VisitDebugTarget(heap, rinfo);
884 Heap* heap_;
928 ASSERT(heap() == Isolate::Current()->heap());
936 if (heap()->isolate()->debug()->IsLoaded() ||
937 heap()->isolate()->debug()->has_break_points()) {
946 MarkObject(heap()->raw_unchecked_empty_descriptor_array());
949 ASSERT(this == heap()->mark_compact_collector());
957 heap()->isolate()->thread_manager()->IterateArchivedThreads(
961 heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
962 heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);
968 // Visitor class for marking heap roots.
971 explicit RootMarkingVisitor(Heap* heap)
972 : collector_(heap->mark_compact_collector()) { }
999 // overflowed objects in the heap.
1010 explicit SymbolTableCleaner(Heap* heap)
1011 : heap_(heap), pointers_removed_(0) { }
1036 Heap* heap_;
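
SymbolTableCleaner above is a visitor over the weak symbol table. A self-contained sketch, with invented types, of the general cleanup it suggests: slots whose referents were not marked are overwritten with a sentinel and counted so the table can be shrunk afterwards.

#include <cstddef>
#include <vector>

struct WeakEntry {
  bool marked = false;
};

class ToyTableCleaner {
 public:
  explicit ToyTableCleaner(WeakEntry* the_hole) : the_hole_(the_hole) {}

  void VisitPointers(std::vector<WeakEntry*>& slots) {
    for (WeakEntry*& slot : slots) {
      if (slot == the_hole_ || slot == nullptr) continue;
      if (!slot->marked) {
        slot = the_hole_;        // dead entry: replace with the hole sentinel
        ++pointers_removed_;
      }
    }
  }

  std::size_t PointersRemoved() const { return pointers_removed_; }

 private:
  WeakEntry* the_hole_;
  std::size_t pointers_removed_ = 0;
};
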
1058 ASSERT(HEAP->Contains(object));
1062 map->ClearCodeCache(heap());
1097 StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
1105 ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array());
1139 HeapObjectIterator iterator(heap()->map_space());
1148 ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
1181 ASSERT(HEAP->Contains(object));
1196 SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
1200 MarkingVisitor marker(heap());
1207 // Mark the heap roots including global variables, stack variables,
1209 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
1214 // There may be overflowed objects in the heap. Visit them now.
1224 heap()->isolate()->global_handles()->object_groups();
1246 // An object in the group is marked, so mark all heap objects in
1264 heap()->isolate()->global_handles()->implicit_ref_groups();
1277 // A parent object is marked, so mark all child heap objects.
1293 // Before: the marking stack contains zero or more heap object pointers.
1295 // marking stack have been marked, or are overflowed in the heap.
1300 ASSERT(heap()->Contains(object));
1316 // Sweep the heap for overflowed objects, clear their overflow bits, and
1319 // overflowed objects in the heap so the overflow flag on the marking stack
1324 SemiSpaceIterator new_it(heap()->new_space(), &OverflowObjectSize);
1328 HeapObjectIterator old_pointer_it(heap()->old_pointer_space(),
1333 HeapObjectIterator old_data_it(heap()->old_data_space(), &OverflowObjectSize);
1337 HeapObjectIterator code_it(heap()->code_space(), &OverflowObjectSize);
1341 HeapObjectIterator map_it(heap()->map_space(), &OverflowObjectSize);
1345 HeapObjectIterator cell_it(heap()->cell_space(), &OverflowObjectSize);
1349 LargeObjectIterator lo_it(heap()->lo_space(), &OverflowObjectSize);
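
The iterator fragments above show the marking stack being refilled from every space: each space is swept for objects flagged as overflowed, the flag is cleared, and the object is pushed again, stopping early if the stack fills up. A self-contained sketch of that loop, with invented types rather than V8's iterators:

#include <cstddef>
#include <vector>

struct Obj {
  bool marked = false;
  bool overflowed = false;
};

struct Stack {
  std::size_t capacity = 0;
  std::vector<Obj*> items;
  bool is_full() const { return items.size() >= capacity; }
};

// Returns true if the whole space was scanned without filling the stack.
bool ScanOverflowedObjects(std::vector<Obj*>& space, Stack* stack) {
  for (Obj* obj : space) {
    if (!obj->overflowed) continue;
    if (stack->is_full()) return false;   // resume scanning on the next round
    obj->overflowed = false;
    stack->items.push_back(obj);
  }
  return true;
}

// The overflow flag on the marking stack may only be cleared once every space
// has been swept completely, mirroring the per-space calls above.
bool RefillMarkingStack(std::vector<std::vector<Obj*>>& spaces, Stack* stack) {
  for (auto& space : spaces) {
    if (!ScanOverflowedObjects(space, stack)) return false;
  }
  return true;  // caller may now clear the stack's overflow flag
}
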
1358 // stack. Before: the marking stack contains zero or more heap object
1360 // objects in the heap.
1387 PostponeInterruptsScope postpone(heap()->isolate());
1395 marking_stack_.Initialize(heap()->new_space()->FromSpaceLow(),
1396 heap()->new_space()->FromSpaceHigh());
1402 RootMarkingVisitor root_visitor(heap());
1416 heap()->isolate()->global_handles()->IdentifyWeakHandles(
1419 heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
1432 SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
1433 SymbolTableCleaner v(heap());
1436 heap()->external_string_table_.Iterate(&v);
1437 heap()->external_string_table_.CleanUp();
1441 heap()->ProcessWeakReferences(&mark_compact_object_retainer);
1444 heap()->isolate()->global_handles()->RemoveObjectGroups();
1445   heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
1453 heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
1460 if (heap()->new_space()->Contains(obj)) {
1462 } else if (heap()->map_space()->Contains(obj)) {
1465 } else if (heap()->cell_space()->Contains(obj)) {
1468 } else if (heap()->old_pointer_space()->Contains(obj)) {
1470 } else if (heap()->old_data_space()->Contains(obj)) {
1472 } else if (heap()->code_space()->Contains(obj)) {
1474 } else if (heap()->lo_space()->Contains(obj)) {
1490 heap()->lo_space()->FreeUnmarkedObjects();
1503 HeapObjectIterator map_iterator(heap()->map_space(), &SizeOfMarkedObject);
1556 Object* undefined = heap()->raw_unchecked_undefined_value();
1588 current->ClearNonLiveTransitions(heap(), real_prototype);
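
ClearNonLiveTransitions above prunes cached map transitions whose target maps did not survive marking; the real code also rewires back pointers through real_prototype, which this sketch leaves out. A toy version with invented types:

#include <algorithm>
#include <string>
#include <vector>

struct ToyMap;

struct Transition {
  std::string name;   // the property whose addition created the target map
  ToyMap* target;
};

struct ToyMap {
  bool marked = false;
  std::vector<Transition> transitions;
};

// Drop every transition whose target map is dead, so dead maps become
// unreachable and their space can be reclaimed.
void ClearNonLiveTransitions(ToyMap* map) {
  auto& ts = map->transitions;
  ts.erase(std::remove_if(ts.begin(), ts.end(),
                          [](const Transition& t) { return !t.target->marked; }),
           ts.end());
}
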
1608 // sweeps of the heap. A distinguished map-pointer encoding is used to mark
1647 // Try to promote all objects in new space. Heap numbers and sequential
1650 inline MaybeObject* MCAllocateFromNewSpace(Heap* heap,
1654 if (object_size > heap->MaxObjectSizeInPagedSpace()) {
1657 OldSpace* target_space = heap->TargetSpace(object);
1658 ASSERT(target_space == heap->old_pointer_space() ||
1659 target_space == heap->old_data_space());
1664 result = heap->new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
1672     Heap* heap,
1675 return heap->old_pointer_space()->MCAllocateRaw(object_size);
1680 Heap* heap,
1683 return heap->old_data_space()->MCAllocateRaw(object_size);
1688 Heap* heap,
1691 return heap->code_space()->MCAllocateRaw(object_size);
1696 Heap* heap,
1699 return heap->map_space()->MCAllocateRaw(object_size);
1704 Heap* heap, HeapObject* ignore, int object_size) {
1705 return heap->cell_space()->MCAllocateRaw(object_size);
1711 inline void EncodeForwardingAddressInNewSpace(Heap* heap,
1717 heap->new_space()->ToSpaceOffsetForAddress(old_object->address());
1718 Memory::Address_at(heap->new_space()->FromSpaceLow() + offset) =
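
EncodeForwardingAddressInNewSpace above records a to-space object's forwarding address in from-space, at the same offset the object has in to-space, so no side table is needed; the relocation code later reads it back from that slot. A minimal model of the trick using plain byte buffers instead of V8's SemiSpace:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

struct ToySemiSpaces {
  std::vector<std::uint8_t> to_space;
  std::vector<std::uint8_t> from_space;  // idle in this phase; reused as a table

  std::size_t ToSpaceOffset(const std::uint8_t* addr) const {
    return static_cast<std::size_t>(addr - to_space.data());
  }

  // Stash the forwarding address in from-space at the object's to-space offset.
  void EncodeForwardingAddress(const std::uint8_t* old_object,
                               std::uint8_t* new_object) {
    std::size_t offset = ToSpaceOffset(old_object);
    assert(offset + sizeof(std::uint8_t*) <= from_space.size());
    std::memcpy(&from_space[offset], &new_object, sizeof(std::uint8_t*));
  }

  // Read it back, as the relocation pass does when moving the object.
  std::uint8_t* DecodeForwardingAddress(const std::uint8_t* old_object) const {
    std::size_t offset = ToSpaceOffset(old_object);
    std::uint8_t* forwarded = nullptr;
    std::memcpy(&forwarded, &from_space[offset], sizeof(std::uint8_t*));
    return forwarded;
  }
};
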
1726 inline void EncodeForwardingAddressInPagedSpace(Heap* heap,
1785 Alloc(collector->heap(), object, object_size)->ToObjectUnchecked();
1786 Encode(collector->heap(), object, object_size, forwarded, offset);
1800 ProcessNonLive(object, collector->heap()->isolate());
1823 heap()->new_space()->bottom(),
1824 heap()->new_space()->top(),
1861 static void MigrateObject(Heap* heap,
1867 heap->CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
1869 heap->CopyBlock(dst, src, size);
1879 static inline void VisitPointer(Heap* heap, Object** p) {
1885 if (heap->new_space()->Contains(obj)) {
1886 ASSERT(heap->InFromSpace(*p));
1897 explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { }
1926 Heap* heap_;
1937 ASSERT(HEAP->InFromSpace(*p));
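
PointersToNewGenUpdatingVisitor above rewrites only those slots that still point into from-space. The sketch below models the forwarding record as an explicit map, which is an assumption made for illustration, not V8's representation:

#include <unordered_map>

struct ToyHeapObject {};

struct ToyHeapModel {
  std::unordered_map<ToyHeapObject*, ToyHeapObject*> forwarding;  // old -> new
  bool (*in_from_space)(ToyHeapObject*);
};

// Rewrite a slot only when it points into from-space and a forwarding entry
// exists for the object it refers to.
void UpdateSlot(ToyHeapModel& heap, ToyHeapObject** slot) {
  ToyHeapObject* obj = *slot;
  if (obj == nullptr || !heap.in_from_space(obj)) return;  // nothing to do
  auto it = heap.forwarding.find(obj);
  if (it != heap.forwarding.end()) *slot = it->second;
}
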
1951 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1959 static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) {
1962 if (object_size > heap->MaxObjectSizeInPagedSpace()) {
1964 heap->lo_space()->AllocateRawFixedArray(object_size);
1967 MigrateObject(heap, target->address(), object->address(), object_size,
1969 heap->mark_compact_collector()->tracer()->
1974 OldSpace* target_space = heap->TargetSpace(object);
1976 ASSERT(target_space == heap->old_pointer_space() ||
1977 target_space == heap->old_data_space());
1981 MigrateObject(heap,
1985 target_space == heap->old_pointer_space());
1986 heap->mark_compact_collector()->tracer()->
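
TryPromoteObject above sends oversized objects to the large-object space and everything else to an old space chosen for it, either the old pointer space or the old data space. A sketch of that policy; the threshold parameter and the contains_pointers test are illustrative only:

#include <cstddef>

enum class TargetSpace { kLargeObjectSpace, kOldPointerSpace, kOldDataSpace };

struct ToyObjectInfo {
  std::size_t size = 0;
  bool contains_pointers = false;   // e.g. strings and numbers would not
};

TargetSpace ChooseTargetSpace(const ToyObjectInfo& obj,
                              std::size_t max_regular_object_size) {
  if (obj.size > max_regular_object_size) return TargetSpace::kLargeObjectSpace;
  // Pointer-free objects can live in a space that pointer-updating passes and
  // dirty-region scans never have to visit.
  return obj.contains_pointers ? TargetSpace::kOldPointerSpace
                               : TargetSpace::kOldDataSpace;
}
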
1996 static void SweepNewSpace(Heap* heap, NewSpace* space) {
1997 heap->CheckNewSpaceExpansionCriteria();
2017 heap->mark_compact_collector()->tracer()->decrement_marked_count();
2023 if (TryPromoteObject(heap, object, size)) {
2031 MigrateObject(heap,
2046 PointersToNewGenUpdatingVisitor updating_visitor(heap);
2058 heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
2062 heap->IterateDirtyRegions(heap->old_pointer_space(),
2063 &Heap::IteratePointersInDirtyRegion,
2065 heap->WATERMARK_SHOULD_BE_VALID);
2067 heap->lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);
2070 HeapObjectIterator cell_iterator(heap->cell_space());
2083 updating_visitor.VisitPointer(heap->global_contexts_list_address());
2086 heap->UpdateNewSpaceReferencesInExternalStringTable(
2090 heap->IncrementYoungSurvivorsCounter(survivors_size);
2094 heap->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
2098 static void SweepSpace(Heap* heap, PagedSpace* space) {
2136 heap->mark_compact_collector()->tracer()->decrement_marked_count();
2145 heap->mark_compact_collector()->ReportDeleteIfNeeded(
2146 object, heap->isolate());
2246 heap()->new_space()->MCResetRelocationInfo();
2251 heap()->old_pointer_space());
2255 heap()->old_data_space());
2259 heap()->code_space());
2263 heap()->cell_space());
2275 heap()->map_space());
2280 heap()->old_pointer_space()->MCWriteRelocationInfoToPage();
2281 heap()->old_data_space()->MCWriteRelocationInfoToPage();
2282 heap()->code_space()->MCWriteRelocationInfoToPage();
2283 heap()->map_space()->MCWriteRelocationInfoToPage();
2284 heap()->cell_space()->MCWriteRelocationInfoToPage();
2290 explicit MapIterator(Heap* heap)
2291 : HeapObjectIterator(heap->map_space(), &SizeCallback) { }
2293 MapIterator(Heap* heap, Address start)
2294 : HeapObjectIterator(heap->map_space(), start, &SizeCallback) { }
2306 explicit MapCompact(Heap* heap, int live_maps)
2307 : heap_(heap),
2309 to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)),
2310 vacant_map_it_(heap),
2311 map_to_evacuate_it_(heap, to_evacuate_start_),
2332 heap()->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG);
2333 heap()->isolate()->global_handles()->IterateWeakRoots(
2339 ASSERT(space != heap()->map_space());
2344 UpdateMapPointersInRange(heap(),
2351 NewSpace* space = heap()->new_space();
2352 UpdateMapPointersInRange(heap(), space->bottom(), space->top());
2356 LargeObjectIterator it(heap()->lo_space());
2358 UpdateMapPointersInObject(heap(), obj);
2362 heap()->map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
2365 inline Heap* heap() const { return heap_; }
2368 Heap* heap_;
2435 map_to_evacuate->heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(
2456 static int UpdateMapPointersInObject(Heap* heap, HeapObject* obj) {
2459 ASSERT(heap->map_space()->Contains(map));
2464 ASSERT(heap->map_space()->Contains(new_map));
2483 static void UpdateMapPointersInRange(Heap* heap, Address start, Address end) {
2488 size = UpdateMapPointersInObject(heap, object);
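
The MapCompact fragments above sketch map-space compaction: live maps beyond the post-compaction boundary are evacuated into vacant slots below it, forwarding information is left behind, and UpdateMapPointersInObject/Range rewrite every map pointer afterwards. A toy model with invented types:

#include <cstddef>
#include <vector>

struct ToyMap {
  bool live = false;
  ToyMap* forwarded_to = nullptr;   // set once the map has been evacuated
  int payload = 0;                  // stands in for the map's actual contents
};

// After compaction all live maps occupy the first `live_maps` slots; maps that
// currently sit above that boundary are moved down into vacant slots.
void CompactMaps(std::vector<ToyMap>& map_space, std::size_t live_maps) {
  std::size_t vacant = 0;               // scans forward looking for dead slots
  std::size_t to_evacuate = live_maps;  // scans the area that must be emptied
  while (to_evacuate < map_space.size()) {
    if (!map_space[to_evacuate].live) { ++to_evacuate; continue; }
    while (vacant < live_maps && map_space[vacant].live) ++vacant;
    if (vacant >= live_maps) break;     // inconsistent input; nothing to do
    ToyMap& dst = map_space[vacant];
    ToyMap& src = map_space[to_evacuate];
    dst = src;                          // move the map's contents down
    src.live = false;
    src.forwarded_to = &dst;            // map pointers get rewritten afterwards
    ++vacant;
    ++to_evacuate;
  }
}
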
2516 SweepSpace(heap(), heap()->old_pointer_space());
2517 SweepSpace(heap(), heap()->old_data_space());
2518 SweepSpace(heap(), heap()->code_space());
2519 SweepSpace(heap(), heap()->cell_space());
2521 SweepNewSpace(heap(), heap()->new_space());
2523 SweepSpace(heap(), heap()->map_space());
2525 heap()->IterateDirtyRegions(heap()->map_space(),
2526 &heap()->IteratePointersInDirtyMapsRegion,
2528 heap()->WATERMARK_SHOULD_BE_VALID);
2530 intptr_t live_maps_size = heap()->map_space()->Size();
2534 if (heap()->map_space()->NeedsCompaction(live_maps)) {
2535 MapCompact map_compact(heap(), live_maps);
2543 if (space == heap()->map_space()) continue;
2609 explicit UpdatingVisitor(Heap* heap) : heap_(heap) {}
2639 inline Heap* heap() const { return heap_; }
2648 ASSERT(!heap()->InFromSpace(obj));
2650 if (heap()->new_space()->Contains(obj)) {
2652 heap()->new_space()->FromSpaceLow() +
2653 heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
2657 ASSERT(heap()->old_pointer_space()->Contains(new_addr) ||
2658 heap()->old_data_space()->Contains(new_addr) ||
2659 heap()->new_space()->FromSpaceContains(new_addr) ||
2660 heap()->lo_space()->Contains(HeapObject::FromAddress(new_addr)));
2662 if (heap()->new_space()->FromSpaceContains(new_addr)) {
2663 ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
2664 heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
2668 } else if (heap()->lo_space()->Contains(obj)) {
2698 Heap* heap_;
2707 UpdatingVisitor updating_visitor(heap());
2708 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
2710 heap()->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
2711 heap()->isolate()->global_handles()->IterateWeakRoots(&updating_visitor);
2714 updating_visitor.VisitPointer(&heap()->global_contexts_list_);
2719 heap()->map_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2721 heap()->old_pointer_space(),
2724 heap()->old_data_space(),
2727 heap()->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2729 heap()->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2731 heap()->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);
2734 LargeObjectIterator it(heap()->lo_space());
2761 ASSERT(heap()->map_space()->Contains(old_map));
2762 ASSERT(heap()->map_space()->Contains(forwarded));
2777 UpdatingVisitor updating_visitor(heap());
2786 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2787 ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2808 UpdatingVisitor updating_visitor(heap());
2865 heap()->map_space(), &MarkCompactCollector::RelocateMapObject);
2867 heap()->old_pointer_space(),
2870 heap()->old_data_space(), &MarkCompactCollector::RelocateOldDataObject);
2872 heap()->code_space(), &MarkCompactCollector::RelocateCodeObject);
2874 heap()->cell_space(), &MarkCompactCollector::RelocateCellObject);
2876 heap()->new_space(), &MarkCompactCollector::RelocateNewObject);
2892 heap()->new_space()->Flip();
2894 heap()->new_space()->MCCommitRelocationInfo();
2897 Address mark = heap()->new_space()->bottom();
2898 heap()->new_space()->set_age_mark(mark);
2904 heap()->CheckNewSpaceExpansionCriteria();
2905 heap()->IncrementYoungSurvivorsCounter(live_news_size);
2912 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2913 ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2926 heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2972 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2973 ASSERT(heap()->map_space()->Contains(map_addr));
2985 if (space == heap()->old_data_space()) {
2986 heap()->MoveBlock(new_addr, old_addr, obj_size);
2988 heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2998 PROFILE(heap()->isolate(),
3001 HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3008 return RelocateOldNonCodeObject(obj, heap()->old_pointer_space());
3013 return RelocateOldNonCodeObject(obj, heap()->old_data_space());
3018 return RelocateOldNonCodeObject(obj, heap()->cell_space());
3025 Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
3026 ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
3032 int obj_size = RestoreMap(obj, heap()->code_space(), new_addr, map_addr);
3038 heap()->MoveBlock(new_addr, old_addr, obj_size);
3046 PROFILE(heap()->isolate(), CodeMoveEvent(old_addr, new_addr));
3048 HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3059 int offset = heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
3062 Memory::Address_at(heap()->new_space()->FromSpaceLow() + offset);
3065 if (heap()->new_space()->FromSpaceContains(new_addr)) {
3066 ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
3067 heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
3069 ASSERT(heap()->TargetSpace(obj) == heap()->old_pointer_space() ||
3070 heap()->TargetSpace(obj) == heap()->old_data_space());
3075 if (heap()->InNewSpace(HeapObject::FromAddress(new_addr))) {
3076 heap()->CopyBlock(new_addr, old_addr, obj_size);
3078 heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
3091 PROFILE(heap()->isolate(),
3094 HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3103 code_flusher_ = new CodeFlusher(heap()->isolate());