/external/v8/test/cctest/

test-mementos.cc
    36  NewSpace* new_space = heap->new_space();  [local]
    49  reinterpret_cast<AllocationMemento*>(new_space->top() + kHeapObjectTag);
    78  Address top = CcTest::i_isolate()->heap()->new_space()->top();
    79  *(CcTest::i_isolate()->heap()->new_space()->allocation_limit_address()) = top;
    96  CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
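The test-mementos.cc hits above show how those tests force a scavenge with no slack left in the young generation: the new-space allocation limit is pinned to the current top before collecting. A minimal sketch of that pattern, assuming the cctest harness and V8 internal headers are available; the surrounding test body is hypothetical.

    // Pin the new-space allocation limit to the current top so no further
    // inline allocation can succeed (mirrors test-mementos.cc lines 78-79).
    i::Heap* heap = CcTest::i_isolate()->heap();
    i::Address top = heap->new_space()->top();
    *(heap->new_space()->allocation_limit_address()) = top;
    // Scavenge the young generation with the limit exhausted (line 96).
    heap->CollectGarbage(i::NEW_SPACE);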
test-dictionary.cc
    59  CcTest::heap()->CollectGarbage(NEW_SPACE);
   178  SimulateFullSpace(CcTest::heap()->new_space());
   209  SimulateFullSpace(CcTest::heap()->new_space());
test-unboxed-doubles.cc
    [all...]

test-api.cc
   500  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   501  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
   531  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   532  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
   554  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   555  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   567  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   568  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
   600  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   601  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
    [all...]
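The repeated pairs of collections in test-api.cc are the standard cctest idiom for moving a freshly allocated object out of new space: the first scavenge copies it into the survivor semispace, the second promotes it into the old generation. As a fragment (whatever handle the surrounding test holds is what gets promoted):

    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // object now in survivor space
    CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // object now in the old generation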
/external/v8/test/cctest/heap/

test-spaces.cc
   363  NewSpace new_space(heap);
   365  CHECK(new_space.SetUp(CcTest::heap()->ReservedSemiSpaceSize(),
   367  CHECK(new_space.HasBeenSetUp());
   369  while (new_space.Available() >= Page::kMaxRegularHeapObjectSize) {
   371  new_space.AllocateRawUnaligned(Page::kMaxRegularHeapObjectSize)
   373  CHECK(new_space.Contains(HeapObject::cast(obj)));
   376  new_space.TearDown();
   762  NewSpace* new_space = i_isolate->heap()->new_space();  [local]
   766  if (new_space->InitialTotalCapacity() == Page::kPageSize)
   825  NewSpace* new_space = i_isolate->heap()->new_space();  [local]
   908  NewSpace* new_space = i_isolate->heap()->new_space();  [local]
    [all...]
utils-inl.h
    36  heap->new_space()->DisableInlineAllocationSteps();
    38  static_cast<int>(*heap->new_space()->allocation_limit_address() -
    39  *heap->new_space()->allocation_top_address());
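utils-inl.h is where the heap tests measure how much room is left for inline allocation in new space: inline allocation steps are disabled first so the limit stays put, then the free bytes are the distance from the allocation top to the allocation limit. A sketch of that computation; the variable name is illustrative, and the final comment describes how helpers such as the SimulateFullSpace() calls seen above presumably use the value.

    heap->new_space()->DisableInlineAllocationSteps();
    // Remaining bytes between the current allocation top and the limit
    // (mirrors utils-inl.h lines 36-39).
    int space_remaining =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    // A helper like SimulateFullSpace() can then fill this many bytes so the
    // next new-space allocation misses the inline fast path.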
test-heap.cc
   442  heap->CollectGarbage(NEW_SPACE);
   469  heap->CollectGarbage(NEW_SPACE);
   488  heap->CollectGarbage(NEW_SPACE);
   559  heap->CollectGarbage(NEW_SPACE);
   617  heap->CollectGarbage(NEW_SPACE);
   655  heap->CollectGarbage(NEW_SPACE);
   702  heap->CollectGarbage(NEW_SPACE);
  2290  NewSpace* new_space = heap->new_space();  [local]
  2350  NewSpace* new_space = heap->new_space();  [local]
  5614  NewSpace* new_space = heap->new_space();  [local]
    [all...]
/external/v8/src/heap/

scavenge-job.cc
    28  size_t new_space_size = heap->new_space()->Size();
    29  size_t new_space_capacity = heap->new_space()->Capacity();
    37  heap->CollectGarbage(NEW_SPACE, "idle task: scavenge");
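ScavengeJob sizes up the young generation from these two numbers before scheduling an idle-time scavenge. The listing shows the inputs (Size(), Capacity()) and the resulting collection call; the threshold below is illustrative only and not the real heuristic, which also weighs scavenge speed against the idle time granted.

    size_t new_space_size = heap->new_space()->Size();          // live bytes in new space
    size_t new_space_capacity = heap->new_space()->Capacity();  // current capacity
    if (new_space_size > new_space_capacity / 2) {  // hypothetical trigger
      heap->CollectGarbage(NEW_SPACE, "idle task: scavenge");
    }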
scavenger.cc
    99  if (heap->new_space()->Contains(obj)) {
   100  heap->new_space()->RecordAllocation(obj);
   102  heap->new_space()->RecordPromotion(obj);
   116  target->address() + size == heap->new_space()->top() ||
   117  target->address() + size + kPointerSize == heap->new_space()->top());
   123  heap->new_space()->top()));
   149  DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
   151  heap->new_space()->AllocateRaw(object_size, alignment);
   159  heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
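The scavenger's bookkeeping for an evacuated object follows directly from lines 99-102: if the copy still lives in new space it was a semispace-to-semispace copy, otherwise the object was promoted. As a fragment:

    if (heap->new_space()->Contains(obj)) {
      heap->new_space()->RecordAllocation(obj);  // copy stayed in the to-space
    } else {
      heap->new_space()->RecordPromotion(obj);   // copy left the young generation
    }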
store-buffer-inl.h
    38  !heap_->new_space()->Contains(addr));
mark-compact.cc
   155  VerifyMarking(heap->new_space());
   232  VerifyEvacuation(heap->new_space());
   415  VerifyMarkbitsAreClean(heap_->new_space());
   470  ClearMarkbitsInNewSpace(heap_->new_space());
   643  case NEW_SPACE:
   644  return "NEW_SPACE";
  3068  NewSpace* new_space = heap()->new_space();  [local]
    [all...]
incremental-marking.cc
   397  DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
   429  ActivateIncrementalWriteBarrier(heap_->new_space());
   544  heap_->new_space()->AddInlineAllocationObserver(&observer_);
   942  heap_->new_space()->RemoveInlineAllocationObserver(&observer_);
    [all...]
heap.cc
   264  if (space != NEW_SPACE) {
   485  case NEW_SPACE:
   486  return "new_space";
   696  (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
   738  UPDATE_COUNTERS_FOR_SPACE(new_space)
   753  new_space_top_after_last_gc_ = new_space()->top();
   867  // not matter, so long as we do not specify NEW_SPACE, which would not
   877  // not matter, so long as we do not specify NEW_SPACE, which would not
    [all...]
gc-tracer.cc
   164  heap_->new_space()->top() - heap_->new_space()->bottom();
   527  "evacuate.new_space=%.1f "
    [all...]
heap.h
  1080  NewSpace* new_space() { return &new_space_; }  [function in class:v8::internal::Heap]
    [all...]
heap-inl.h
    44  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
   211  if (NEW_SPACE == space) {
   242  // NEW_SPACE is not allowed here.
   430  case NEW_SPACE:
spaces.h
    [all...]
/external/v8/src/extensions/

statistics-extension.cc
   120  {heap->new_space()->Size(), "new_space_live_bytes"},
   121  {heap->new_space()->Available(), "new_space_available_bytes"},
   122  {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
/external/v8/src/

string-stream.cc
   567  char* new_space = NewArray<char>(new_bytes);  [local]
   568  if (new_space == NULL) {
   571  MemCopy(new_space, space_, *bytes);
   574  space_ = new_space;
   575  return new_space;
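Note that the string-stream.cc hits are a false friend: new_space there is just the freshly allocated character buffer a StringStream grows into, not the GC new space matched everywhere else in this listing. The grow-and-copy shape, sketched from the lines above; the body of the failure branch is an assumption about the elided line.

    char* new_space = NewArray<char>(new_bytes);  // allocate the larger buffer
    if (new_space == NULL) {
      return space_;                              // assumed: keep the old buffer on failure
    }
    MemCopy(new_space, space_, *bytes);           // copy the existing contents across
    space_ = new_space;                           // adopt the new buffer
    return new_space;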
api.cc
    [all...]
/external/blktrace/

blkparse.c
   326  int new_space, size;  [local]
   339  new_space = (new_count - ncpus) * sizeof(struct per_cpu_info);
   340  memset(new_start, 0, new_space);
    [all...]
/external/v8/test/cctest/compiler/

test-simplified-lowering.cc
   657  CHECK(t.heap()->new_space()->Contains(result) || flag[i] == TENURED);
    [all...]