
Lines Matching refs:ref

411     // objects, aside from weak ref accesses with read barriers (Reference::GetReferent() and system
418 // call above. At the same time, disable weak ref accesses using a per-thread flag. It's
420 // won't newly gray objects and push new refs onto the mark stack due to weak ref accesses and
422 // the thread-local mark stacks), without a race. This is why we use a thread-local weak ref
456 // Re-enable weak ref accesses.
477 // Iterate all threads (don't need to or can't use a checkpoint) and re-enable weak ref access.
540 // still in the middle of a read barrier which may have a from-space ref cached in a local
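The comment fragments above describe a handshake: the GC disables weak reference access with a per-thread flag, revokes and drains the thread-local mark stacks, and only then re-enables access via a checkpoint or by iterating all threads. A minimal, self-contained sketch of that gating idea follows; WeakRefAccessGate and its methods are invented names for illustration (ART keeps the flag per thread and ties it to Reference::GetReferent(), which this sketch does not model):

  #include <atomic>
  #include <condition_variable>
  #include <mutex>

  // Hypothetical stand-in for the "weak ref access enabled" flag the collector
  // toggles: weak-ref readers block while the flag is false and resume when the
  // GC re-enables access.
  class WeakRefAccessGate {
   public:
    void Disable() { enabled_.store(false, std::memory_order_release); }
    void Enable() {
      {
        std::lock_guard<std::mutex> lock(mu_);
        enabled_.store(true, std::memory_order_release);
      }
      cv_.notify_all();
    }
    // Called on the weak-ref access path; waits until the GC re-enables access.
    void WaitUntilEnabled() {
      std::unique_lock<std::mutex> lock(mu_);
      cv_.wait(lock, [this] { return enabled_.load(std::memory_order_acquire); });
    }
   private:
    std::atomic<bool> enabled_{true};
    std::mutex mu_;
    std::condition_variable cv_;
  };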
577 for (auto& ref : temp) {
578 gc_mark_stack_->PushBack(ref.AsMirrorPtr());
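The loop above (gc_mark_stack_->PushBack) transfers the entries of a revoked thread-local mark stack onto the shared GC mark stack. A simplified sketch of that transfer, using std::vector in place of ART's mark stack and mirror::Object types (all names here are stand-ins):

  #include <vector>

  struct Object {};  // stand-in for mirror::Object

  // Drain one revoked thread-local mark stack ("temp") into the shared GC stack,
  // analogous to the gc_mark_stack_->PushBack(ref.AsMirrorPtr()) loop above.
  void DrainThreadLocalStack(const std::vector<Object*>& temp,
                             std::vector<Object*>* gc_mark_stack) {
    for (Object* ref : temp) {
      gc_mark_stack->push_back(ref);
    }
  }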
636 << "ref=" << to_ref
665 void operator()(mirror::Object* ref) const
667 if (ref == nullptr) {
671 collector_->AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
673 if (collector_->RegionSpace()->IsInToSpace(ref)) {
674 CHECK(ref->GetReadBarrierPointer() == nullptr)
675 << "To-space ref " << ref << " " << PrettyTypeOf(ref)
676 << " has non-white rb_ptr " << ref->GetReadBarrierPointer();
678 CHECK(ref->GetReadBarrierPointer() == ReadBarrier::BlackPtr() ||
679 (ref->GetReadBarrierPointer() == ReadBarrier::WhitePtr() &&
680 collector_->IsOnAllocStack(ref)))
681 << "Non-moving/unevac from space ref " << ref << " " << PrettyTypeOf(ref)
682 << " has non-black rb_ptr " << ref->GetReadBarrierPointer()
685 << (collector_->GetHeap()->GetNonMovingSpace()->HasAddress(ref));
707 mirror::Object* ref =
710 visitor(ref);
712 void operator()(mirror::Class* klass, mirror::Reference* ref) const
715 this->operator()(ref, mirror::Reference::ReferentOffset(), false);
759 << "Non-moving space/unevac from space ref " << obj << " " << PrettyTypeOf(obj)
820 void operator()(mirror::Object* ref) const
822 if (ref == nullptr) {
826 collector_->AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
840 mirror::Object* ref =
843 visitor(ref);
845 void operator()(mirror::Class* klass, mirror::Reference* ref ATTRIBUTE_UNUSED) const
910 // Disable weak ref access.
1006 for (mirror::Object* ref : refs) {
1007 ProcessMarkStackRef(ref);
1070 // Scan ref fields.
1072 // Mark the gray ref as white or black.
1084 DCHECK(to_ref->AsReference()->GetPendingNext() != nullptr) << "Left unenqueued ref gray " << to_ref;
1136 // mode and disable weak ref accesses.
1139 LOG(INFO) << "Switched to shared mark stack mode and disabled weak ref access";
1380 void operator()(mirror::Object* ref) const SHARED_REQUIRES(Locks::mutator_lock_)
1382 DCHECK(ref != nullptr);
1383 DCHECK(collector_->region_space_bitmap_->Test(ref)) << ref;
1384 DCHECK(collector_->region_space_->IsInUnevacFromSpace(ref)) << ref;
1386 DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::BlackPtr()) << ref;
1388 ref->AtomicSetReadBarrierPointer(ReadBarrier::BlackPtr(), ReadBarrier::WhitePtr());
1389 DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr()) << ref;
1391 size_t obj_size = ref->SizeOf();
1393 collector_->region_space_->AddLiveBytes(ref, alloc_size);
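AddLiveBytes is called with an alloc_size derived from the object's SizeOf(); the listing elides how the two relate. A hedged sketch of the kind of rounding presumably involved, under the assumption that live bytes are accounted in units of the region space's allocation alignment (the constant name and 8-byte value below are made up, not ART's):

  #include <cstddef>

  // Illustrative only: round an object's size up to the allocation alignment
  // before adding it to the region's live-byte count.
  constexpr std::size_t kAlignment = 8;

  constexpr std::size_t RoundUpToAlignment(std::size_t obj_size) {
    return (obj_size + kAlignment - 1) & ~(kAlignment - 1);
  }
  // Example: RoundUpToAlignment(13) == 16.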
1411 mirror::Object* ref) {
1414 if (region_space_->IsInToSpace(ref)) {
1417 } else if (region_space_->IsInUnevacFromSpace(ref)) {
1418 CHECK(region_space_bitmap_->Test(ref)) << ref;
1419 } else if (region_space_->IsInFromSpace(ref)) {
1424 ref->GetLockWord(false).Dump(LOG(INTERNAL_FATAL));
1425 CHECK(false) << "Found from-space ref " << ref << " " << PrettyTypeOf(ref);
1427 AssertToSpaceInvariantInNonMovingSpace(obj, ref);
1447 LOG(INTERNAL_FATAL) << "root=" << root << " ref=" << *root;
1453 LOG(INTERNAL_FATAL) << "root=" << root << " ref=" << root->AsMirrorPtr();
1458 mirror::Object* ref) {
1461 if (region_space_->IsInToSpace(ref)) {
1464 } else if (region_space_->IsInUnevacFromSpace(ref)) {
1465 CHECK(region_space_bitmap_->Test(ref)) << ref;
1466 } else if (region_space_->IsInFromSpace(ref)) {
1481 ref->GetLockWord(false).Dump(LOG(INTERNAL_FATAL));
1485 CHECK(false) << "Found from-space ref " << ref << " " << PrettyTypeOf(ref);
1487 AssertToSpaceInvariantInNonMovingSpace(nullptr, ref);
1536 // If ref is on the allocation stack, then it is considered
1550 mirror::Object* ref) {
1551 // In a non-moving space. Check that the ref is marked.
1552 if (immune_spaces_.ContainsObject(ref)) {
1554 cc_heap_bitmap_->GetContinuousSpaceBitmap(ref);
1556 << "An immune space ref must have a bitmap. " << ref;
1558 CHECK(cc_bitmap->Test(ref))
1559 << "Unmarked immune space ref. obj=" << obj << " rb_ptr="
1560 << obj->GetReadBarrierPointer() << " ref=" << ref;
1562 CHECK(cc_bitmap->Test(ref))
1563 << "Unmarked immune space ref. obj=" << obj << " ref=" << ref;
1567 heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
1569 heap_mark_bitmap_->GetLargeObjectBitmap(ref);
1572 if ((!is_los && mark_bitmap->Test(ref)) ||
1573 (is_los && los_bitmap->Test(ref))) {
1576 // If ref is on the allocation stack, then it may not be
1579 CHECK(IsOnAllocStack(ref)) << "Unmarked ref that's not on the allocation stack. "
1580 << "obj=" << obj << " ref=" << ref;
1585 // Used to scan ref fields of an object.
1597 void operator()(mirror::Class* klass, mirror::Reference* ref) const
1600 collector_->DelayReferenceReferent(klass, ref);
1621 // Scan ref fields of an object.
1632 mirror::Object* ref = obj->GetFieldObject<
1634 mirror::Object* to_ref = Mark(ref);
1635 if (to_ref == ref) {
1639 mirror::Object* expected_ref = ref;
1656 mirror::Object* ref = *root;
1657 mirror::Object* to_ref = Mark(ref);
1658 if (to_ref == ref) {
1662 mirror::Object* expected_ref = ref;
1675 mirror::Object* const ref = root->AsMirrorPtr();
1676 mirror::Object* to_ref = Mark(ref);
1677 if (to_ref != ref) {
1679 auto expected_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(ref);
1683 if (ref != addr->LoadRelaxed().AsMirrorPtr()) {
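The root-visiting lines above follow one pattern: mark the referent, and write back only if Mark() returned a different (to-space) pointer, using a compare-and-swap so a concurrent store to the same slot is never clobbered. A simplified sketch with std::atomic (Object and Mark are placeholders; ART operates on mirror::Object and compressed references):

  #include <atomic>

  struct Object {};

  // Placeholder for the collector's Mark(): returns the to-space copy of ref.
  Object* Mark(Object* ref) { return ref; }

  // Fix up one GC root: only write if the pointer actually changed, and use a
  // CAS so that a racing update to the same slot is not overwritten.
  void UpdateRoot(std::atomic<Object*>* root) {
    Object* ref = root->load(std::memory_order_relaxed);
    Object* to_ref = Mark(ref);
    if (to_ref == ref) {
      return;  // already points into to-space
    }
    Object* expected = ref;
    // A failed CAS means another thread already updated the slot; that is fine.
    root->compare_exchange_strong(expected, to_ref);
  }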
1789 // invariant. Note that from_ref is a from space ref so the SizeOf()
1984 bool ConcurrentCopying::IsOnAllocStack(mirror::Object* ref) {
1987 return alloc_stack->Contains(ref);
1990 mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref) {
1991 // ref is in a non-moving space (from_ref == to_ref).
1992 DCHECK(!region_space_->HasAddress(ref)) << ref;
1993 if (immune_spaces_.ContainsObject(ref)) {
1995 cc_heap_bitmap_->GetContinuousSpaceBitmap(ref);
1999 DCHECK(heap_mark_bitmap_->GetContinuousSpaceBitmap(ref)->Test(ref))
2004 ref->AtomicSetReadBarrierPointer(ReadBarrier::WhitePtr(), ReadBarrier::GrayPtr());
2006 if (cc_bitmap->AtomicTestAndSet(ref)) {
2011 DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::GrayPtr());
2013 PushOntoMarkStack(ref);
2018 heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
2020 heap_mark_bitmap_->GetLargeObjectBitmap(ref);
2023 if (!is_los && mark_bitmap->Test(ref)) {
2026 DCHECK(ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr() ||
2027 ref->GetReadBarrierPointer() == ReadBarrier::BlackPtr());
2029 } else if (is_los && los_bitmap->Test(ref)) {
2032 DCHECK(ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr() ||
2033 ref->GetReadBarrierPointer() == ReadBarrier::BlackPtr());
2037 if (IsOnAllocStack(ref)) {
2041 DCHECK(!mark_bitmap->Test(ref));
2043 DCHECK(!los_bitmap->Test(ref));
2046 DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr());
2052 ref->AtomicSetReadBarrierPointer(ReadBarrier::WhitePtr(), ReadBarrier::GrayPtr());
2054 if (!is_los && mark_bitmap->AtomicTestAndSet(ref)) {
2056 } else if (is_los && los_bitmap->AtomicTestAndSet(ref)) {
2061 DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::GrayPtr());
2063 PushOntoMarkStack(ref);
2068 return ref;
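MarkNonMoving, excerpted at the end of the listing, grays the object, atomically test-and-sets its mark (or LOS) bit, and pushes it onto the mark stack only if this thread won the race. A compressed illustration of that flow; the rb_state encoding, the single bool mark bit, and the types below are all invented stand-ins for ART's read-barrier pointers and space bitmaps:

  #include <atomic>
  #include <vector>

  struct Obj {
    std::atomic<int> rb_state{0};  // 0 = white, 1 = gray, 2 = black (toy encoding)
  };

  // Gray the object, claim its mark bit, and push it exactly once.
  bool MarkNonMovingSketch(Obj* ref,
                           std::atomic<bool>* mark_bit,
                           std::vector<Obj*>* mark_stack) {
    int expected_white = 0;
    ref->rb_state.compare_exchange_strong(expected_white, 1);  // white -> gray; may already be gray/black
    if (mark_bit->exchange(true)) {
      return false;  // another thread marked it first and will push it
    }
    mark_stack->push_back(ref);  // only the winner of the mark bit pushes
    return true;
  }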