Lines Matching refs:ref
309 mirror::Object* ref = *root;
310 if (ref != nullptr) {
311 mirror::Object* to_ref = concurrent_copying_->Mark(ref);
312 if (to_ref != ref) {
326 mirror::Object* ref = root->AsMirrorPtr();
327 mirror::Object* to_ref = concurrent_copying_->Mark(ref);
328 if (to_ref != ref) {
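
Both root visitors above (lines 309-312 and 326-328) follow the same pattern: load the referent, Mark() it, and write the to-space address back only when the object actually moved. A minimal sketch of that pattern, with the write-back simplified to a plain store (the real updates are atomic; see lines 1990-2018 below):

    void VisitRoot(mirror::Object** root) {
      mirror::Object* ref = *root;
      if (ref != nullptr) {
        // Mark() returns the to-space address, copying the object if needed.
        mirror::Object* to_ref = concurrent_copying_->Mark(ref);
        if (to_ref != ref) {
          *root = to_ref;  // Fix up the root to point at the to-space copy.
        }
      }
    }
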
401 void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
404 CheckReference(ref->GetReferent<kWithoutReadBarrier>(),
405 ref,
426 void CheckReference(ObjPtr<mirror::Object> ref,
430 if (ref != nullptr) {
431 if (!collector_->immune_spaces_.ContainsObject(ref.Ptr())) {
434 Thread::Current(), ref.Ptr()))
435 << "Non gray object references non immune, non zygote large object "<< ref << " "
436 << mirror::Object::PrettyTypeOf(ref) << " in holder " << holder << " "
441 ref->GetClass<kVerifyNone, kWithoutReadBarrier>()));
483 ObjPtr<mirror::Reference> ref) const
486 this->operator()(ref, mirror::Reference::ReferentOffset(), false);
501 void CheckReference(mirror::Object* ref, int32_t offset = -1) const
503 CHECK(ref == nullptr || !cc_->region_space_->IsInNewlyAllocatedRegion(ref))
505 << ref->PrettyTypeOf() << "(" << ref << ") in newly allocated region at offset=" << offset;
753 // objects, aside from weak ref accesses with read barriers (Reference::GetReferent() and system
760 // call above. At the same time, disable weak ref accesses using a per-thread flag. It's
762 // won't newly gray objects and push new refs onto the mark stack due to weak ref accesses and
764 // the thread-local mark stacks), without a race. This is why we use a thread-local weak ref
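
The comment block above (lines 753-764) is the heart of the flip: weak ref accesses are disabled with a per-thread flag so no mutator can gray an object or push new refs through Reference::GetReferent() while the thread-local mark stacks are being revoked. A hedged sketch of what such a gate could look like; WaitForWeakRefAccess() is a hypothetical helper, not a name from this file:

    // Slow path of a weak ref read while the flag is cleared: block until the
    // GC re-enables weak ref access for this thread (see lines 798 and 823).
    mirror::Object* ReadWeakRef(Thread* self, mirror::Reference* ref) {
      while (!self->GetWeakRefAccessEnabled()) {  // Per-thread flag.
        WaitForWeakRefAccess(self);  // Hypothetical: wait on a condition variable.
      }
      return ref->GetReferent<kWithReadBarrier>();
    }
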
798 // Re-enable weak ref accesses.
823 // Iterate over all threads (a checkpoint is unnecessary or can't be used here) and re-enable weak ref access.
901 // to ensure no threads are still in the middle of a read barrier which may have a from-space ref
912 void ConcurrentCopying::PushOntoFalseGrayStack(mirror::Object* ref) {
914 DCHECK(ref != nullptr);
916 false_gray_stack_.push_back(ref);
952 for (auto& ref : temp) {
953 gc_mark_stack_->PushBack(ref.AsMirrorPtr());
1011 << "ref=" << to_ref
1040 void operator()(mirror::Object* ref,
1044 if (ref == nullptr) {
1048 collector_->AssertToSpaceInvariant(holder, offset, ref);
1050 CHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::WhiteState())
1051 << "Ref " << ref << " " << ref->PrettyTypeOf()
1075 mirror::Object* ref =
1078 visitor(ref, offset, obj.Ptr());
1081 ObjPtr<mirror::Reference> ref) const
1084 this->operator()(ref, mirror::Reference::ReferentOffset(), false);
1180 void operator()(mirror::Object* ref) const
1182 if (ref == nullptr) {
1186 collector_->AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
1202 mirror::Object* ref =
1205 visitor(ref);
1207 void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
1274 // Disable weak ref access.
1353 // barrier but before pushing onto the mark stack. b/32508093. Note the weak ref access is
1375 for (mirror::Object* ref : refs) {
1376 ProcessMarkStackRef(ref);
1465 DCHECK(to_ref->AsReference()->GetPendingNext() != nullptr) << "Left unenqueued ref gray " << to_ref;
1524 // mode and disable weak ref accesses.
1527 LOG(INFO) << "Switched to shared mark stack mode and disabled weak ref access";
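
Lines 1524-1527 describe a mode switch on the mark stack. The fragments mention thread-local stacks (line 764), a shared stack, and GC-only processing, which suggests a small mode enum along these lines (the enumerator names are assumptions, not taken from the matches):

    enum MarkStackMode {
      kMarkStackModeOff,          // No marking in progress.
      kMarkStackModeThreadLocal,  // Mutators push onto per-thread stacks.
      kMarkStackModeShared,       // Mutators push onto one lock-protected stack.
      kMarkStackModeGcExclusive   // Only the GC thread may push/pop.
    };
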
1734 mirror::Object* ref) {
1738 space::RegionSpace::RegionType type = region_space_->GetRegionType(ref);
1743 CHECK(IsMarkedInUnevacFromSpace(ref)) << ref;
1749 ref->GetLockWord(false).Dump(LOG_STREAM(FATAL_WITHOUT_ABORT));
1750 CHECK(false) << "Found from-space ref " << ref << " " << ref->PrettyTypeOf();
1752 AssertToSpaceInvariantInNonMovingSpace(obj, ref);
1772 LOG(FATAL_WITHOUT_ABORT) << "root=" << root << " ref=" << *root;
1778 LOG(FATAL_WITHOUT_ABORT) << "root=" << root << " ref=" << root->AsMirrorPtr();
1783 mirror::Object* ref) {
1786 if (region_space_->IsInToSpace(ref)) {
1789 } else if (region_space_->IsInUnevacFromSpace(ref)) {
1790 CHECK(IsMarkedInUnevacFromSpace(ref)) << ref;
1791 } else if (region_space_->IsInFromSpace(ref)) {
1808 ref->GetLockWord(false).Dump(LOG_STREAM(FATAL_WITHOUT_ABORT));
1812 CHECK(false) << "Found from-space ref " << ref << " " << ref->PrettyTypeOf();
1814 AssertToSpaceInvariantInNonMovingSpace(nullptr, ref);
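
Lines 1783-1814 check the to-space invariant by region type; stitched together, the control flow condenses to roughly the following (the intervening unmatched lines are guessed):

    void AssertToSpaceInvariant(mirror::Object* ref) {
      if (region_space_->IsInToSpace(ref)) {
        return;  // OK: already a to-space ref.
      } else if (region_space_->IsInUnevacFromSpace(ref)) {
        CHECK(IsMarkedInUnevacFromSpace(ref)) << ref;  // Must be marked in place.
      } else if (region_space_->IsInFromSpace(ref)) {
        // A from-space ref surviving to this point is a GC bug: dump and abort.
        ref->GetLockWord(false).Dump(LOG_STREAM(FATAL_WITHOUT_ABORT));
        CHECK(false) << "Found from-space ref " << ref << " " << ref->PrettyTypeOf();
      } else {
        AssertToSpaceInvariantInNonMovingSpace(nullptr, ref);
      }
    }
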
1854 // If ref is on the allocation stack, then it is considered
1868 mirror::Object* ref) {
1869 // In a non-moving space. Check that the ref is marked.
1870 if (immune_spaces_.ContainsObject(ref)) {
1877 CHECK(updated_all_immune_objects || ref->GetReadBarrierState() == ReadBarrier::GrayState())
1878 << "Unmarked immune space ref. obj=" << obj << " rb_state="
1880 << " ref=" << ref << " ref rb_state=" << ref->GetReadBarrierState()
1885 heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
1887 heap_mark_bitmap_->GetLargeObjectBitmap(ref);
1889 if ((!is_los && mark_bitmap->Test(ref)) ||
1890 (is_los && los_bitmap->Test(ref))) {
1893 // If ref is on the allocation stack, then it may not be
1896 CHECK(IsOnAllocStack(ref)) << "Unmarked ref that's not on the allocation stack. "
1897 << "obj=" << obj << " ref=" << ref;
1902 // Used to scan ref fields of an object.
1914 void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
1917 collector_->DelayReferenceReferent(klass, ref);
1938 // Scan ref fields of an object.
1960 mirror::Object* ref = obj->GetFieldObject<
1963 ref,
1966 if (to_ref == ref) {
1970 mirror::Object* expected_ref = ref;
1990 mirror::Object* ref = *root;
1991 mirror::Object* to_ref = Mark(ref);
1992 if (to_ref == ref) {
1996 mirror::Object* expected_ref = ref;
2010 mirror::Object* const ref = root->AsMirrorPtr();
2011 mirror::Object* to_ref = Mark<kGrayImmuneObject>(ref);
2012 if (to_ref != ref) {
2014 auto expected_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(ref);
2018 if (ref != addr->LoadRelaxed().AsMirrorPtr()) {
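
The fragment at lines 2010-2018 cuts off inside the atomic root fix-up. Completing it under the assumption that the retry loop uses a weak relaxed CAS (consistent with the expected_ref/LoadRelaxed pair already visible above):

    if (to_ref != ref) {
      auto* addr = reinterpret_cast<Atomic<mirror::CompressedReference<mirror::Object>>*>(root);
      auto expected_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(ref);
      auto new_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(to_ref);
      do {
        if (ref != addr->LoadRelaxed().AsMirrorPtr()) {
          break;  // A mutator already updated the root; nothing to do.
        }
      } while (!addr->CompareExchangeWeakRelaxed(expected_ref, new_ref));
    }
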
2175 // Note that from_ref is a from-space ref, so the SizeOf() call will access the from-space meta
2379 bool ConcurrentCopying::IsOnAllocStack(mirror::Object* ref) {
2382 return alloc_stack->Contains(ref);
2385 mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref,
2388 // ref is in a non-moving space (from_ref == to_ref).
2389 DCHECK(!region_space_->HasAddress(ref)) << ref;
2390 DCHECK(!immune_spaces_.ContainsObject(ref));
2393 heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
2395 heap_mark_bitmap_->GetLargeObjectBitmap(ref);
2397 if (!is_los && mark_bitmap->Test(ref)) {
2400 DCHECK(ref->GetReadBarrierState() == ReadBarrier::GrayState() ||
2401 ref->GetReadBarrierState() == ReadBarrier::WhiteState());
2403 } else if (is_los && los_bitmap->Test(ref)) {
2406 DCHECK(ref->GetReadBarrierState() == ReadBarrier::GrayState() ||
2407 ref->GetReadBarrierState() == ReadBarrier::WhiteState());
2411 if (IsOnAllocStack(ref)) {
2415 DCHECK(!mark_bitmap->Test(ref));
2417 DCHECK(!los_bitmap->Test(ref));
2420 DCHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::WhiteState());
2427 if ((!is_los && mark_bitmap->Test(ref)) ||
2428 (is_los && los_bitmap->Test(ref))) {
2429 return ref;
2432 if (is_los && !IsAligned<kPageSize>(ref)) {
2433 // Ref is a large object that is not aligned; it must be heap corruption. Dump data before
2435 heap_->GetVerification()->LogHeapCorruption(holder, offset, ref, /* fatal */ true);
2441 cas_success = ref->AtomicSetReadBarrierState(ReadBarrier::WhiteState(),
2444 if (!is_los && mark_bitmap->AtomicTestAndSet(ref)) {
2447 ref->GetReadBarrierState() == ReadBarrier::GrayState()) {
2448 PushOntoFalseGrayStack(ref);
2450 } else if (is_los && los_bitmap->AtomicTestAndSet(ref)) {
2453 ref->GetReadBarrierState() == ReadBarrier::GrayState()) {
2454 PushOntoFalseGrayStack(ref);
2459 DCHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::GrayState());
2461 PushOntoMarkStack(ref);
2465 return ref;
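
Lines 2385-2465 implement marking in a non-moving space. Stripped of the DCHECKs and the large-object split, the control flow condenses to roughly the following (a sketch of the Baker read-barrier path, not a drop-in replacement):

    mirror::Object* MarkNonMoving(mirror::Object* ref) {
      if (mark_bitmap->Test(ref)) {
        return ref;  // Already marked (lines 2397-2403).
      }
      if (IsOnAllocStack(ref)) {
        return ref;  // Newly allocated, implicitly live (lines 2411-2420).
      }
      // Try to turn the object gray before claiming the bitmap bit.
      bool cas_success = ref->AtomicSetReadBarrierState(ReadBarrier::WhiteState(),
                                                        ReadBarrier::GrayState());
      if (mark_bitmap->AtomicTestAndSet(ref)) {
        // Lost the race: another thread marked it first. If our gray CAS
        // succeeded anyway, record it for a later fix-up (lines 2444-2448).
        if (cas_success && ref->GetReadBarrierState() == ReadBarrier::GrayState()) {
          PushOntoFalseGrayStack(ref);
        }
      } else {
        PushOntoMarkStack(ref);  // We own the mark; scan its fields later.
      }
      return ref;
    }
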