HomeSort by relevance Sort by last modified time
    Searched refs:from_ref (Results 1 - 3 of 3) sorted by relevance

  /art/runtime/gc/collector/
concurrent_copying-inl.h 102 inline mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref,
105 if (from_ref == nullptr) {
124 return from_ref;
127 if (region_space_->HasAddress(from_ref)) {
128 space::RegionSpace::RegionType rtype = region_space_->GetRegionTypeUnsafe(from_ref);
132 return from_ref;
134 mirror::Object* to_ref = GetFwdPtr(from_ref);
137 to_ref = Copy(from_ref, holder, offset);
142 << "from_ref=" << from_ref << " to_ref=" << to_ref
    [all...]
concurrent_copying.h 114 // Mark object `from_ref`, copying it to the to-space if needed.
115 ALWAYS_INLINE mirror::Object* Mark(mirror::Object* from_ref,
120 ALWAYS_INLINE mirror::Object* MarkFromReadBarrier(mirror::Object* from_ref)
143 virtual mirror::Object* IsMarked(mirror::Object* from_ref) OVERRIDE
149 mirror::Object* Copy(mirror::Object* from_ref,
203 virtual mirror::Object* MarkObject(mirror::Object* from_ref) OVERRIDE
206 virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* from_ref,
210 bool IsMarkedInUnevacFromSpace(mirror::Object* from_ref)
232 mirror::Object* GetFwdPtr(mirror::Object* from_ref)
256 mirror::Object* MarkNonMoving(mirror::Object* from_ref,
    [all...]
concurrent_copying.cc 133 mirror::Object* from_ref = field->AsMirrorPtr(); local
134 if (from_ref == nullptr) {
137 mirror::Object* to_ref = Mark(from_ref);
138 if (from_ref != to_ref) {
140 if (field->AsMirrorPtr() != from_ref) {
144 } while (!field->CasWeakRelaxed(from_ref, to_ref));
2716 mirror::Object* from_ref = field->AsMirrorPtr(); local
    [all...]

Completed in 89 milliseconds