Home | Sort by relevance | Sort by last modified time
    Searched refs:to_ref (Results 1 - 4 of 4) sorted by null

  /art/runtime/gc/collector/
concurrent_copying.cc 167 mirror::Object* to_ref = Mark(self, from_ref); local
168 if (from_ref != to_ref) {
174 } while (!field->CasWeakRelaxed(from_ref, to_ref));
479 mirror::Object* to_ref = concurrent_copying_->Mark(self, ref); local
480 if (to_ref != ref) {
481 *root = to_ref;
496 mirror::Object* to_ref = concurrent_copying_->Mark(self, ref); local
497 if (to_ref != ref) {
498 root->Assign(to_ref);
2046 mirror::Object* to_ref = gc_mark_stack_->PopBack(); local
2089 mirror::Object* to_ref = gc_mark_stack_->PopBack(); local
2116 mirror::Object* to_ref = p->AsMirrorPtr(); local
3099 mirror::Object* to_ref = Mark</*kGrayImmuneObject=*/false, kNoUnEvac, /*kFromGCThread=*/true>( local
3132 mirror::Object* to_ref = Mark(self, ref); local
3153 mirror::Object* to_ref = Mark<kGrayImmuneObject>(self, ref); local
3339 mirror::Object* to_ref = region_space_->AllocNonvirtual</*kForEvac=*/ true>( local
3499 mirror::Object* to_ref; local
3504 << "from_ref=" << from_ref << " to_ref=" << to_ref; local
3694 mirror::Object* to_ref = IsMarked(from_ref); local
    [all...]
concurrent_copying-inl.h 162 mirror::Object* to_ref = GetFwdPtr(from_ref); local
163 if (to_ref == nullptr) {
165 to_ref = Copy(self, from_ref, holder, offset);
169 DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref))
170 << "from_ref=" << from_ref << " to_ref=" << to_ref; local
171 return to_ref;
concurrent_copying.h 167 // Scan the reference fields of object `to_ref`.
169 void Scan(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
201 void ProcessMarkStackRef(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
  /art/runtime/entrypoints/quick/
quick_jni_entrypoints.cc 46 mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack); local
47 handle_on_stack->Assign(to_ref);

Completed in 244 milliseconds