    Searched refs:to_ref (Results 1 - 3 of 3)

  /art/runtime/gc/collector/
concurrent_copying.cc 459 void ConcurrentCopying::PushOntoMarkStack(mirror::Object* to_ref) {
461 << " " << to_ref << " " << PrettyTypeOf(to_ref);
463 CHECK(mark_queue_.Enqueue(to_ref)) << "Mark queue overflow";
465 CHECK(mark_queue_.EnqueueThreadUnsafe(to_ref)) << "Mark queue overflow";
699 mirror::Object* to_ref; local
700 while ((to_ref = PopOffMarkStack()) != nullptr) {
702 DCHECK(!region_space_->IsInFromSpace(to_ref));
704 DCHECK(to_ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr())
705 << " " << to_ref << " " << to_ref->GetReadBarrierPointer(
1133 mirror::Object* to_ref = Mark(ref); local
1159 mirror::Object* to_ref = Mark(ref); local
1184 mirror::Object* to_ref = Mark(ref); local
1295 mirror::Object* to_ref = region_space_->AllocNonvirtual<true>( local
1370 heap_->non_moving_space_->Free(Thread::Current(), to_ref); local
1423 mirror::Object* to_ref; local
1428 << "from_ref=" << from_ref << " to_ref=" << to_ref; local
1514 mirror::Object* to_ref; local
1518 DCHECK(to_ref != ReadBarrier::GrayPtr()) << "from_ref=" << from_ref << " to_ref=" << to_ref; local
1525 << "from_ref=" << from_ref << " to_ref=" << to_ref; local
1651 mirror::Object* to_ref = reinterpret_cast<ConcurrentCopying*>(arg)->IsMarked(from_ref); local
    [all...]
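Taken together, the concurrent_copying.cc hits trace a push/drain pattern: PushOntoMarkStack enqueues a to-space reference and CHECK-fails on overflow, while the drain loop at line 699 pops references, asserts they are no longer in from-space, and scans them. The following is a minimal, self-contained sketch of that pattern; MarkQueue, Scan, and the in_from_space flag are simplified stand-ins for illustration, not ART's actual classes.

// Sketch of the push/drain mark-stack pattern from concurrent_copying.cc.
// MarkQueue is a plain deque stand-in for ART's bounded atomic queue.
#include <cassert>
#include <deque>

struct Object {
  bool in_from_space = false;  // stand-in for region_space_->IsInFromSpace()
};

class MarkQueue {
 public:
  bool Enqueue(Object* to_ref) {  // unbounded here, so it always succeeds
    queue_.push_back(to_ref);
    return true;
  }
  Object* Dequeue() {  // nullptr signals an empty queue, ending the drain
    if (queue_.empty()) return nullptr;
    Object* ref = queue_.front();
    queue_.pop_front();
    return ref;
  }
 private:
  std::deque<Object*> queue_;
};

void Scan(Object* /*to_ref*/) { /* visit fields, marking referents */ }

void PushOntoMarkStack(MarkQueue& q, Object* to_ref) {
  bool ok = q.Enqueue(to_ref);
  assert(ok && "Mark queue overflow");  // mirrors the fatal CHECK at line 463
  (void)ok;
}

void DrainMarkStack(MarkQueue& q) {
  Object* to_ref;
  while ((to_ref = q.Dequeue()) != nullptr) {
    assert(!to_ref->in_from_space);  // popped refs must be to-space refs
    Scan(to_ref);
  }
}

int main() {
  MarkQueue q;
  Object a, b;
  PushOntoMarkStack(q, &a);
  PushOntoMarkStack(q, &b);
  DrainMarkStack(q);
}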
concurrent_copying.h 68 bool Enqueue(mirror::Object* to_ref) {
79 GetSlotAddr(t)->StoreSequentiallyConsistent(to_ref);
84 bool EnqueueThreadUnsafe(mirror::Object* to_ref) {
91 GetSlotAddr(t)->StoreRelaxed(to_ref);
192 void Scan(mirror::Object* to_ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
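The concurrent_copying.h hits show two enqueue paths into the mark queue: a concurrent Enqueue that publishes its slot with a sequentially consistent store, and an EnqueueThreadUnsafe variant that can use a relaxed store because no other thread races on the slot (for example, during a pause). Below is a minimal sketch of that memory-ordering distinction using std::atomic; the capacity, tail indexing, and overflow handling are assumptions for illustration, not ART's actual queue layout.

// Sketch of the two-store enqueue pattern from concurrent_copying.h.
// Capacity, indexing, and overflow handling are simplified assumptions.
#include <atomic>
#include <cstddef>

struct Object {};

template <std::size_t kCapacity>
class MarkQueue {
 public:
  // Concurrent enqueue: claim a slot atomically, then publish the pointer
  // with a sequentially consistent store (cf. StoreSequentiallyConsistent).
  bool Enqueue(Object* to_ref) {
    std::size_t t = tail_.fetch_add(1, std::memory_order_seq_cst);
    if (t >= kCapacity) return false;  // mark queue overflow
    slots_[t].store(to_ref, std::memory_order_seq_cst);
    return true;
  }

  // Single-threaded enqueue: a relaxed store suffices when no other thread
  // can touch the queue (cf. StoreRelaxed).
  bool EnqueueThreadUnsafe(Object* to_ref) {
    std::size_t t = tail_.load(std::memory_order_relaxed);
    if (t >= kCapacity) return false;  // mark queue overflow
    slots_[t].store(to_ref, std::memory_order_relaxed);
    tail_.store(t + 1, std::memory_order_relaxed);
    return true;
  }

 private:
  std::atomic<std::size_t> tail_{0};
  std::atomic<Object*> slots_[kCapacity] = {};
};

int main() {
  static MarkQueue<1024> queue;
  Object obj;
  queue.Enqueue(&obj);
  queue.EnqueueThreadUnsafe(&obj);
}

The split mirrors the snippet: pay for sequential consistency only on the concurrent path, and use the cheaper relaxed store where single-threaded access is guaranteed.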
  /art/runtime/entrypoints/quick/
quick_jni_entrypoints.cc 28 mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack); local
29 handle_on_stack->Assign(to_ref);
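The quick_jni_entrypoints.cc hit shows the root fix-up a JNI entrypoint performs under a concurrent copying collector: read the object handle on the stack through a read barrier, then write the (possibly moved) to-space reference back into the handle so later code sees the up-to-date pointer. A minimal sketch of that pattern follows; Handle is a simplified stand-in for ART's handle machinery, BarrierForRoot is an identity stub rather than a real marking barrier, and FixUpHandleOnEntry is a hypothetical name for the entrypoint step.

// Sketch of the read-barrier-on-root fix-up from quick_jni_entrypoints.cc.
// Handle, the stub barrier, and FixUpHandleOnEntry are stand-ins, not
// ART's actual types or entrypoints.
struct Object {};

struct Handle {
  Object* ref;
  Object* Get() const { return ref; }
  void Assign(Object* new_ref) { ref = new_ref; }
};

// A real barrier would return the to-space copy of the referenced object,
// marking it first if necessary; this stub just returns the same reference.
Object* BarrierForRoot(Handle* handle) {
  return handle->Get();
}

// Mirrors the snippet: re-read the root through the barrier and store the
// (possibly updated) to-space reference back into the stack slot.
void FixUpHandleOnEntry(Handle* handle_on_stack) {
  Object* to_ref = BarrierForRoot(handle_on_stack);
  handle_on_stack->Assign(to_ref);
}

int main() {
  Object obj;
  Handle h{&obj};
  FixUpHandleOnEntry(&h);
}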
