Lines Matching defs:self (definitions and uses of the local Thread* self in ART's ConcurrentCopying collector; each entry is the source line number followed by the matching code)
61 Thread* self = Thread::Current();
63 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
71 MutexLock mu(self, mark_stack_lock_);
97 Thread* self = Thread::Current();
98 thread_running_gc_ = self;
99 Locks::mutator_lock_->AssertNotHeld(self);
101 ReaderMutexLock mu(self, *Locks::mutator_lock_);
106 ReaderMutexLock mu(self, *Locks::mutator_lock_);
124 ReaderMutexLock mu(self, *Locks::mutator_lock_);
134 Thread* self = Thread::Current();
135 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
198 // Note: self is not necessarily equal to thread since thread may be suspended.
199 Thread* self = Thread::Current();
200 CHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
201 << thread->GetState() << " thread " << thread << " self " << self;
217 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
219 concurrent_copying_->GetBarrier().Pass(self);
237 // Note: self is not necessarily equal to thread since thread may be suspended.
238 Thread* self = Thread::Current();
239 CHECK(thread == self);
240 Locks::mutator_lock_->AssertExclusiveHeld(self);
244 cc->RecordLiveStackFreezeSize(self);
268 Thread* self = Thread::Current();
269 Locks::mutator_lock_->AssertNotHeld(self);
270 gc_barrier_->Init(self, 0);
273 heap_->ThreadFlipBegin(self); // Sync with JNI critical calls.
276 heap_->ThreadFlipEnd(self);
278 ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
279 gc_barrier_->Increment(self, barrier_count);
294 void ConcurrentCopying::RecordLiveStackFreezeSize(Thread* self) {
295 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
341 // Note: self is not necessarily equal to thread since thread may be suspended.
342 Thread* self = Thread::Current();
343 CHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
344 << thread->GetState() << " thread " << thread << " self " << self;
347 concurrent_copying_->GetBarrier().Pass(self);
400 Thread* self = Thread::Current();
425 CHECK(!self->GetWeakRefAccessEnabled());
442 ProcessReferences(self);
447 SweepSystemWeaks(self);
457 ReenableWeakRefAccess(self);
471 void ConcurrentCopying::ReenableWeakRefAccess(Thread* self) {
479 MutexLock mu(self, *Locks::thread_list_lock_);
486 GetHeap()->GetReferenceProcessor()->BroadcastForSlowPath(self);
497 // Note: self is not necessarily equal to thread since thread may be suspended.
498 Thread* self = Thread::Current();
499 DCHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
500 << thread->GetState() << " thread " << thread << " self " << self;
507 concurrent_copying_->GetBarrier().Pass(self);
515 Thread* self = Thread::Current();
518 gc_barrier_->Init(self, 0);
526 Locks::mutator_lock_->SharedUnlock(self);
528 ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
529 gc_barrier_->Increment(self, barrier_count);
531 Locks::mutator_lock_->SharedLock(self);
552 Thread* self = Thread::Current();
555 gc_barrier_->Init(self, 0);
563 Locks::mutator_lock_->SharedUnlock(self);
565 ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
566 gc_barrier_->Increment(self, barrier_count);
568 Locks::mutator_lock_->SharedLock(self);
586 Thread* self = Thread::Current(); // TODO: pass self as an argument from call sites?
590 if (LIKELY(self == thread_running_gc_)) {
592 CHECK(self->GetThreadLocalMarkStack() == nullptr);
599 accounting::AtomicStack<mirror::Object>* tl_mark_stack = self->GetThreadLocalMarkStack();
601 MutexLock mu(self, mark_stack_lock_);
617 self->SetThreadLocalMarkStack(new_tl_mark_stack);
628 MutexLock mu(self, mark_stack_lock_);
637 << " self->gc_marking=" << self->GetIsGcMarking()
639 CHECK(self == thread_running_gc_)
773 Thread* self = Thread::Current();
774 DCHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
777 MutexLock mu(self, *Locks::thread_list_lock_);
786 self, *Locks::heap_bitmap_lock_);
795 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
899 // Note: self is not necessarily equal to thread since thread may be suspended.
900 Thread* self = Thread::Current();
901 CHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
902 << thread->GetState() << " thread " << thread << " self " << self;
906 MutexLock mu(self, concurrent_copying_->mark_stack_lock_);
916 concurrent_copying_->GetBarrier().Pass(self);
925 Thread* self = Thread::Current();
928 gc_barrier_->Init(self, 0);
935 Locks::mutator_lock_->SharedUnlock(self);
937 ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
938 gc_barrier_->Increment(self, barrier_count);
940 Locks::mutator_lock_->SharedLock(self);
944 Thread* self = Thread::Current();
945 CHECK_EQ(self, thread);
949 MutexLock mu(self, mark_stack_lock_);
971 Thread* self = Thread::Current();
973 CHECK(self == thread_running_gc_);
974 CHECK(self->GetThreadLocalMarkStack() == nullptr);
989 MutexLock mu(self, mark_stack_lock_);
996 MutexLock mu(self, mark_stack_lock_);
1015 MutexLock mu(self, mark_stack_lock_);
1124 Thread* self = Thread::Current();
1126 CHECK_EQ(self, thread_running_gc_);
1127 CHECK(self->GetThreadLocalMarkStack() == nullptr);
1144 Thread* self = Thread::Current();
1146 CHECK_EQ(self, thread_running_gc_);
1147 CHECK(self->GetThreadLocalMarkStack() == nullptr);
1159 Thread* self = Thread::Current();
1161 CHECK_EQ(self, thread_running_gc_);
1162 CHECK(self->GetThreadLocalMarkStack() == nullptr);
1192 void ConcurrentCopying::SweepSystemWeaks(Thread* self) {
1194 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
1289 Thread* self = Thread::Current();
1351 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
1483 PrintFileToLog("/proc/self/maps", LogSeverity::INTERNAL_FATAL);
1739 Thread* self = Thread::Current();
1741 MutexLock mu(self, skipped_blocks_lock_);
2072 Thread* const self = Thread::Current();
2074 MutexLock mu(self, mark_stack_lock_);
2082 ReaderMutexLock mu(self, *Locks::mutator_lock_);
2083 WriterMutexLock mu2(self, *Locks::heap_bitmap_lock_);
2109 void ConcurrentCopying::ProcessReferences(Thread* self) {
2112 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
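
The pattern that recurs through these matches: a function caches the current thread once via Thread::Current(), hands that self pointer to every RAII lock holder (MutexLock, ReaderMutexLock, WriterMutexLock) and to lock assertions, and, during a checkpoint, each checkpointed thread finishes its closure and calls GetBarrier().Pass(self) while the GC thread waits in gc_barrier_->Increment(self, barrier_count) in the kWaitingForCheckPointsToRun state. Below is a minimal standalone sketch of that checkpoint handshake in portable C++. SimpleBarrier, WaitFor, and the worker lambdas are illustrative stand-ins, not ART's Barrier/Thread API, and the counting scheme is simplified relative to the real Barrier.

    // Simplified analogue of the checkpoint/barrier handshake seen in the
    // matches above. All names here are illustrative, not ART's API.
    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>
    #include <vector>

    class SimpleBarrier {
     public:
      // Roughly the role of gc_barrier_->Init(self, 0): reset the count
      // before a new checkpoint round.
      void Init(int count) {
        std::lock_guard<std::mutex> lg(lock_);
        count_ = count;
      }
      // Roughly the role of GetBarrier().Pass(self): a checkpointed thread
      // reports that its closure has finished.
      void Pass() {
        std::lock_guard<std::mutex> lg(lock_);
        ++count_;
        cv_.notify_all();
      }
      // Roughly the role of gc_barrier_->Increment(self, barrier_count):
      // the GC thread blocks until every requested thread has checked in.
      void WaitFor(int target) {
        std::unique_lock<std::mutex> ul(lock_);
        cv_.wait(ul, [&] { return count_ >= target; });
      }

     private:
      std::mutex lock_;
      std::condition_variable cv_;
      int count_ = 0;
    };

    int main() {
      SimpleBarrier barrier;
      const int kWorkers = 4;
      barrier.Init(0);

      std::vector<std::thread> workers;
      for (int i = 0; i < kWorkers; ++i) {
        workers.emplace_back([&barrier, i] {
          // Stand-in for a per-thread checkpoint closure (e.g. revoking a
          // thread-local mark stack), followed by passing the barrier.
          std::printf("worker %d ran its checkpoint\n", i);
          barrier.Pass();
        });
      }

      // Stand-in for the GC thread waiting for all checkpoints to run.
      barrier.WaitFor(kWorkers);
      std::printf("all checkpoints passed\n");

      for (auto& t : workers) t.join();
      return 0;
    }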