Home | History | Annotate | Download | only in gc
      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_GC_HEAP_VISIT_OBJECTS_INL_H_
     18 #define ART_RUNTIME_GC_HEAP_VISIT_OBJECTS_INL_H_
     19 
     20 #include "heap.h"
     21 
     22 #include "base/mutex-inl.h"
     23 #include "gc/accounting/heap_bitmap-inl.h"
     24 #include "gc/space/bump_pointer_space-walk-inl.h"
     25 #include "gc/space/region_space-inl.h"
     26 #include "mirror/object-inl.h"
     27 #include "obj_ptr-inl.h"
     28 #include "scoped_thread_state_change-inl.h"
     29 #include "thread-current-inl.h"
     30 #include "thread_list.h"
     31 
     32 namespace art {
     33 namespace gc {
     34 
// Visit all objects in the heap while threads are NOT suspended (the caller
// holds the mutator lock shared). If the collector is a concurrent moving
// one, moving GC is temporarily disabled and all threads are suspended
// before walking, so object addresses stay stable during the visit.
template <typename Visitor>
inline void Heap::VisitObjects(Visitor&& visitor) {
  Thread* self = Thread::Current();
  Locks::mutator_lock_->AssertSharedHeld(self);
  DCHECK(!Locks::mutator_lock_->IsExclusiveHeld(self)) << "Call VisitObjectsPaused() instead";
  if (IsGcConcurrentAndMoving()) {
    // Concurrent moving GC. Just suspending threads isn't sufficient
    // because a collection isn't one big pause and we could suspend
    // threads in the middle (between phases) of a concurrent moving
    // collection where it's not easily known which objects are alive
    // (both the region space and the non-moving space) or which
    // copies of objects to visit, and the to-space invariant could be
    // easily broken. Visit objects while GC isn't running by using
    // IncrementDisableMovingGC() and threads are suspended.
    IncrementDisableMovingGC(self);
    {
      // Transition out of runnable (dropping the shared mutator lock) so
      // ScopedSuspendAll can suspend all threads and perform the walk in
      // an all-threads-paused window.
      ScopedThreadSuspension sts(self, kWaitingForVisitObjects);
      ScopedSuspendAll ssa(__FUNCTION__);
      VisitObjectsInternalRegionSpace(visitor);
      VisitObjectsInternal(visitor);
    }
    DecrementDisableMovingGC(self);
  } else {
    // Since concurrent moving GC has thread suspension, also poison ObjPtr the normal case to
    // catch bugs.
    self->PoisonObjectPointers();
    // GCs can move objects, so don't allow this.
    ScopedAssertNoThreadSuspension ants("Visiting objects");
    // Only concurrent moving collectors have a region space (checked again
    // inside VisitObjectsInternalRegionSpace), so it must be absent here.
    DCHECK(region_space_ == nullptr);
    VisitObjectsInternal(visitor);
    // Poison again so stale ObjPtrs created during the visit are caught too.
    self->PoisonObjectPointers();
  }
}
     70 
// Visit all objects in the heap while threads are already suspended: the
// caller must hold the mutator lock exclusively (e.g. during a GC pause).
// Walks the region space first, then the remaining spaces.
template <typename Visitor>
inline void Heap::VisitObjectsPaused(Visitor&& visitor) {
  Thread* self = Thread::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  VisitObjectsInternalRegionSpace(visitor);
  VisitObjectsInternal(visitor);
}
     78 
// Visit objects in the region space, if one exists. Requires the mutator
// lock to be held exclusively; in debug builds, additionally checks that
// moving GC is disabled (or that we are the GC thread / inside the
// pre-zygote-fork compaction) before walking.
template <typename Visitor>
inline void Heap::VisitObjectsInternalRegionSpace(Visitor&& visitor) {
  Thread* self = Thread::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  if (region_space_ != nullptr) {
    // A region space is only created for concurrent moving collectors.
    DCHECK(IsGcConcurrentAndMoving());
    if (!zygote_creation_lock_.IsExclusiveHeld(self)) {
      // Exclude the pre-zygote fork time where the semi-space collector
      // calls VerifyHeapReferences() as part of the zygote compaction
      // which then would call here without the moving GC disabled,
      // which is fine.
      bool is_thread_running_gc = false;
      if (kIsDebugBuild) {
        // thread_running_gc_ is guarded by gc_complete_lock_.
        MutexLock mu(self, *gc_complete_lock_);
        is_thread_running_gc = self == thread_running_gc_;
      }
      // If we are not the thread running the GC in a GC-exclusive region, then moving GC
      // must be disabled.
      DCHECK(is_thread_running_gc || IsMovingGCDisabled(self));
    }
    region_space_->Walk(visitor);
  }
}
    103 
    104 // Visit objects in the other spaces.
    105 template <typename Visitor>
    106 inline void Heap::VisitObjectsInternal(Visitor&& visitor) {
    107   if (bump_pointer_space_ != nullptr) {
    108     // Visit objects in bump pointer space.
    109     bump_pointer_space_->Walk(visitor);
    110   }
    111   // TODO: Switch to standard begin and end to use ranged a based loop.
    112   for (auto* it = allocation_stack_->Begin(), *end = allocation_stack_->End(); it < end; ++it) {
    113     mirror::Object* const obj = it->AsMirrorPtr();
    114 
    115     mirror::Class* kls = nullptr;
    116     if (obj != nullptr && (kls = obj->GetClass()) != nullptr) {
    117       // Below invariant is safe regardless of what space the Object is in.
    118       // For speed reasons, only perform it when Rosalloc could possibly be used.
    119       // (Disabled for read barriers because it never uses Rosalloc).
    120       // (See the DCHECK in RosAllocSpace constructor).
    121       if (!kUseReadBarrier) {
    122         // Rosalloc has a race in allocation. Objects can be written into the allocation
    123         // stack before their header writes are visible to this thread.
    124         // See b/28790624 for more details.
    125         //
    126         // obj.class will either be pointing to a valid Class*, or it will point
    127         // to a rosalloc free buffer.
    128         //
    129         // If it's pointing to a valid Class* then that Class's Class will be the
    130         // ClassClass (whose Class is itself).
    131         //
    132         // A rosalloc free buffer will point to another rosalloc free buffer
    133         // (or to null), and never to itself.
    134         //
    135         // Either way dereferencing while its not-null is safe because it will
    136         // always point to another valid pointer or to null.
    137         mirror::Class* klsClass = kls->GetClass();
    138 
    139         if (klsClass == nullptr) {
    140           continue;
    141         } else if (klsClass->GetClass() != klsClass) {
    142           continue;
    143         }
    144       } else {
    145         // Ensure the invariant is not broken for non-rosalloc cases.
    146         DCHECK(Heap::rosalloc_space_ == nullptr)
    147             << "unexpected rosalloc with read barriers";
    148         DCHECK(kls->GetClass() != nullptr)
    149             << "invalid object: class does not have a class";
    150         DCHECK_EQ(kls->GetClass()->GetClass(), kls->GetClass())
    151             << "invalid object: class's class is not ClassClass";
    152       }
    153 
    154       // Avoid the race condition caused by the object not yet being written into the allocation
    155       // stack or the class not yet being written in the object. Or, if
    156       // kUseThreadLocalAllocationStack, there can be nulls on the allocation stack.
    157       visitor(obj);
    158     }
    159   }
    160   {
    161     ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
    162     GetLiveBitmap()->Visit<Visitor>(visitor);
    163   }
    164 }
    165 
    166 }  // namespace gc
    167 }  // namespace art
    168 
    169 #endif  // ART_RUNTIME_GC_HEAP_VISIT_OBJECTS_INL_H_
    170