/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_READ_BARRIER_INL_H_
#define ART_RUNTIME_READ_BARRIER_INL_H_

#include "read_barrier.h"

#include "gc/accounting/read_barrier_table.h"
#include "gc/collector/concurrent_copying-inl.h"
#include "gc/heap.h"
#include "mirror/object-readbarrier-inl.h"
#include "mirror/object_reference.h"
#include "mirror/reference.h"
#include "runtime.h"

namespace art {

// Disabled for performance reasons.
static constexpr bool kCheckDebugDisallowReadBarrierCount = false;

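// Returns the reference stored in the field at `obj` + `offset`, applying the configured read
// barrier flavor (Baker, Brooks, or table-lookup). When kAlwaysUpdateField is true (Baker) or
// the table-lookup barrier is in use, the marked reference may also be CASed back into the field.
//
// A minimal sketch of a typical call site, written here only for illustration (the helper name
// and its exact signature are assumptions, not part of this file; the real field-read helpers
// in mirror/object-inl.h follow a similar shape):
//
//   template <typename T, bool kIsVolatile, ReadBarrierOption kReadBarrierOption>
//   inline T* ReadObjectField(mirror::Object* obj, MemberOffset offset) {
//     uint8_t* raw_addr = reinterpret_cast<uint8_t*>(obj) + offset.Int32Value();
//     auto* ref_addr = reinterpret_cast<mirror::HeapReference<T>*>(raw_addr);
//     return ReadBarrier::Barrier<T, kIsVolatile, kReadBarrierOption,
//                                 /* kAlwaysUpdateField= */ false>(obj, offset, ref_addr);
//   }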
template <typename MirrorType, bool kIsVolatile, ReadBarrierOption kReadBarrierOption,
          bool kAlwaysUpdateField>
inline MirrorType* ReadBarrier::Barrier(
    mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr) {
  constexpr bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (kUseReadBarrier && with_read_barrier) {
    if (kCheckDebugDisallowReadBarrierCount) {
      Thread* const self = Thread::Current();
      if (self != nullptr) {
        CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
      }
    }
    if (kUseBakerReadBarrier) {
      // fake_address_dependency (must be zero) is used to create an artificial data dependency
      // from the is_gray load to the ref field (ptr) load, so that no load-load barrier is
      // needed between the two.
      uintptr_t fake_address_dependency;
      bool is_gray = IsGray(obj, &fake_address_dependency);
      if (kEnableReadBarrierInvariantChecks) {
        CHECK_EQ(fake_address_dependency, 0U) << obj << " rb_state=" << obj->GetReadBarrierState();
      }
      ref_addr = reinterpret_cast<mirror::HeapReference<MirrorType>*>(
          fake_address_dependency | reinterpret_cast<uintptr_t>(ref_addr));
      MirrorType* ref = ref_addr->template AsMirrorPtr<kIsVolatile>();
      MirrorType* old_ref = ref;
      if (is_gray) {
        // Slow-path.
        ref = reinterpret_cast<MirrorType*>(Mark(ref));
        // If kAlwaysUpdateField is true, update the field atomically. This may fail if the
        // mutator updates the field before us, but that is OK.
        if (kAlwaysUpdateField && ref != old_ref) {
          obj->CasFieldObjectWithoutWriteBarrier<false, false>(offset,
                                                               old_ref,
                                                               ref,
                                                               CASMode::kStrong,
                                                               std::memory_order_release);
        }
      }
      AssertToSpaceInvariant(obj, offset, ref);
      return ref;
    } else if (kUseBrooksReadBarrier) {
      // To be implemented.
      return ref_addr->template AsMirrorPtr<kIsVolatile>();
    } else if (kUseTableLookupReadBarrier) {
      MirrorType* ref = ref_addr->template AsMirrorPtr<kIsVolatile>();
      MirrorType* old_ref = ref;
      // The heap or the collector can be null at startup. TODO: avoid the need for this null check.
      gc::Heap* heap = Runtime::Current()->GetHeap();
      if (heap != nullptr && heap->GetReadBarrierTable()->IsSet(old_ref)) {
        ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
        // Update the field atomically. This may fail if the mutator updates the field before
        // us, but that is OK.
        if (ref != old_ref) {
          obj->CasFieldObjectWithoutWriteBarrier<false, false>(offset,
                                                               old_ref,
                                                               ref,
                                                               CASMode::kStrong,
                                                               std::memory_order_release);
        }
      }
      AssertToSpaceInvariant(obj, offset, ref);
      return ref;
    } else {
      LOG(FATAL) << "Unexpected read barrier type";
      UNREACHABLE();
    }
  } else {
    // No read barrier.
    return ref_addr->template AsMirrorPtr<kIsVolatile>();
  }
}

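// Read barrier for a GC root held as a raw MirrorType* pointer. When the GC is marking, the
// root is passed through Mark(); the table-lookup flavor additionally CASes the marked
// reference back into the root slot.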
template <typename MirrorType, ReadBarrierOption kReadBarrierOption>
inline MirrorType* ReadBarrier::BarrierForRoot(MirrorType** root,
                                               GcRootSource* gc_root_source) {
  MirrorType* ref = *root;
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (kUseReadBarrier && with_read_barrier) {
    if (kIsDebugBuild) {
      Thread* const self = Thread::Current();
      if (self != nullptr) {
        CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
      }
    }
    if (kUseBakerReadBarrier) {
      // TODO: separate the read barrier code from the collector code more.
      Thread* self = Thread::Current();
      if (self != nullptr && self->GetIsGcMarking()) {
        ref = reinterpret_cast<MirrorType*>(Mark(ref));
      }
      AssertToSpaceInvariant(gc_root_source, ref);
      return ref;
    } else if (kUseBrooksReadBarrier) {
      // To be implemented.
      return ref;
    } else if (kUseTableLookupReadBarrier) {
      Thread* self = Thread::Current();
      if (self != nullptr &&
          self->GetIsGcMarking() &&
          Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
        MirrorType* old_ref = ref;
        ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
        // Update the root atomically. This may fail if the mutator updates it before us, but
        // that is OK.
        if (ref != old_ref) {
          Atomic<MirrorType*>* atomic_root = reinterpret_cast<Atomic<MirrorType*>*>(root);
          atomic_root->CompareAndSetStrongRelaxed(old_ref, ref);
        }
      }
      AssertToSpaceInvariant(gc_root_source, ref);
      return ref;
    } else {
      LOG(FATAL) << "Unexpected read barrier type";
      UNREACHABLE();
    }
  } else {
    return ref;
  }
}

// TODO: Reduce the copy-paste between this overload and the raw-pointer one above.
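// Same as above, but for a GC root held as a compressed reference (the representation used by
// GcRoot<>); the table-lookup flavor CASes the compressed slot instead of a raw pointer.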
template <typename MirrorType, ReadBarrierOption kReadBarrierOption>
inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
                                               GcRootSource* gc_root_source) {
  MirrorType* ref = root->AsMirrorPtr();
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (with_read_barrier && kUseBakerReadBarrier) {
    // TODO: separate the read barrier code from the collector code more.
    Thread* self = Thread::Current();
    if (self != nullptr && self->GetIsGcMarking()) {
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
    }
    AssertToSpaceInvariant(gc_root_source, ref);
    return ref;
  } else if (with_read_barrier && kUseBrooksReadBarrier) {
    // To be implemented.
    return ref;
  } else if (with_read_barrier && kUseTableLookupReadBarrier) {
    Thread* self = Thread::Current();
    if (self != nullptr &&
        self->GetIsGcMarking() &&
        Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
      auto old_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
      auto new_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
      // Update the root atomically. This may fail if the mutator updates it before us, but
      // that is OK.
      if (new_ref.AsMirrorPtr() != old_ref.AsMirrorPtr()) {
        auto* atomic_root =
            reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);
        atomic_root->CompareAndSetStrongRelaxed(old_ref, new_ref);
      }
    }
    AssertToSpaceInvariant(gc_root_source, ref);
    return ref;
  } else {
    return ref;
  }
}

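// Queries the concurrent copying collector for the marked version of `ref`. Short-circuits and
// returns `ref` unchanged when no read barrier is configured, when `ref` is null, or when the
// GC is not currently marking.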
template <typename MirrorType>
inline MirrorType* ReadBarrier::IsMarked(MirrorType* ref) {
  // Only read-barrier configurations can have mutators run while
  // the GC is marking.
  if (!kUseReadBarrier) {
    return ref;
  }
  // IsMarked does not handle null, so handle it here.
  if (ref == nullptr) {
    return nullptr;
  }
  // IsMarked should only be called when the GC is marking.
  if (!Thread::Current()->GetIsGcMarking()) {
    return ref;
  }

  return reinterpret_cast<MirrorType*>(
      Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarked(ref));
}

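// Returns true while the runtime is still starting up, i.e. before the heap and the concurrent
// copying collector are fully set up, or when CC is not the current collector type. Used to
// skip the to-space invariant checks below during that window.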
inline bool ReadBarrier::IsDuringStartup() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  if (heap == nullptr) {
    // During startup, the heap can be null.
    return true;
  }
  if (heap->CurrentCollectorType() != gc::kCollectorTypeCC) {
    // CC isn't running.
    return true;
  }
  gc::collector::ConcurrentCopying* collector = heap->ConcurrentCopyingCollector();
  if (collector == nullptr) {
    // During startup, the collector can be null.
    return true;
  }
  return false;
}

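// Debug checks, gated on kEnableToSpaceInvariantChecks, that a reference read from a field or
// from a GC root points into the to-space. No-ops for null references and during startup.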
inline void ReadBarrier::AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
                                                mirror::Object* ref) {
  if (kEnableToSpaceInvariantChecks) {
    if (ref == nullptr || IsDuringStartup()) {
      return;
    }
    Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
        AssertToSpaceInvariant(obj, offset, ref);
  }
}

inline void ReadBarrier::AssertToSpaceInvariant(GcRootSource* gc_root_source,
                                                mirror::Object* ref) {
  if (kEnableToSpaceInvariantChecks) {
    if (ref == nullptr || IsDuringStartup()) {
      return;
    }
    Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
        AssertToSpaceInvariant(gc_root_source, ref);
  }
}

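// Read barrier slow path: asks the concurrent copying collector to mark `obj` and returns the
// resulting to-space reference.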
inline mirror::Object* ReadBarrier::Mark(mirror::Object* obj) {
  return Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->MarkFromReadBarrier(obj);
}

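// Returns whether `obj`'s read barrier state is gray. This overload also produces the fake
// (always zero) address dependency consumed by the Baker fast path in Barrier() above.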
inline bool ReadBarrier::IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency) {
  return obj->GetReadBarrierState(fake_address_dependency) == kGrayState;
}

inline bool ReadBarrier::IsGray(mirror::Object* obj) {
  // Use a load-acquire to load the read barrier bit to avoid reordering with the subsequent load.
  // GetReadBarrierStateAcquire() has load-acquire semantics.
  return obj->GetReadBarrierStateAcquire() == kGrayState;
}

}  // namespace art

#endif  // ART_RUNTIME_READ_BARRIER_INL_H_