/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include <limits.h>
#include <stdint.h>
#include <memory>
#include <set>
#include <vector>

#include "base/mutex.h"
#include "globals.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror
class MemMap;

namespace gc {
namespace accounting {

template<size_t kAlignment>
class SpaceBitmap {
 public:
  typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);
  typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  static SpaceBitmap* Create(const std::string& name, uint8_t* heap_begin, size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
                                       uint8_t* heap_begin, size_t heap_capacity);

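  // Illustrative usage sketch (an assumption for documentation, not part of the API): create a
  // bitmap for a space and mark/query an object. "space_begin", "capacity" and "obj" are
  // hypothetical names made up for this example; ContinuousSpaceBitmap is the kObjectAlignment
  // instantiation typedef'd at the end of this header.
  //
  //   std::unique_ptr<ContinuousSpaceBitmap> bitmap(
  //       ContinuousSpaceBitmap::Create("example live bitmap", space_begin, capacity));
  //   bitmap->Set(obj);          // Mark the object.
  //   CHECK(bitmap->Test(obj));  // Its bit is now set.
  //   bitmap->Clear(obj);        // Unmark it again.
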
  ~SpaceBitmap();

  // <offset> is the difference from heap_begin_ to a pointer address.
  // <index> is the index of the bitmap word (in bitmap_begin_) that contains the bit representing
  //         <offset>.
  static constexpr size_t OffsetToIndex(size_t offset) {
    return offset / kAlignment / kBitsPerIntPtrT;
  }

  template<typename T>
  static constexpr T IndexToOffset(T index) {
    return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);
  }

  ALWAYS_INLINE static constexpr uintptr_t OffsetBitIndex(uintptr_t offset) {
    return (offset / kAlignment) % kBitsPerIntPtrT;
  }

  // Bits are packed so that the object at <offset> maps to bit OffsetBitIndex(offset) of the word
  // at index OffsetToIndex(offset).
  static constexpr uintptr_t OffsetToMask(uintptr_t offset) {
    return static_cast<size_t>(1) << OffsetBitIndex(offset);
  }

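  // Worked example (illustrative only), assuming kAlignment == 8 and a 64-bit host where
  // kBitsPerIntPtrT == 64: an object 0x140 (320) bytes past heap_begin_ occupies alignment slot
  // 320 / 8 == 40, so
  //   OffsetToIndex(0x140)  == 320 / 8 / 64 == 0   (first bitmap word)
  //   OffsetBitIndex(0x140) == (320 / 8) % 64 == 40
  //   OffsetToMask(0x140)   == uintptr_t{1} << 40
  // and IndexToOffset(1) == 1 * 8 * 64 == 512, the heap span covered by one bitmap word.
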
  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes.  Returns the bitmap's memory to the system as a side-effect.
  void Clear();

  // Clear a range covered by the bitmap using madvise if possible.
  void ClearRange(const mirror::Object* begin, const mirror::Object* end);

  bool Test(const mirror::Object* obj) const;

  // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
  // even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / sizeof(intptr_t);
  }

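  // Illustrative example of the unsigned-underflow check above (the numbers are made up): with
  // heap_begin_ == 0x70000000 on a 64-bit host, an address 0x6fffff00 gives
  // offset == 0xffffffffffffff00, whose index is far beyond bitmap_size_ / sizeof(intptr_t), so
  // HasAddress() correctly returns false without needing a separate "obj >= heap_begin_" branch.
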
  // Visitor that clears the bit for each object it is called with.
  class ClearVisitor {
   public:
    explicit ClearVisitor(SpaceBitmap* const bitmap)
        : bitmap_(bitmap) {
    }

    void operator()(mirror::Object* obj) const {
      bitmap_->Clear(obj);
    }
   private:
    SpaceBitmap* const bitmap_;
  };

  // Visit each kAlignment-sized slot in [visit_begin, visit_end), regardless of whether its bit
  // is set.
  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }

  // Visit the live objects in the range [visit_begin, visit_end).
  // TODO: Use lock annotations when clang is fixed.
  // REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  template <typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, Visitor&& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;

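  // Illustrative call sketch (hypothetical, not part of this header): visiting only the marked
  // objects of a space with a lambda visitor. "space" is a made-up name for this example.
  //
  //   bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
  //                            reinterpret_cast<uintptr_t>(space->End()),
  //                            [](mirror::Object* obj) {
  //                              // Only addresses whose bits are set reach this lambda.
  //                            });
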
  // Visits set bits in address order.  The visitor is not permitted to change the bitmap bits or
  // max during the traversal.
  template <typename Visitor>
  void Walk(Visitor&& visitor)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects.  Call <callback> zero or more times with lists of these object
  // pointers. The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);

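  // Illustrative SweepCallback sketch (hypothetical, not part of this header): SweepWalk() hands
  // the callback batches of pointers whose bits are set in "live" but not in "mark".
  // "live_bitmap", "mark_bitmap", "begin" and "end" are made-up names for this example.
  //
  //   void ExampleSweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg) {
  //     for (size_t i = 0; i < ptr_count; ++i) {
  //       // ptrs[i] is a garbage object; reclaim it here.
  //     }
  //   }
  //   ...
  //   ContinuousSpaceBitmap::SweepWalk(live_bitmap, mark_bitmap, begin, end,
  //                                    ExampleSweepCallback, /*arg=*/nullptr);
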
  // Copy the bits from source_bitmap into this bitmap.
  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  Atomic<uintptr_t>* Begin() {
    return bitmap_begin_;
  }

  // Size of our internal storage.
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmap spans.
  uint64_t HeapSize() const {
    return IndexToOffset<uint64_t>(Size() / sizeof(intptr_t));
  }

  void SetHeapSize(size_t bytes) {
    // TODO: Un-map the end of the mem map.
    heap_limit_ = heap_begin_ + bytes;
    bitmap_size_ = OffsetToIndex(bytes) * sizeof(intptr_t);
    CHECK_EQ(HeapSize(), bytes);
  }

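  // Worked example (illustrative only), assuming kAlignment == 8 and 64-bit bitmap words: one
  // word covers 8 * 64 == 512 bytes of heap, so SetHeapSize(1 * 1024 * 1024) yields
  // bitmap_size_ == (1 MiB / 512) * 8 == 16384 bytes, and HeapSize() recovers
  // 2048 * 512 == 1 MiB. The CHECK_EQ above therefore expects "bytes" to be a multiple of
  // kAlignment * kBitsPerIntPtrT.
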
  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uint64_t HeapLimit() const {
    return heap_limit_;
  }

  // Set the max address which can be covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const;

  // Helper functions for converting between a 64-bit heap capacity and the corresponding bitmap
  // size.
  static size_t ComputeBitmapSize(uint64_t capacity);
  static size_t ComputeHeapSize(uint64_t bitmap_bytes);

 private:
  // TODO: heap_end_ is initialized so that the heap bitmap is empty; this doesn't require the -1,
  //       however, we document that this is expected on heap_end_.
  SpaceBitmap(const std::string& name,
              MemMap* mem_map,
              uintptr_t* bitmap_begin,
              size_t bitmap_size,
              const void* heap_begin,
              size_t heap_capacity);

  // Set (kSetBit == true) or clear the bit corresponding to obj.
  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // Backing storage for bitmap.
  std::unique_ptr<MemMap> mem_map_;

  // This bitmap itself, word sized for efficiency in scanning.
  Atomic<uintptr_t>* const bitmap_begin_;

  // Size of this bitmap in bytes.
  size_t bitmap_size_;

  // The start address of the memory covered by the bitmap, which corresponds to the word
  // containing the first bit in the bitmap.
  const uintptr_t heap_begin_;

  // The end address of the memory covered by the bitmap. This may not be on a word boundary.
  uintptr_t heap_limit_;

  // Name of this bitmap.
  std::string name_;
};

typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_