/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "space_bitmap-inl.h"

#include "android-base/stringprintf.h"

#include "art_field-inl.h"
#include "base/mem_map.h"
#include "dex/dex_file-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array.h"

namespace art {
namespace gc {
namespace accounting {

using android::base::StringPrintf;

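// Size (in bytes) of the bitmap needed to cover a heap of `capacity` bytes, where each
// bitmap bit accounts for `kAlignment` heap bytes. As an illustration: with 8-byte object
// alignment on a 64-bit target, one bitmap word covers 8 * 64 = 512 heap bytes, so a
// 512 MiB heap needs an 8 MiB bitmap.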
template<size_t kAlignment>
size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {
  // Number of space (heap) bytes covered by one bitmap word.
  // (Word size in bytes = `sizeof(intptr_t)`, which is expected to be
  // 4 on a 32-bit architecture and 8 on a 64-bit one.)
  const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
  // Calculate the number of words required to cover a space (heap)
  // having a size of `capacity` bytes.
  return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) * sizeof(intptr_t);
}

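// Inverse of ComputeBitmapSize (up to rounding): the number of heap bytes covered by a
// bitmap of `bitmap_bytes` bytes, since each bitmap bit covers `kAlignment` heap bytes.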
template<size_t kAlignment>
size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) {
  return bitmap_bytes * kBitsPerByte * kAlignment;
}

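// Create a bitmap backed by an existing (already mapped and validated) MemMap instead of
// allocating a new anonymous mapping.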
template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
    const std::string& name, MemMap&& mem_map, uint8_t* heap_begin, size_t heap_capacity) {
  CHECK(mem_map.IsValid());
  uintptr_t* bitmap_begin = reinterpret_cast<uintptr_t*>(mem_map.Begin());
  const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
  return new SpaceBitmap(
      name, std::move(mem_map), bitmap_begin, bitmap_size, heap_begin, heap_capacity);
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name,
                                     MemMap&& mem_map,
                                     uintptr_t* bitmap_begin,
                                     size_t bitmap_size,
                                     const void* heap_begin,
                                     size_t heap_capacity)
    : mem_map_(std::move(mem_map)),
      bitmap_begin_(reinterpret_cast<Atomic<uintptr_t>*>(bitmap_begin)),
      bitmap_size_(bitmap_size),
      heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
      heap_limit_(reinterpret_cast<uintptr_t>(heap_begin) + heap_capacity),
      name_(name) {
  CHECK(bitmap_begin_ != nullptr);
  CHECK_NE(bitmap_size, 0U);
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>::~SpaceBitmap() {}

template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
    const std::string& name, uint8_t* heap_begin, size_t heap_capacity) {
  // Round up since `heap_capacity` is not necessarily a multiple of `kAlignment * kBitsPerIntPtrT`
  // (we represent one word as an `intptr_t`).
  const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
  std::string error_msg;
  MemMap mem_map = MemMap::MapAnonymous(name.c_str(),
                                        bitmap_size,
                                        PROT_READ | PROT_WRITE,
                                        /*low_4gb=*/ false,
                                        &error_msg);
  if (UNLIKELY(!mem_map.IsValid())) {
    LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
    return nullptr;
  }
  return CreateFromMemMap(name, std::move(mem_map), heap_begin, heap_capacity);
}

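// Adjust the upper bound of the covered heap range. The backing bitmap size is only ever
// shrunk here, never grown.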
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SetHeapLimit(uintptr_t new_end) {
  DCHECK_ALIGNED(new_end, kBitsPerIntPtrT * kAlignment);
  size_t new_size = OffsetToIndex(new_end - heap_begin_) * sizeof(intptr_t);
  if (new_size < bitmap_size_) {
    bitmap_size_ = new_size;
  }
  heap_limit_ = new_end;
  // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
  // should be marked.
}

template<size_t kAlignment>
std::string SpaceBitmap<kAlignment>::Dump() const {
  return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
                      reinterpret_cast<void*>(HeapLimit()));
}

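// Clear the whole bitmap by releasing its backing pages so that they read back as zero.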
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::Clear() {
  if (bitmap_begin_ != nullptr) {
    mem_map_.MadviseDontNeedAndZero();
  }
}

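// Clear all bits corresponding to objects in [begin, end). Partial words at the edges are
// cleared bit by bit; the whole words in between are zeroed and their pages released.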
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::ClearRange(const mirror::Object* begin, const mirror::Object* end) {
  uintptr_t begin_offset = reinterpret_cast<uintptr_t>(begin) - heap_begin_;
  uintptr_t end_offset = reinterpret_cast<uintptr_t>(end) - heap_begin_;
  // Align begin and end to bitmap word boundaries.
  while (begin_offset < end_offset && OffsetBitIndex(begin_offset) != 0) {
    Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + begin_offset));
    begin_offset += kAlignment;
  }
  while (begin_offset < end_offset && OffsetBitIndex(end_offset) != 0) {
    end_offset -= kAlignment;
    Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + end_offset));
  }
  // Bitmap word boundaries.
  const uintptr_t start_index = OffsetToIndex(begin_offset);
  const uintptr_t end_index = OffsetToIndex(end_offset);
  ZeroAndReleasePages(reinterpret_cast<uint8_t*>(&bitmap_begin_[start_index]),
                      (end_index - start_index) * sizeof(*bitmap_begin_));
}

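// Copy the bits of `source_bitmap` into this bitmap word by word; both bitmaps must have
// the same size.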
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
  DCHECK_EQ(Size(), source_bitmap->Size());
  const size_t count = source_bitmap->Size() / sizeof(intptr_t);
  Atomic<uintptr_t>* const src = source_bitmap->Begin();
  Atomic<uintptr_t>* const dest = Begin();
  for (size_t i = 0; i < count; ++i) {
    dest[i].store(src[i].load(std::memory_order_relaxed), std::memory_order_relaxed);
  }
}

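// Visit the garbage objects in [sweep_begin, sweep_end): objects whose bits are set in
// `live_bitmap` but not in `mark_bitmap`. Their addresses are collected into a buffer and
// handed to `callback` in batches.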
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
                                        const SpaceBitmap<kAlignment>& mark_bitmap,
                                        uintptr_t sweep_begin, uintptr_t sweep_end,
                                        SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != nullptr);
  CHECK(mark_bitmap.bitmap_begin_ != nullptr);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != nullptr);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  if (sweep_end <= sweep_begin) {
    return;
  }

  size_t buffer_size = sizeof(intptr_t) * kBitsPerIntPtrT;
  Atomic<uintptr_t>* live = live_bitmap.bitmap_begin_;
  Atomic<uintptr_t>* mark = mark_bitmap.bitmap_begin_;
  const size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  const size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / sizeof(intptr_t));

  if (Runtime::Current()->IsRunningOnMemoryTool()) {
    // For memory tool, make the buffer large enough to hold all allocations. This is done since
    // we get the size of objects (and hence read the class) inside of the freeing logic. This can
    // cause crashes for unloaded classes since the class may get zeroed out before it is read.
    // See b/131542326
    for (size_t i = start; i <= end; i++) {
      uintptr_t garbage =
          live[i].load(std::memory_order_relaxed) & ~mark[i].load(std::memory_order_relaxed);
      buffer_size += POPCOUNT(garbage);
    }
  }
  std::vector<mirror::Object*> pointer_buf(buffer_size);
  mirror::Object** cur_pointer = &pointer_buf[0];
  mirror::Object** pointer_end = cur_pointer + (buffer_size - kBitsPerIntPtrT);

  for (size_t i = start; i <= end; i++) {
    uintptr_t garbage =
        live[i].load(std::memory_order_relaxed) & ~mark[i].load(std::memory_order_relaxed);
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      do {
        const size_t shift = CTZ(garbage);
        garbage ^= (static_cast<uintptr_t>(1)) << shift;
        *cur_pointer++ = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Flush the batch when the buffer is close to full, so that there is always room
      // left for an entire word's worth of set (one) bits.
      if (cur_pointer >= pointer_end) {
        (*callback)(cur_pointer - &pointer_buf[0], &pointer_buf[0], arg);
        cur_pointer = &pointer_buf[0];
      }
    }
  }
  if (cur_pointer > &pointer_buf[0]) {
    (*callback)(cur_pointer - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

template class SpaceBitmap<kObjectAlignment>;
template class SpaceBitmap<kPageSize>;

}  // namespace accounting
}  // namespace gc
}  // namespace art