/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

// MemMap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "LIR masks  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

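// Dump a summary of the arena chain starting at |first|: bytes handed out, bytes backing the
// arenas, bytes lost to unused arena tails, and a per-kind allocation breakdown.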
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment compensates for the current arena, whose bytes_allocated_ may not
  // have been updated yet (see ArenaAllocator::UpdateBytesAllocated()).
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  COMPILE_ASSERT(arraysize(kAllocNames) == kNumArenaAllocKinds, check_arraysize_kAllocNames);
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

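// Allocate the arena's backing storage: an anonymous MemMap when kUseMemMap is set, otherwise a
// calloc'ed block. Both paths return zero-initialized memory.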
Arena::Arena(size_t size)
    : bytes_allocated_(0),
      map_(nullptr),
      next_(nullptr) {
  if (kUseMemMap) {
    std::string error_msg;
    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
                                &error_msg);
    CHECK(map_ != nullptr) << error_msg;
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

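// Zero out the bytes handed out from this arena so that a recycled arena again returns
// zero-initialized memory, either via memset or by madvising the pages away.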
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      map_->MadviseDontNeedAndZero();
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

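// Return an arena with at least |size| bytes of backing storage, reusing the head of the free
// list when it is large enough and allocating a fresh arena otherwise. The arena is Reset()
// before being handed out.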
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

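// Return a chain of arenas to the pool's free list. Under Valgrind, the bytes previously handed
// out are marked undefined so that stale reads of recycled memory are reported.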
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

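// Valgrind-aware allocation path: each allocation is padded with a red zone that is marked
// inaccessible, so reads or writes past the requested size are reported by Valgrind.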
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

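// Fetch a new arena from the pool, large enough for |allocation_size| and at least kDefaultSize,
// and make it the current allocation target.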
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Collect memory usage statistics that can later be dumped via MemStats::Dump().
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art