/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stdint.h>
#include <stddef.h>
#include <string.h>

#include <iosfwd>
#include <memory>

#include "base/bit_utils.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemMap;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

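// When true, ArenaAllocatorStats (see the typedef below) tallies the number of
// allocations and the bytes requested per ArenaAllocKind; when false, the
// statistics hooks compile down to no-ops.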
static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocBB,
  kArenaAllocBBList,
  kArenaAllocBBPredecessors,
  kArenaAllocDfsPreOrder,
  kArenaAllocDfsPostOrder,
  kArenaAllocDomPostOrder,
  kArenaAllocTopologicalSortOrder,
  kArenaAllocLoweringInfo,
  kArenaAllocLIR,
  kArenaAllocLIRResourceMask,
  kArenaAllocSwitchTable,
  kArenaAllocFillArrayData,
  kArenaAllocSlowPaths,
  kArenaAllocMIR,
  kArenaAllocDFInfo,
  kArenaAllocGrowableArray,
  kArenaAllocGrowableBitMap,
  kArenaAllocSSAToDalvikMap,
  kArenaAllocDalvikToSSAMap,
  kArenaAllocDebugInfo,
  kArenaAllocSuccessor,
  kArenaAllocRegAlloc,
  kArenaAllocData,
  kArenaAllocPredecessors,
  kArenaAllocSTL,
  kNumArenaAllocKinds
};
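// The kind is passed at each allocation site so that per-kind statistics can
// be attributed when kArenaAllocatorCountAllocations is true. A minimal
// sketch, assuming an ArenaAllocator* named "allocator":
//
//   void* lir_mem = allocator->Alloc(size, kArenaAllocLIR);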

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
  void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
    UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
  }
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is called before reuse; it zeroes the arena's memory with memset for performance.
  void Reset();
  // Release is called between uses; it madvises the memory away to reduce the memory footprint.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = Arena::kDefaultSize);
  virtual ~MallocArena();
};

class MemMapArena FINAL : public Arena {
 public:
  explicit MemMapArena(size_t size, bool low_4gb);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
  ~ArenaPool();
  Arena* AllocArena(size_t size) LOCKS_EXCLUDED(lock_);
  void FreeArenaChain(Arena* first) LOCKS_EXCLUDED(lock_);
  size_t GetBytesAllocated() const LOCKS_EXCLUDED(lock_);
  // Trim the maps in the arenas by madvising; used by the JIT to reduce memory usage.
  // This only works when use_malloc is false.
  void TrimMaps() LOCKS_EXCLUDED(lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
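
// A minimal usage sketch (hedged; the values are illustrative): clients
// normally hand the pool to an ArenaAllocator rather than calling AllocArena
// directly, and the allocator returns its arena chain to the pool when it is
// destroyed.
//
//   ArenaPool pool(/* use_malloc */ true);
//   {
//     ArenaAllocator allocator(&pool);
//     void* mem = allocator.Alloc(1024);
//   }  // Arenas are recycled into the pool here.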

class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(running_on_valgrind_)) {
      return AllocValgrind(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    if (UNLIKELY(ptr_ + bytes > end_)) {
      // Obtain a new block.
      ObtainNewArenaForAllocation(bytes);
      if (UNLIKELY(ptr_ == nullptr)) {
        return nullptr;
      }
    }
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    uint8_t* ret = ptr_;
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      const size_t size_delta = new_size - ptr_size;
      // Check the remaining space in the current arena.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
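
  // A minimal sketch of the in-place fast path (values illustrative): the
  // second call grows the same block without a copy because nothing else was
  // allocated in between.
  //
  //   void* p = allocator->Alloc(16);
  //   p = allocator->Realloc(p, 16, 32);  // Extends in place, no memcpy.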

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }
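
  // Example (sketch): allocate a zeroed array of 16 uint32_t tagged for the
  // register allocator. Alloc() returns zeroed memory, so no extra
  // initialization is needed.
  //
  //   uint32_t* regs = allocator->AllocArray<uint32_t>(16, kArenaAllocRegAlloc);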

  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);

  void ObtainNewArenaForAllocation(size_t allocation_size);

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // BytesUsed sums up the bytes actually used in each arena in the arena_head_ chain.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

 private:
  static constexpr size_t kAlignment = 8;

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;
  bool running_on_valgrind_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
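
// A minimal sketch (hedged): dump an allocator's statistics to a stream.
// Per-kind numbers are only meaningful when kArenaAllocatorCountAllocations
// is true; "os" stands for any std::ostream.
//
//   MemStats stats = allocator.GetMemStats();
//   stats.Dump(os);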
    303 
    304 }  // namespace art
    305 
    306 #endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
    307