/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include "base/bit_utils.h"
#include "base/debug_stack.h"
#include "base/dchecked_vector.h"
#include "base/macros.h"
#include "base/memory_tool.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

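// When true, the ArenaAllocatorStats typedef below records the number of allocations
// and per-kind byte totals; when false, the stats implementation compiles to no-ops.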
static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;

class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
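
// Illustrative sketch (hypothetical `ptr` and `redzone_bytes`; not code from this file):
// a caller running under a memory tool can poison the padding after a block so that
// stray accesses are reported. When no tool is attached, the calls are no-ops.
//
//   MakeDefined(ptr, size);                        // payload is usable
//   MakeInaccessible(ptr + size, redzone_bytes);   // red zone traps stray accesses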

class Arena {
 public:
  Arena();
  virtual ~Arena() { }
  // Reset is called before the arena is reused; it zeroes the memory with memset for performance.
  void Reset();
  // Release is called in between uses; it madvises the memory away to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true,
                     bool low_4gb = false,
                     const char* name = "LinearAlloc");
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by the JIT to reduce memory usage.
  // This only works when use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
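
// Illustrative usage of ArenaPool (a hypothetical caller; not code from this file):
//
//   ArenaPool pool;
//   Arena* arena = pool.AllocArena(/* size= */ 128 * 1024);
//   // ... carve allocations out of `arena` ...
//   pool.FreeArenaChain(arena);  // Arenas are chained via next_; the whole chain is recycled.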

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
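//
// Illustrative usage (a sketch; assumes only the declarations in this header):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     int32_t* regs = allocator.AllocArray<int32_t>(16, kArenaAllocMisc);
//     // `regs` is zero-initialized; individual allocations are never freed.
//   }  // The destructor returns all arenas to `pool`.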
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h .
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }
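
  // For example (with the default kAlignment of 8): Alloc(5) rounds the request
  // up to 8 bytes before bumping ptr_, so every returned pointer stays 8-byte
  // aligned without any per-allocation bookkeeping.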

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    // Pad up to the next 16-byte boundary (RoundUp yields 0 if ptr_ is already aligned).
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }
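
  // For example: if ptr_ is 8-byte but not 16-byte aligned (address ending in 0x8),
  // padding == 8 and the returned block starts at ptr_ + 8.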

  // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check the remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
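
  // For example: growing the most recent allocation from 8 to 24 bytes just bumps
  // ptr_ by 16 and returns the same pointer; growing any older block falls through
  // to a fresh Alloc() plus memcpy().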

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up the bytes allocated from the arenas in the arena_head_ chain.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
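
// Illustrative usage (a sketch; `os` stands for any std::ostream):
//
//   MemStats stats = allocator.GetMemStats();
//   stats.Dump(os);  // Per-kind totals are only recorded when
//                    // kArenaAllocatorCountAllocations is true.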

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_