/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stdint.h>
#include <stddef.h>

#include <iosfwd>

#include "base/bit_utils.h"
#include "base/dchecked_vector.h"
#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

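// When true, ArenaAllocatorStats (see below) records the number of allocations and the bytes
// allocated per ArenaAllocKind; when false, the statistics collapse to a no-op implementation.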
static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;

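// Mixin that forwards region state changes (defined/undefined/inaccessible) to the memory tool
// (such as Valgrind) only when one is actually running, so the checks stay cheap no-ops otherwise.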
class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};

class Arena {
 public:
  Arena();
  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used in between uses and relies on madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true,
                     bool low_4gb = false,
                     const char* name = "LinearAlloc");
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
  // if use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
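//
// A hedged usage sketch (the pool setup and allocation kinds below are illustrative only):
//
//   ArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   void* buffer = allocator.Alloc(256, kArenaAllocCodeBuffer);   // Zeroed, kAlignment-aligned.
//   int32_t* regs = allocator.AllocArray<int32_t>(16, kArenaAllocMisc);
//   // No per-allocation free; the memory goes back to the pool when the allocator is destroyed.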
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
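  //
  // A hedged sketch of adapter use (arena_containers.h provides aliases such as ArenaVector
  // that wrap this pattern; the container and element type below are illustrative):
  //   std::vector<int, ArenaAllocatorAdapter<int>> worklist(allocator.Adapter(kArenaAllocSTL));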
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    // Pad the current position up to the next 16-byte boundary.
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
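  //
  // A hedged sketch of a typical call (names are illustrative):
  //   uint16_t* data = allocator.AllocArray<uint16_t>(old_length);
  //   data = static_cast<uint16_t*>(allocator.Realloc(data,
  //                                                   old_length * sizeof(uint16_t),
  //                                                   new_length * sizeof(uint16_t)));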
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_