/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stdint.h>
#include <stddef.h>

#include "base/bit_utils.h"
#include "base/dchecked_vector.h"
#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;

class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
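
// Illustrative sketch (not part of the original header): how these hooks are
// typically used to annotate arena memory for a memory tool such as Valgrind
// or ASan. The |tool| object, buffer name and sizes below are hypothetical.
//
//   uint8_t* chunk = ...;               // memory carved out of an arena
//   tool.MakeUndefined(chunk, 64u);     // handed to a client: allocated, uninitialized
//   tool.MakeDefined(chunk, 64u);       // after the client wrote it: readable
//   tool.MakeInaccessible(chunk, 64u);  // retired / red zone: accesses get reported
//
// All three calls are no-ops unless IsRunningOnMemoryTool() is true.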

class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used in between uses and uses madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true,
                     bool low_4gb = false,
                     const char* name = "LinearAlloc");
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
  // if use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
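//
// Illustrative usage sketch (not part of the original header); the variable
// names and sizes below are hypothetical, and ArenaVector is declared in
// arena_containers.h:
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     // Zeroed, 8-byte aligned memory, tagged for memory tuning stats.
//     uint32_t* vregs = allocator.AllocArray<uint32_t>(16, kArenaAllocEnvironmentVRegs);
//     void* scratch = allocator.Alloc(256, kArenaAllocMisc);
//     // STL containers can draw from the same arenas via Adapter().
//     ArenaVector<uint32_t> worklist(allocator.Adapter(kArenaAllocSTL));
//     worklist.push_back(42u);
//   }  // Arenas are returned to |pool| when |allocator| is destroyed.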
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    // Compute the padding needed to round ptr_ up to the next 16-byte boundary.
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      DCHECK(!IsRunningOnMemoryTool());  // Red zone prevents end == ptr_.
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
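
  // Illustrative sketch (not part of the original header): growing a buffer
  // with Realloc. The name and sizes below are hypothetical. When |buf| is the
  // most recent allocation, the block is extended in place; otherwise a new
  // block is allocated, the old bytes are copied, and the old block is NOT freed.
  //
  //   uint8_t* buf = allocator.AllocArray<uint8_t>(64, kArenaAllocCodeBuffer);
  //   buf = static_cast<uint8_t*>(allocator.Realloc(buf, 64, 128, kArenaAllocCodeBuffer));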

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up the bytes allocated from all the arenas in the arena_head_ chain.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
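
// Illustrative sketch (not part of the original header): dumping allocation
// statistics. Per-kind byte counts are only gathered when
// kArenaAllocatorCountAllocations above is set to true; the output stream
// below is hypothetical.
//
//   ArenaAllocator allocator(&pool);
//   ...
//   MemStats stats = allocator.GetMemStats();
//   stats.Dump(os);  // |os| is any std::ostream, e.g. a logging stream.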

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_