/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include <android-base/logging.h>

#include "arena_allocator.h"
#include "base/debug_stack.h"
#include "base/globals.h"
#include "base/macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is used and has not yet been destroyed.
  kUsed,
  // Allocation has been destroyed.
  kFree,
};

// Holds a list of Arenas for use by the ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  void Reset();

  size_t PeakBytesAllocated() {
    DebugStackRefCounter::CheckNoRefs();
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  const ArenaAllocatorStats* PeakStats() const {
    return static_cast<const TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kAlignment for the free or used tag. Required to preserve alignment.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
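    // In debug builds the first kAlignment bytes of the block hold the free/used tag
    // (read back via ArenaTagForAllocation, which looks just before the returned pointer);
    // skip past them and mark the new allocation as used.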
    if (kIsDebugBuild) {
      ptr += kAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};

// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
//
// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
// objects and allows nesting multiple allocators. Only the top allocator on the stack can be
// used at any given time; once it is destroyed, its memory is returned to the ArenaStack and
// can be reused by the next ScopedArenaAllocator pushed onto the stack.
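//
// A minimal usage sketch (illustrative only; `pool` and the allocation sizes below are
// hypothetical and not defined in this header):
//
//   ArenaStack stack(&pool);
//   {
//     ScopedArenaAllocator outer(&stack);
//     void* a = outer.Alloc(64u);              // Lives until `outer` is destroyed.
//     {
//       ScopedArenaAllocator inner(&stack);    // Nested; only the top allocator may be used.
//       void* b = inner.Alloc(128u);
//     }                                        // `inner`'s memory is returned to the stack.
//     void* c = outer.Alloc(32u);              // May reuse the memory that backed `b`.
//   }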
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  ScopedArenaAllocator(ScopedArenaAllocator&& other);
  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  ArenaStack* GetArenaStack() const {
    return arena_stack_;
  }

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h.
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
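  // A sketch of typical adapter use (illustrative only; `allocator` is a hypothetical
  // ScopedArenaAllocator instance, and ScopedArenaVector is the container alias expected
  // to be provided by scoped_arena_containers.h):
  //
  //   ScopedArenaVector<uint32_t> worklist(allocator.Adapter(kArenaAllocSTL));
  //   worklist.push_back(42u);  // Backing storage comes from the arena stack.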

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}

 private:
  ArenaStack* arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_