/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "logging.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is used and has not yet been destroyed.
  kUsed,
  // Allocation has been destroyed.
  kFree,
};

static constexpr size_t kArenaAlignment = 8;

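// A rough sketch (not normative) of how a debug-build allocation is laid out by
// ArenaStack::Alloc() below; the 13-byte request size is purely illustrative:
//
//   rounded_bytes = RoundUp(13 + kArenaAlignment, kArenaAlignment) = RoundUp(21, 8) = 24
//
//   base                          base + 8 (returned ptr)           base + 24 (new top_ptr_)
//   | 7 padding bytes | ArenaFreeTag |     13 payload bytes + 3 slack bytes     |
//
// In release builds no tag is reserved, so the same request rounds to 16 bytes.
// ArenaStack::ArenaTagForAllocation(ptr) reads/writes the tag byte at ptr - 1.
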
// Holds a list of Arenas for use by the ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  void Reset();

  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    // In debug builds, reserve kArenaAlignment extra bytes for the free/used tag; adding a
    // full alignment's worth (rather than a single byte) keeps the returned pointer aligned.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kArenaAlignment : 0u), kArenaAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      ptr += kArenaAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};

// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
//
// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
// objects and allows nesting multiple allocators. Only the top allocator can be used, but
// once it is destroyed, its memory can be reused by the next ScopedArenaAllocator pushed on
// the stack. This is facilitated by returning the memory to the ArenaStack.
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h.
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
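  // A minimal usage sketch ('allocator' is a ScopedArenaAllocator; ScopedArenaVector is the
  // container alias expected from scoped_arena_containers.h, not declared in this header):
  //
  //   ScopedArenaVector<int32_t> worklist(allocator.Adapter(kArenaAllocSTL));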

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}

 private:
  ArenaStack* const arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};
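
// A minimal usage sketch, assuming 'pool' is an existing ArenaPool*; the variable names
// below are illustrative only:
//
//   ArenaStack stack(pool);
//   {
//     ScopedArenaAllocator allocator(&stack);
//     int32_t* data = allocator.AllocArray<int32_t>(16);
//     // ... use 'data'; it is released back to the ArenaStack when 'allocator' goes out
//     // of scope, so the next ScopedArenaAllocator can reuse the memory.
//   }
//
//   // When the lifetime is not a C++ block scope, use Create() and a delete-expression;
//   // the no-op operator delete above means only the destructor runs:
//   ScopedArenaAllocator* allocator = ScopedArenaAllocator::Create(&stack);
//   // ... allocate ...
//   delete allocator;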

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_