// (code-browser export breadcrumb — not part of the original source file)
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
     18 #define ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
     19 
     20 #include "base/logging.h"
     21 #include "base/macros.h"
     22 #include "utils/arena_allocator.h"
     23 #include "utils/debug_stack.h"
     24 #include "globals.h"
     25 
     26 namespace art {
     27 
     28 class ArenaStack;
     29 class ScopedArenaAllocator;
     30 
     31 template <typename T>
     32 class ScopedArenaAllocatorAdapter;
     33 
     34 // Holds a list of Arenas for use by ScopedArenaAllocator stack.
     35 class ArenaStack : private DebugStackRefCounter {
     36  public:
     37   explicit ArenaStack(ArenaPool* arena_pool);
     38   ~ArenaStack();
     39 
     40   void Reset();
     41 
     42   size_t PeakBytesAllocated() {
     43     return PeakStats()->BytesAllocated();
     44   }
     45 
     46   MemStats GetPeakStats() const;
     47 
     48  private:
     49   struct Peak;
     50   struct Current;
     51   template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
     52   struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
     53     explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
     54     ArenaPool* const pool;
     55   };
     56 
     57   ArenaAllocatorStats* PeakStats() {
     58     return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
     59   }
     60 
     61   ArenaAllocatorStats* CurrentStats() {
     62     return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
     63   }
     64 
     65   // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
     66   void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
     67     if (UNLIKELY(running_on_valgrind_)) {
     68       return AllocValgrind(bytes, kind);
     69     }
     70     size_t rounded_bytes = RoundUp(bytes, 8);
     71     uint8_t* ptr = top_ptr_;
     72     if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
     73       ptr = AllocateFromNextArena(rounded_bytes);
     74     }
     75     CurrentStats()->RecordAlloc(bytes, kind);
     76     top_ptr_ = ptr + rounded_bytes;
     77     return ptr;
     78   }
     79 
     80   uint8_t* AllocateFromNextArena(size_t rounded_bytes);
     81   void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
     82   void UpdateBytesAllocated();
     83   void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
     84 
     85   StatsAndPool stats_and_pool_;
     86   Arena* bottom_arena_;
     87   Arena* top_arena_;
     88   uint8_t* top_ptr_;
     89   uint8_t* top_end_;
     90 
     91   const bool running_on_valgrind_;
     92 
     93   friend class ScopedArenaAllocator;
     94   template <typename T>
     95   friend class ScopedArenaAllocatorAdapter;
     96 
     97   DISALLOW_COPY_AND_ASSIGN(ArenaStack);
     98 };
     99 
// A RAII-style scope on an ArenaStack: allocations made through this
// allocator are reclaimed together when the scope ends (destruction or
// Reset()). The mark_* members record the stack position to restore to.
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    // Rewind the mark to the allocator's own storage, so that releasing the
    // scope presumably reclaims the allocator object itself as well
    // (restore logic lives in the out-of-line dtor/Reset — verify there).
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  void Reset();

  // Allocate from the underlying ArenaStack. CheckTop() (debug builds)
  // verifies this is the innermost live scope on the stack.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h .
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Allow a delete-expression to destroy but not deallocate allocators created
  // by Create(). The storage belongs to the ArenaStack, so freeing it here
  // would be wrong; deletion must only run the destructor.
  static void operator delete(void* ptr) { UNUSED(ptr); }

 private:
  ArenaStack* const arena_stack_;
  Arena* mark_arena_;    // Arena that was on top when this scope began.
  uint8_t* mark_ptr_;    // Bump-pointer position to restore on release.
  uint8_t* mark_end_;    // End of usable space at the mark.

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};
    140 
    141 }  // namespace art
    142 
    143 #endif  // ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
    144