/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_ROSALLOC_SPACE_H_
#define ART_RUNTIME_GC_SPACE_ROSALLOC_SPACE_H_

#include "gc/allocator/rosalloc.h"
#include "malloc_space.h"
#include "space.h"

namespace art {
namespace gc {

namespace collector {
class MarkSweep;
}  // namespace collector

namespace space {

// An alloc space implemented using a runs-of-slots memory allocator. Not final, as it may be
// subclassed by a MemoryToolMallocSpace.
class RosAllocSpace : public MallocSpace {
 public:
  // Create a RosAllocSpace with the requested sizes. The requested
  // base address is not guaranteed to be granted; if it is required,
  // the caller should call Begin on the returned space to confirm that
  // the request was granted.
  static RosAllocSpace* Create(const std::string& name, size_t initial_size, size_t growth_limit,
                               size_t capacity, uint8_t* requested_begin, bool low_memory_mode,
                               bool can_move_objects);
  static RosAllocSpace* CreateFromMemMap(MemMap* mem_map, const std::string& name,
                                         size_t starting_size, size_t initial_size,
                                         size_t growth_limit, size_t capacity,
                                         bool low_memory_mode, bool can_move_objects);
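  // Illustrative usage sketch (not part of this class's API): checking whether a requested
  // base address was granted. The sizes and the requested_begin pointer below are hypothetical
  // example values.
  //
  //   uint8_t* requested_begin = ...;  // Desired base address, if any.
  //   RosAllocSpace* space = RosAllocSpace::Create(
  //       "example rosalloc space", 4 * MB, 64 * MB, 128 * MB, requested_begin,
  //       /* low_memory_mode */ false, /* can_move_objects */ false);
  //   if (space != nullptr && space->Begin() != requested_begin) {
  //     // The requested address was not granted; the caller decides how to react.
  //   }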

  mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                  size_t* usable_size, size_t* bytes_tl_bulk_allocated)
      OVERRIDE REQUIRES(!lock_);
  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                        size_t* usable_size, size_t* bytes_tl_bulk_allocated) OVERRIDE {
    return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,
                           bytes_tl_bulk_allocated);
  }
  mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                    size_t* usable_size, size_t* bytes_tl_bulk_allocated)
      OVERRIDE REQUIRES(Locks::mutator_lock_) {
    return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size,
                                       bytes_tl_bulk_allocated);
  }
  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
    return AllocationSizeNonvirtual<true>(obj, usable_size);
  }
  size_t Free(Thread* self, mirror::Object* ptr) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);
  size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                  size_t* usable_size, size_t* bytes_tl_bulk_allocated) {
    // RosAlloc zeroes memory internally.
    return AllocCommon(self, num_bytes, bytes_allocated, usable_size,
                       bytes_tl_bulk_allocated);
  }
  mirror::Object* AllocNonvirtualThreadUnsafe(Thread* self, size_t num_bytes,
                                              size_t* bytes_allocated, size_t* usable_size,
                                              size_t* bytes_tl_bulk_allocated) {
    // RosAlloc zeroes memory internally. Pass kThreadSafe = false for the thread-unsafe path.
    return AllocCommon<false>(self, num_bytes, bytes_allocated, usable_size,
                              bytes_tl_bulk_allocated);
  }

  // Returns true if the given allocation request can be satisfied from
  // an existing thread-local run without allocating a new run.
  ALWAYS_INLINE bool CanAllocThreadLocal(Thread* self, size_t num_bytes);
  // Satisfies the given allocation request from an existing thread-local
  // run without allocating a new run.
  ALWAYS_INLINE mirror::Object* AllocThreadLocal(Thread* self, size_t num_bytes,
                                                 size_t* bytes_allocated);
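  // Illustrative fast-path sketch (the real call sites live in the heap and allocator code,
  // not in this header); self and num_bytes are assumed to come from the caller:
  //
  //   size_t bytes_allocated, usable_size, bytes_tl_bulk_allocated;
  //   mirror::Object* obj;
  //   if (space->CanAllocThreadLocal(self, num_bytes)) {
  //     // Fits in the current thread-local run; no new run is allocated.
  //     obj = space->AllocThreadLocal(self, num_bytes, &bytes_allocated);
  //   } else {
  //     obj = space->Alloc(self, num_bytes, &bytes_allocated, &usable_size,
  //                        &bytes_tl_bulk_allocated);
  //   }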
  size_t MaxBytesBulkAllocatedFor(size_t num_bytes) OVERRIDE {
    return MaxBytesBulkAllocatedForNonvirtual(num_bytes);
  }
  ALWAYS_INLINE size_t MaxBytesBulkAllocatedForNonvirtual(size_t num_bytes);

  // TODO: NO_THREAD_SAFETY_ANALYSIS because SizeOf() requires that mutator_lock is held.
  template<bool kMaybeIsRunningOnMemoryTool>
  size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
      NO_THREAD_SAFETY_ANALYSIS;

  allocator::RosAlloc* GetRosAlloc() const {
    return rosalloc_;
  }

  size_t Trim() OVERRIDE;
  void Walk(WalkCallback callback, void* arg) OVERRIDE REQUIRES(!lock_);
  size_t GetFootprint() OVERRIDE;
  size_t GetFootprintLimit() OVERRIDE;
  void SetFootprintLimit(size_t limit) OVERRIDE;

  void Clear() OVERRIDE;

  MallocSpace* CreateInstance(MemMap* mem_map, const std::string& name, void* allocator,
                              uint8_t* begin, uint8_t* end, uint8_t* limit, size_t growth_limit,
                              bool can_move_objects) OVERRIDE;

  uint64_t GetBytesAllocated() OVERRIDE;
  uint64_t GetObjectsAllocated() OVERRIDE;

  size_t RevokeThreadLocalBuffers(Thread* thread);
  size_t RevokeAllThreadLocalBuffers();
  void AssertThreadLocalBuffersAreRevoked(Thread* thread);
  void AssertAllThreadLocalBuffersAreRevoked();

  // Returns the class of a recently freed object.
  mirror::Class* FindRecentFreedObject(const mirror::Object* obj);

  bool IsRosAllocSpace() const OVERRIDE {
    return true;
  }

  RosAllocSpace* AsRosAllocSpace() OVERRIDE {
    return this;
  }

  void Verify() REQUIRES(Locks::mutator_lock_) {
    rosalloc_->Verify();
  }

  virtual ~RosAllocSpace();

  void LogFragmentationAllocFailure(std::ostream& os, size_t failed_alloc_bytes) OVERRIDE {
    rosalloc_->LogFragmentationAllocFailure(os, failed_alloc_bytes);
  }

  void DumpStats(std::ostream& os);

 protected:
  RosAllocSpace(MemMap* mem_map, size_t initial_size, const std::string& name,
                allocator::RosAlloc* rosalloc, uint8_t* begin, uint8_t* end, uint8_t* limit,
                size_t growth_limit, bool can_move_objects, size_t starting_size,
                bool low_memory_mode);

 private:
  template<bool kThreadSafe = true>
  mirror::Object* AllocCommon(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                              size_t* usable_size, size_t* bytes_tl_bulk_allocated);

  void* CreateAllocator(void* base, size_t morecore_start, size_t initial_size,
                        size_t maximum_size, bool low_memory_mode) OVERRIDE {
    return CreateRosAlloc(base, morecore_start, initial_size, maximum_size, low_memory_mode,
                          RUNNING_ON_MEMORY_TOOL != 0);
  }
  static allocator::RosAlloc* CreateRosAlloc(void* base, size_t morecore_start, size_t initial_size,
                                             size_t maximum_size, bool low_memory_mode,
                                             bool running_on_memory_tool);

  void InspectAllRosAlloc(void (*callback)(void *start, void *end, size_t num_bytes, void* callback_arg),
                          void* arg, bool do_null_callback_at_end)
      REQUIRES(!Locks::runtime_shutdown_lock_, !Locks::thread_list_lock_);
  void InspectAllRosAllocWithSuspendAll(
      void (*callback)(void *start, void *end, size_t num_bytes, void* callback_arg),
      void* arg, bool do_null_callback_at_end)
      REQUIRES(!Locks::runtime_shutdown_lock_, !Locks::thread_list_lock_);

  // Underlying rosalloc.
  allocator::RosAlloc* rosalloc_;

  const bool low_memory_mode_;

  friend class collector::MarkSweep;

  DISALLOW_COPY_AND_ASSIGN(RosAllocSpace);
};

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_ROSALLOC_SPACE_H_