Home | History | Annotate | Download | only in space
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
     18 #define ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
     19 
     20 #include "gc/accounting/gc_allocator.h"
     21 #include "dlmalloc_space.h"
     22 #include "safe_map.h"
     23 #include "space.h"
     24 
     25 #include <set>
     26 #include <vector>
     27 
     28 namespace art {
     29 namespace gc {
     30 namespace space {
     31 
// Abstraction implemented by all large object spaces. Tracks per-space allocation
// statistics and provides the common free-list bulk-free entry point.
class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace {
 public:
  // Identifies this space as a large object space to the generic Space API.
  virtual SpaceType GetType() const {
    return kSpaceTypeLargeObjectSpace;
  }

  // Swaps the live and mark bitmaps of this space.
  virtual void SwapBitmaps();
  // Copies the live bitmap into the mark bitmap.
  virtual void CopyLiveToMarked();
  // Visits every allocation in the space, invoking callback for each; subclasses
  // define iteration order and locking (see their Walk overrides).
  virtual void Walk(DlMallocSpace::WalkCallback, void* arg) = 0;
  virtual ~LargeObjectSpace() {}

  // Bytes currently allocated (live) in this space.
  uint64_t GetBytesAllocated() const {
    return num_bytes_allocated_;
  }

  // Objects currently allocated (live) in this space.
  uint64_t GetObjectsAllocated() const {
    return num_objects_allocated_;
  }

  // Running total of bytes allocated over the lifetime of the space
  // (presumably never decremented on free — confirm in the .cc).
  uint64_t GetTotalBytesAllocated() const {
    return total_bytes_allocated_;
  }

  // Running total of objects allocated over the lifetime of the space
  // (presumably never decremented on free — confirm in the .cc).
  uint64_t GetTotalObjectsAllocated() const {
    return total_objects_allocated_;
  }

  // Frees num_ptrs objects in one call; returns bytes freed (assumed — see the
  // subclass Free implementations this dispatches to).
  size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs);

 protected:
  explicit LargeObjectSpace(const std::string& name);

  // Approximate number of bytes which have been allocated into the space.
  size_t num_bytes_allocated_;
  // Approximate number of objects which have been allocated into the space.
  size_t num_objects_allocated_;
  // Lifetime totals backing the GetTotal*Allocated accessors above.
  size_t total_bytes_allocated_;
  size_t total_objects_allocated_;

  friend class Space;

 private:
  DISALLOW_COPY_AND_ASSIGN(LargeObjectSpace);
};
     76 
// A discontinuous large object space implemented by individual mmap/munmap calls,
// one memory map per allocated object.
class LargeObjectMapSpace : public LargeObjectSpace {
 public:
  // Creates a large object space. Allocations into the large object space use memory maps instead
  // of malloc.
  static LargeObjectMapSpace* Create(const std::string& name);

  // Return the storage space required by obj.
  size_t AllocationSize(const mirror::Object* obj);
  // Allocates num_bytes in a dedicated memory map; *bytes_allocated receives the
  // size actually reserved (presumably page-rounded — confirm in the .cc).
  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
  // Releases the memory map backing ptr; returns bytes freed.
  size_t Free(Thread* self, mirror::Object* ptr);
  // Visits each allocated object under lock_.
  void Walk(DlMallocSpace::WalkCallback, void* arg) LOCKS_EXCLUDED(lock_);
  // TODO: disabling thread safety analysis as this may be called when we already hold lock_.
  bool Contains(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS;

 private:
  explicit LargeObjectMapSpace(const std::string& name);
  virtual ~LargeObjectMapSpace() {}

  // Used to ensure mutual exclusion when the allocation spaces data structures are being modified.
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  // Every object currently allocated in this space.
  std::vector<mirror::Object*,
      accounting::GCAllocator<mirror::Object*> > large_objects_ GUARDED_BY(lock_);
  // Maps each allocated object to the MemMap that backs it, so Free can munmap it.
  typedef SafeMap<mirror::Object*, MemMap*, std::less<mirror::Object*>,
      accounting::GCAllocator<std::pair<const mirror::Object*, MemMap*> > > MemMaps;
  MemMaps mem_maps_ GUARDED_BY(lock_);
};
    104 
// A continuous large object space with a free-list to handle holes. Backed by a
// single memory map; each allocation is preceded by an AllocationHeader recording
// its size and the amount of free space before it.
class FreeListSpace : public LargeObjectSpace {
 public:
  virtual ~FreeListSpace();
  // Creates the space, reserving capacity bytes (optionally at requested_begin).
  static FreeListSpace* Create(const std::string& name, byte* requested_begin, size_t capacity);

  // Returns the storage consumed by obj, read from its AllocationHeader.
  size_t AllocationSize(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(lock_);
  // Allocates num_bytes from the free list; *bytes_allocated receives the size
  // actually reserved (presumably kAlignment-rounded — confirm in the .cc).
  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
  // Returns obj's storage to the free list; returns bytes freed.
  size_t Free(Thread* self, mirror::Object* obj);
  bool Contains(const mirror::Object* obj) const;
  // Visits each allocated object under lock_.
  void Walk(DlMallocSpace::WalkCallback callback, void* arg) LOCKS_EXCLUDED(lock_);

  // Address at which the space begins.
  byte* Begin() const {
    return begin_;
  }

  // Address at which the space ends, which may vary as the space is filled.
  byte* End() const {
    return end_;
  }

  // Current size of space.
  size_t Size() const {
    return End() - Begin();
  }

  void Dump(std::ostream& os) const;

 private:
  // Allocation granularity; headers and sizes are kept page-aligned (the DCHECKs
  // below enforce this).
  static const size_t kAlignment = kPageSize;

  // Bookkeeping record placed immediately before each allocation in the space.
  class AllocationHeader {
   public:
    // Returns the allocation size, includes the header.
    size_t AllocationSize() const {
      return alloc_size_;
    }

    // Updates the allocation size in the header, the allocation size includes the header itself.
    void SetAllocationSize(size_t size) {
      DCHECK(IsAligned<kPageSize>(size));
      alloc_size_ = size;
    }

    // A zero allocation size marks this header's memory as free.
    bool IsFree() const {
      return AllocationSize() == 0;
    }

    // Returns the previous free allocation header by using the prev_free_ member to figure out
    // where it is. If prev free is 0 then we just return ourselves.
    AllocationHeader* GetPrevFreeAllocationHeader() {
      return reinterpret_cast<AllocationHeader*>(reinterpret_cast<uintptr_t>(this) - prev_free_);
    }

    // Returns the address of the object associated with this allocation header.
    // The object starts directly after the header.
    mirror::Object* GetObjectAddress() {
      return reinterpret_cast<mirror::Object*>(reinterpret_cast<uintptr_t>(this) + sizeof(*this));
    }

    // Returns the next allocation header after the object associated with this allocation header.
    // Only valid for allocated (non-free) headers, hence the DCHECK.
    AllocationHeader* GetNextAllocationHeader() {
      DCHECK_NE(alloc_size_, 0U);
      return reinterpret_cast<AllocationHeader*>(reinterpret_cast<uintptr_t>(this) + alloc_size_);
    }

    // Returns how many free bytes there are before the block.
    size_t GetPrevFree() const {
      return prev_free_;
    }

    // Update the size of the free block prior to the allocation.
    void SetPrevFree(size_t prev_free) {
      DCHECK(IsAligned<kPageSize>(prev_free));
      prev_free_ = prev_free;
    }

    // Finds and returns the next non free allocation header after ourselves.
    // TODO: Optimize, currently O(n) for n free following pages.
    AllocationHeader* GetNextNonFree();

    // Used to implement best fit object allocation. Each allocation has an AllocationHeader which
    // contains the size of the previous free block preceding it. Implemented in such a way that we
    // can also find the iterator for any allocation header pointer.
    class SortByPrevFree {
     public:
      // Strict weak ordering: by prev_free_, then alloc_size_, then address as
      // the final tie-break so distinct headers never compare equal.
      bool operator()(const AllocationHeader* a, const AllocationHeader* b) const {
        if (a->GetPrevFree() < b->GetPrevFree()) return true;
        if (a->GetPrevFree() > b->GetPrevFree()) return false;
        if (a->AllocationSize() < b->AllocationSize()) return true;
        if (a->AllocationSize() > b->AllocationSize()) return false;
        return reinterpret_cast<uintptr_t>(a) < reinterpret_cast<uintptr_t>(b);
      }
    };

   private:
    // Contains the size of the previous free block, if 0 then the memory preceding us is an
    // allocation.
    size_t prev_free_;

    // Allocation size of this object, 0 means that the allocation header is free memory.
    size_t alloc_size_;

    friend class FreeListSpace;
  };

  FreeListSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end);

  // Removes header from the free blocks set by finding the corresponding iterator and erasing it.
  void RemoveFreePrev(AllocationHeader* header) EXCLUSIVE_LOCKS_REQUIRED(lock_);

  // Finds the allocation header corresponding to obj.
  AllocationHeader* GetAllocationHeader(const mirror::Object* obj);

  // Free blocks ordered by SortByPrevFree to support best-fit allocation.
  typedef std::set<AllocationHeader*, AllocationHeader::SortByPrevFree,
                   accounting::GCAllocator<AllocationHeader*> > FreeBlocks;

  byte* const begin_;
  byte* const end_;

  // There is no footer for any allocations at the end of the space, so we keep track of how much
  // free space there is at the end manually.
  UniquePtr<MemMap> mem_map_;
  Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  size_t free_end_ GUARDED_BY(lock_);
  FreeBlocks free_blocks_ GUARDED_BY(lock_);
};
    232 
    233 }  // namespace space
    234 }  // namespace gc
    235 }  // namespace art
    236 
    237 #endif  // ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
    238