
/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "GrBufferAllocPool.h"
#include "GrTypes.h"
#include "GrVertexBuffer.h"
#include "GrIndexBuffer.h"
#include "GrGpu.h"

#if GR_DEBUG
    #define VALIDATE validate
#else
    static void VALIDATE(bool x = false) {}
#endif

// page size
#define GrBufferAllocPool_MIN_BLOCK_SIZE ((size_t)1 << 12)
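// (1 << 12 == 4096 bytes, a typical CPU page size)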

GrBufferAllocPool::GrBufferAllocPool(GrGpu* gpu,
                                     BufferType bufferType,
                                     bool frequentResetHint,
                                     size_t blockSize,
                                     int preallocBufferCnt) :
        fBlocks(GrMax(8, 2*preallocBufferCnt)) {

    GrAssert(NULL != gpu);
    fGpu = gpu;
    fGpu->ref();
    fGpuIsReffed = true;

    fBufferType = bufferType;
    fFrequentResetHint = frequentResetHint;
    fBufferPtr = NULL;
    fMinBlockSize = GrMax(GrBufferAllocPool_MIN_BLOCK_SIZE, blockSize);

    fBytesInUse = 0;

    fPreallocBuffersInUse = 0;
    fPreallocBufferStartIdx = 0;
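    // Eagerly create the preallocated buffers at the minimum block size; if a
    // creation fails that buffer is simply omitted from the preallocated set.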
    for (int i = 0; i < preallocBufferCnt; ++i) {
        GrGeometryBuffer* buffer = this->createBuffer(fMinBlockSize);
        if (NULL != buffer) {
            *fPreallocBuffers.append() = buffer;
        }
    }
}

GrBufferAllocPool::~GrBufferAllocPool() {
    VALIDATE();
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    while (!fBlocks.empty()) {
        destroyBlock();
    }
    fPreallocBuffers.unrefAll();
    releaseGpuRef();
}

void GrBufferAllocPool::releaseGpuRef() {
    if (fGpuIsReffed) {
        fGpu->unref();
        fGpuIsReffed = false;
    }
}

void GrBufferAllocPool::reset() {
    VALIDATE();
    fBytesInUse = 0;
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    // fPreallocBuffersInUse will be decremented down to zero in the while loop
    int preallocBuffersInUse = fPreallocBuffersInUse;
    while (!fBlocks.empty()) {
        this->destroyBlock();
    }
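    // Advance the preallocated-buffer ring so subsequent allocations cycle
    // through the buffers that were not used this time. For example, with 4
    // preallocated buffers, a start index of 1, and 2 buffers handed out, the
    // new start index is (1 + 2) % 4 == 3.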
    if (fPreallocBuffers.count()) {
        // This must be set after the loop above, which decrements
        // fPreallocBuffersInUse down to zero.
        fPreallocBufferStartIdx = (fPreallocBufferStartIdx +
                                   preallocBuffersInUse) %
                                  fPreallocBuffers.count();
    }
    // we may have created a large cpu mirror of a large VB. Reset the size
    // to match our pre-allocated VBs.
    fCpuData.reset(fMinBlockSize);
    GrAssert(0 == fPreallocBuffersInUse);
    VALIDATE();
}

void GrBufferAllocPool::unlock() {
    VALIDATE();

    if (NULL != fBufferPtr) {
        BufferBlock& block = fBlocks.back();
        if (block.fBuffer->isLocked()) {
            block.fBuffer->unlock();
        } else {
            size_t flushSize = block.fBuffer->sizeInBytes() - block.fBytesFree;
            flushCpuData(fBlocks.back().fBuffer, flushSize);
        }
        fBufferPtr = NULL;
    }
    VALIDATE();
}

#if GR_DEBUG
void GrBufferAllocPool::validate(bool unusedBlockAllowed) const {
    if (NULL != fBufferPtr) {
        GrAssert(!fBlocks.empty());
        if (fBlocks.back().fBuffer->isLocked()) {
            GrGeometryBuffer* buf = fBlocks.back().fBuffer;
            GrAssert(buf->lockPtr() == fBufferPtr);
        } else {
            GrAssert(fCpuData.get() == fBufferPtr);
        }
    } else {
        GrAssert(fBlocks.empty() || !fBlocks.back().fBuffer->isLocked());
    }
    size_t bytesInUse = 0;
    for (int i = 0; i < fBlocks.count() - 1; ++i) {
        GrAssert(!fBlocks[i].fBuffer->isLocked());
    }
    for (int i = 0; i < fBlocks.count(); ++i) {
        size_t bytes = fBlocks[i].fBuffer->sizeInBytes() - fBlocks[i].fBytesFree;
        bytesInUse += bytes;
        GrAssert(bytes || unusedBlockAllowed);
    }

    GrAssert(bytesInUse == fBytesInUse);
    if (unusedBlockAllowed) {
        GrAssert((fBytesInUse && !fBlocks.empty()) ||
                 (!fBytesInUse && (fBlocks.count() < 2)));
    } else {
        GrAssert((0 == fBytesInUse) == fBlocks.empty());
    }
}
#endif

void* GrBufferAllocPool::makeSpace(size_t size,
                                   size_t alignment,
                                   const GrGeometryBuffer** buffer,
                                   size_t* offset) {
    VALIDATE();

    GrAssert(NULL != buffer);
    GrAssert(NULL != offset);

    if (NULL != fBufferPtr) {
        BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
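        // Pad the current offset up to the requested alignment. For example,
        // if usedBytes is 10 and alignment is 4, the pad is 2 and the new
        // allocation starts at offset 12.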
        size_t pad = GrSizeAlignUpPad(usedBytes, alignment);
        if ((size + pad) <= back.fBytesFree) {
            usedBytes += pad;
            *offset = usedBytes;
            *buffer = back.fBuffer;
            back.fBytesFree -= size + pad;
            fBytesInUse += size + pad;
            VALIDATE();
            return (void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes);
        }
    }

    // We could honor the space request with a partial update of the current
    // VB (if there is room). But we don't currently use draw calls to GL that
    // allow the driver to know that previously issued draws won't read from
    // the part of the buffer we update. Also, the GL buffer implementation
    // may be cheating on the actual buffer size by shrinking the buffer on
    // updateData() if the amount of data passed is less than the full buffer
    // size.

    if (!createBlock(size)) {
        return NULL;
    }
    GrAssert(NULL != fBufferPtr);

    *offset = 0;
    BufferBlock& back = fBlocks.back();
    *buffer = back.fBuffer;
    back.fBytesFree -= size;
    fBytesInUse += size;
    VALIDATE();
    return fBufferPtr;
}

int GrBufferAllocPool::currentBufferItems(size_t itemSize) const {
    VALIDATE();
    if (NULL != fBufferPtr) {
        const BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
        size_t pad = GrSizeAlignUpPad(usedBytes, itemSize);
        return (back.fBytesFree - pad) / itemSize;
    } else if (fPreallocBuffersInUse < fPreallocBuffers.count()) {
        return fMinBlockSize / itemSize;
    }
    return 0;
}

int GrBufferAllocPool::preallocatedBuffersRemaining() const {
    return fPreallocBuffers.count() - fPreallocBuffersInUse;
}

int GrBufferAllocPool::preallocatedBufferCount() const {
    return fPreallocBuffers.count();
}

void GrBufferAllocPool::putBack(size_t bytes) {
    VALIDATE();

    // If the putBack unwinds all the preallocated buffers then we will
    // advance the starting index. As blocks are destroyed fPreallocBuffersInUse
    // will be decremented. It will reach zero if all blocks using preallocated
    // buffers are released.
    int preallocBuffersInUse = fPreallocBuffersInUse;

    while (bytes) {
        // The caller shouldn't try to put back more than they've taken.
        GrAssert(!fBlocks.empty());
        BufferBlock& block = fBlocks.back();
        size_t bytesUsed = block.fBuffer->sizeInBytes() - block.fBytesFree;
        if (bytes >= bytesUsed) {
            bytes -= bytesUsed;
            fBytesInUse -= bytesUsed;
            // If we locked a VB to satisfy the makeSpace call and we're
            // releasing beyond it, then unlock it.
            if (block.fBuffer->isLocked()) {
                block.fBuffer->unlock();
            }
            this->destroyBlock();
        } else {
            block.fBytesFree += bytes;
            fBytesInUse -= bytes;
            bytes = 0;
            break;
        }
    }
    if (!fPreallocBuffersInUse && fPreallocBuffers.count()) {
        fPreallocBufferStartIdx = (fPreallocBufferStartIdx +
                                   preallocBuffersInUse) %
                                  fPreallocBuffers.count();
    }
    VALIDATE();
}

bool GrBufferAllocPool::createBlock(size_t requestSize) {

    size_t size = GrMax(requestSize, fMinBlockSize);
    GrAssert(size >= GrBufferAllocPool_MIN_BLOCK_SIZE);

    VALIDATE();

    BufferBlock& block = fBlocks.push_back();

    if (size == fMinBlockSize &&
        fPreallocBuffersInUse < fPreallocBuffers.count()) {

        uint32_t nextBuffer = (fPreallocBuffersInUse +
                               fPreallocBufferStartIdx) %
                              fPreallocBuffers.count();
        block.fBuffer = fPreallocBuffers[nextBuffer];
        block.fBuffer->ref();
        ++fPreallocBuffersInUse;
    } else {
        block.fBuffer = this->createBuffer(size);
        if (NULL == block.fBuffer) {
            fBlocks.pop_back();
            return false;
        }
    }

    block.fBytesFree = size;
    if (NULL != fBufferPtr) {
        GrAssert(fBlocks.count() > 1);
        BufferBlock& prev = fBlocks.fromBack(1);
        if (prev.fBuffer->isLocked()) {
            prev.fBuffer->unlock();
        } else {
            flushCpuData(prev.fBuffer,
                         prev.fBuffer->sizeInBytes() - prev.fBytesFree);
        }
        fBufferPtr = NULL;
    }

    GrAssert(NULL == fBufferPtr);

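    // Lock the new buffer directly only when the GPU supports buffer locking,
    // the block exceeds the lock threshold, and (for frequently reset pools)
    // the request itself exceeds the threshold; otherwise stage writes in CPU
    // memory and flush them to the buffer later.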
    if (fGpu->getCaps().bufferLockSupport() &&
        size > GR_GEOM_BUFFER_LOCK_THRESHOLD &&
        (!fFrequentResetHint || requestSize > GR_GEOM_BUFFER_LOCK_THRESHOLD)) {
        fBufferPtr = block.fBuffer->lock();
    }

    if (NULL == fBufferPtr) {
        fBufferPtr = fCpuData.reset(size);
    }

    VALIDATE(true);

    return true;
}

void GrBufferAllocPool::destroyBlock() {
    GrAssert(!fBlocks.empty());

    BufferBlock& block = fBlocks.back();
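    // If this block is backed by the most recently handed-out preallocated
    // buffer, return that buffer to the ring by decrementing the in-use count.
    // The modular arithmetic below computes that buffer's index in the ring.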
    if (fPreallocBuffersInUse > 0) {
        uint32_t prevPreallocBuffer = (fPreallocBuffersInUse +
                                       fPreallocBufferStartIdx +
                                       (fPreallocBuffers.count() - 1)) %
                                      fPreallocBuffers.count();
        if (block.fBuffer == fPreallocBuffers[prevPreallocBuffer]) {
            --fPreallocBuffersInUse;
        }
    }
    GrAssert(!block.fBuffer->isLocked());
    block.fBuffer->unref();
    fBlocks.pop_back();
    fBufferPtr = NULL;
}

void GrBufferAllocPool::flushCpuData(GrGeometryBuffer* buffer,
                                     size_t flushSize) {
    GrAssert(NULL != buffer);
    GrAssert(!buffer->isLocked());
    GrAssert(fCpuData.get() == fBufferPtr);
    GrAssert(flushSize <= buffer->sizeInBytes());
    VALIDATE(true);

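    // Prefer lock() + memcpy() for large flushes when the GPU supports buffer
    // locking; otherwise (or if the lock fails) fall back to updateData().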
    if (fGpu->getCaps().bufferLockSupport() &&
        flushSize > GR_GEOM_BUFFER_LOCK_THRESHOLD) {
        void* data = buffer->lock();
        if (NULL != data) {
            memcpy(data, fBufferPtr, flushSize);
            buffer->unlock();
            return;
        }
    }
    buffer->updateData(fBufferPtr, flushSize);
    VALIDATE(true);
}

GrGeometryBuffer* GrBufferAllocPool::createBuffer(size_t size) {
    if (kIndex_BufferType == fBufferType) {
        return fGpu->createIndexBuffer(size, true);
    } else {
        GrAssert(kVertex_BufferType == fBufferType);
        return fGpu->createVertexBuffer(size, true);
    }
}

////////////////////////////////////////////////////////////////////////////////

GrVertexBufferAllocPool::GrVertexBufferAllocPool(GrGpu* gpu,
                                                 bool frequentResetHint,
                                                 size_t bufferSize,
                                                 int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kVertex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}

void* GrVertexBufferAllocPool::makeSpace(size_t vertexSize,
                                         int vertexCount,
                                         const GrVertexBuffer** buffer,
                                         int* startVertex) {

    GrAssert(vertexCount >= 0);
    GrAssert(NULL != buffer);
    GrAssert(NULL != startVertex);

    size_t offset = 0; // assign to suppress warning
    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
    void* ptr = INHERITED::makeSpace(vertexSize * vertexCount,
                                     vertexSize,
                                     &geomBuffer,
                                     &offset);

    *buffer = (const GrVertexBuffer*) geomBuffer;
    GrAssert(0 == offset % vertexSize);
    *startVertex = offset / vertexSize;
    return ptr;
}

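// A minimal caller-side sketch (names below are hypothetical, not part of this
// API): reserve space for four vertices and record where they landed.
//
//     struct Vertex { float x, y; };
//     Vertex quad[4] = { /* ... */ };
//     const GrVertexBuffer* vb = NULL;
//     int startVertex = 0;
//     if (vertexPool.appendVertices(sizeof(Vertex), 4, quad, &vb, &startVertex)) {
//         // Draw 4 vertices from vb beginning at startVertex. The pool's
//         // unlock() must run first so any CPU-side copy reaches the buffer.
//     }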
bool GrVertexBufferAllocPool::appendVertices(size_t vertexSize,
                                             int vertexCount,
                                             const void* vertices,
                                             const GrVertexBuffer** buffer,
                                             int* startVertex) {
    void* space = makeSpace(vertexSize, vertexCount, buffer, startVertex);
    if (NULL != space) {
        memcpy(space, vertices, vertexSize * vertexCount);
        return true;
    } else {
        return false;
    }
}

int GrVertexBufferAllocPool::preallocatedBufferVertices(size_t vertexSize) const {
    return INHERITED::preallocatedBufferSize() / vertexSize;
}

int GrVertexBufferAllocPool::currentBufferVertices(size_t vertexSize) const {
    return currentBufferItems(vertexSize);
}

////////////////////////////////////////////////////////////////////////////////

GrIndexBufferAllocPool::GrIndexBufferAllocPool(GrGpu* gpu,
                                               bool frequentResetHint,
                                               size_t bufferSize,
                                               int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kIndex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}

void* GrIndexBufferAllocPool::makeSpace(int indexCount,
                                        const GrIndexBuffer** buffer,
                                        int* startIndex) {

    GrAssert(indexCount >= 0);
    GrAssert(NULL != buffer);
    GrAssert(NULL != startIndex);

    size_t offset = 0; // assign to suppress warning
    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
    void* ptr = INHERITED::makeSpace(indexCount * sizeof(uint16_t),
                                     sizeof(uint16_t),
                                     &geomBuffer,
                                     &offset);

    *buffer = (const GrIndexBuffer*) geomBuffer;
    GrAssert(0 == offset % sizeof(uint16_t));
    *startIndex = offset / sizeof(uint16_t);
    return ptr;
}

bool GrIndexBufferAllocPool::appendIndices(int indexCount,
                                           const void* indices,
                                           const GrIndexBuffer** buffer,
                                           int* startIndex) {
    void* space = makeSpace(indexCount, buffer, startIndex);
    if (NULL != space) {
        memcpy(space, indices, sizeof(uint16_t) * indexCount);
        return true;
    } else {
        return false;
    }
}

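// Indices are 16-bit, so e.g. a 4 KB preallocated buffer (the pool's minimum
// block size) holds 4096 / sizeof(uint16_t) == 2048 indices.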
int GrIndexBufferAllocPool::preallocatedBufferIndices() const {
    return INHERITED::preallocatedBufferSize() / sizeof(uint16_t);
}

int GrIndexBufferAllocPool::currentBufferIndices() const {
    return currentBufferItems(sizeof(uint16_t));
}
    476