/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "GrBufferAllocPool.h"
#include "GrTypes.h"
#include "GrVertexBuffer.h"
#include "GrIndexBuffer.h"
#include "GrGpu.h"

#if GR_DEBUG
    #define VALIDATE validate
#else
    static void VALIDATE(bool x = false) {}
#endif

// page size
#define GrBufferAllocPool_MIN_BLOCK_SIZE ((size_t)1 << 12)

GrBufferAllocPool::GrBufferAllocPool(GrGpu* gpu,
                                     BufferType bufferType,
                                     bool frequentResetHint,
                                     size_t blockSize,
                                     int preallocBufferCnt) :
        fBlocks(GrMax(8, 2*preallocBufferCnt)) {

    GrAssert(NULL != gpu);
    fGpu = gpu;
    fGpu->ref();
    fGpuIsReffed = true;

    fBufferType = bufferType;
    fFrequentResetHint = frequentResetHint;
    fBufferPtr = NULL;
    fMinBlockSize = GrMax(GrBufferAllocPool_MIN_BLOCK_SIZE, blockSize);

    fBytesInUse = 0;

    fPreallocBuffersInUse = 0;
    fPreallocBufferStartIdx = 0;
    for (int i = 0; i < preallocBufferCnt; ++i) {
        GrGeometryBuffer* buffer = this->createBuffer(fMinBlockSize);
        if (NULL != buffer) {
            *fPreallocBuffers.append() = buffer;
        }
    }
}

GrBufferAllocPool::~GrBufferAllocPool() {
    VALIDATE();
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    while (!fBlocks.empty()) {
        destroyBlock();
    }
    fPreallocBuffers.unrefAll();
    releaseGpuRef();
}

void GrBufferAllocPool::releaseGpuRef() {
    if (fGpuIsReffed) {
        fGpu->unref();
        fGpuIsReffed = false;
    }
}

void GrBufferAllocPool::reset() {
    VALIDATE();
    fBytesInUse = 0;
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    // fPreallocBuffersInUse will be decremented down to zero in the while loop
    int preallocBuffersInUse = fPreallocBuffersInUse;
    while (!fBlocks.empty()) {
        this->destroyBlock();
    }
    if (fPreallocBuffers.count()) {
        // must set this after the above loop.
        fPreallocBufferStartIdx = (fPreallocBufferStartIdx +
                                   preallocBuffersInUse) %
                                  fPreallocBuffers.count();
    }
    // We may have created a large CPU mirror of a large VB. Reset the size
    // to match our pre-allocated VBs.
    fCpuData.reset(fMinBlockSize);
    GrAssert(0 == fPreallocBuffersInUse);
    VALIDATE();
}

void GrBufferAllocPool::unlock() {
    VALIDATE();

    if (NULL != fBufferPtr) {
        BufferBlock& block = fBlocks.back();
        if (block.fBuffer->isLocked()) {
            block.fBuffer->unlock();
        } else {
            size_t flushSize = block.fBuffer->sizeInBytes() - block.fBytesFree;
            flushCpuData(fBlocks.back().fBuffer, flushSize);
        }
        fBufferPtr = NULL;
    }
    VALIDATE();
}

#if GR_DEBUG
void GrBufferAllocPool::validate(bool unusedBlockAllowed) const {
    if (NULL != fBufferPtr) {
        GrAssert(!fBlocks.empty());
        if (fBlocks.back().fBuffer->isLocked()) {
            GrGeometryBuffer* buf = fBlocks.back().fBuffer;
            GrAssert(buf->lockPtr() == fBufferPtr);
        } else {
            GrAssert(fCpuData.get() == fBufferPtr);
        }
    } else {
        GrAssert(fBlocks.empty() || !fBlocks.back().fBuffer->isLocked());
    }
    size_t bytesInUse = 0;
    for (int i = 0; i < fBlocks.count() - 1; ++i) {
        GrAssert(!fBlocks[i].fBuffer->isLocked());
    }
    for (int i = 0; i < fBlocks.count(); ++i) {
        size_t bytes = fBlocks[i].fBuffer->sizeInBytes() - fBlocks[i].fBytesFree;
        bytesInUse += bytes;
        GrAssert(bytes || unusedBlockAllowed);
    }

    GrAssert(bytesInUse == fBytesInUse);
    if (unusedBlockAllowed) {
        GrAssert((fBytesInUse && !fBlocks.empty()) ||
                 (!fBytesInUse && (fBlocks.count() < 2)));
    } else {
        GrAssert((0 == fBytesInUse) == fBlocks.empty());
    }
}
#endif

void* GrBufferAllocPool::makeSpace(size_t size,
                                   size_t alignment,
                                   const GrGeometryBuffer** buffer,
                                   size_t* offset) {
    VALIDATE();

    GrAssert(NULL != buffer);
    GrAssert(NULL != offset);

    if (NULL != fBufferPtr) {
        BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
        size_t pad = GrSizeAlignUpPad(usedBytes, alignment);
        if ((size + pad) <= back.fBytesFree) {
            usedBytes += pad;
            *offset = usedBytes;
            *buffer = back.fBuffer;
            back.fBytesFree -= size + pad;
            fBytesInUse += size;
            return (void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes);
        }
    }

    // We could honor the space request with a partial update of the current
    // VB (if there is room). But we don't currently use draw calls to GL that
    // allow the driver to know that previously issued draws won't read from
    // the part of the buffer we update. Also, the GL buffer implementation
    // may be cheating on the actual buffer size by shrinking the buffer on
    // updateData() if the amount of data passed is less than the full buffer
    // size.

    if (!createBlock(size)) {
        return NULL;
    }
    GrAssert(NULL != fBufferPtr);

    *offset = 0;
    BufferBlock& back = fBlocks.back();
    *buffer = back.fBuffer;
    back.fBytesFree -= size;
    fBytesInUse += size;
    VALIDATE();
    return fBufferPtr;
}
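
// Caller-side sketch (not part of the original file) of the makeSpace()
// contract above: the returned pointer is CPU-writable scratch storage, and
// *buffer / *offset name the GPU buffer and byte offset the data will occupy
// once the pool is unlocked or flushed. The pool pointer, vertexSize, and
// vertexCount below are hypothetical values owned by the caller.
//
//     const GrGeometryBuffer* buffer = NULL;
//     size_t offset = 0;
//     void* ptr = pool->makeSpace(vertexSize * vertexCount, // bytes requested
//                                 vertexSize,               // required alignment
//                                 &buffer,
//                                 &offset);
//     if (NULL != ptr) {
//         // Write vertexCount vertices into ptr; they land in 'buffer' at
//         // byte 'offset' when the pool is unlocked (or flushed).
//     }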

int GrBufferAllocPool::currentBufferItems(size_t itemSize) const {
    VALIDATE();
    if (NULL != fBufferPtr) {
        const BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
        size_t pad = GrSizeAlignUpPad(usedBytes, itemSize);
        return (back.fBytesFree - pad) / itemSize;
    } else if (fPreallocBuffersInUse < fPreallocBuffers.count()) {
        return fMinBlockSize / itemSize;
    }
    return 0;
}

int GrBufferAllocPool::preallocatedBuffersRemaining() const {
    return fPreallocBuffers.count() - fPreallocBuffersInUse;
}

int GrBufferAllocPool::preallocatedBufferCount() const {
    return fPreallocBuffers.count();
}

void GrBufferAllocPool::putBack(size_t bytes) {
    VALIDATE();

    // If the putBack unwinds all the preallocated buffers then we will
    // advance the starting index. As blocks are destroyed fPreallocBuffersInUse
    // will be decremented. It will reach zero if all blocks using preallocated
    // buffers are released.
    int preallocBuffersInUse = fPreallocBuffersInUse;

    while (bytes) {
        // the caller shouldn't try to put back more than it has taken
        GrAssert(!fBlocks.empty());
        BufferBlock& block = fBlocks.back();
        size_t bytesUsed = block.fBuffer->sizeInBytes() - block.fBytesFree;
        if (bytes >= bytesUsed) {
            bytes -= bytesUsed;
            fBytesInUse -= bytesUsed;
            // if we locked a VB to satisfy the makeSpace and we're releasing
            // beyond it, then unlock it.
            if (block.fBuffer->isLocked()) {
                block.fBuffer->unlock();
            }
            this->destroyBlock();
        } else {
            block.fBytesFree += bytes;
            fBytesInUse -= bytes;
            bytes = 0;
            break;
        }
    }
    if (!fPreallocBuffersInUse && fPreallocBuffers.count()) {
        fPreallocBufferStartIdx = (fPreallocBufferStartIdx +
                                   preallocBuffersInUse) %
                                  fPreallocBuffers.count();
    }
    VALIDATE();
}
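
// Caller-side sketch (not part of the original file) of the putBack()
// contract: space is returned from the most recently allocated end of the
// pool. The names below are hypothetical.
//
//     // Reserved room for 'reservedCount' items but only wrote 'usedCount'.
//     pool->putBack((reservedCount - usedCount) * itemSize);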

bool GrBufferAllocPool::createBlock(size_t requestSize) {

    size_t size = GrMax(requestSize, fMinBlockSize);
    GrAssert(size >= GrBufferAllocPool_MIN_BLOCK_SIZE);

    VALIDATE();

    BufferBlock& block = fBlocks.push_back();

    if (size == fMinBlockSize &&
        fPreallocBuffersInUse < fPreallocBuffers.count()) {

        uint32_t nextBuffer = (fPreallocBuffersInUse +
                               fPreallocBufferStartIdx) %
                              fPreallocBuffers.count();
        block.fBuffer = fPreallocBuffers[nextBuffer];
        block.fBuffer->ref();
        ++fPreallocBuffersInUse;
    } else {
        block.fBuffer = this->createBuffer(size);
        if (NULL == block.fBuffer) {
            fBlocks.pop_back();
            return false;
        }
    }

    block.fBytesFree = size;
    if (NULL != fBufferPtr) {
        GrAssert(fBlocks.count() > 1);
        BufferBlock& prev = fBlocks.fromBack(1);
        if (prev.fBuffer->isLocked()) {
            prev.fBuffer->unlock();
        } else {
            flushCpuData(prev.fBuffer,
                         prev.fBuffer->sizeInBytes() - prev.fBytesFree);
        }
        fBufferPtr = NULL;
    }

    GrAssert(NULL == fBufferPtr);

    // Lock the new buffer directly when locking is supported, the block
    // exceeds the lock threshold, and (for frequently reset pools) the
    // request itself also exceeds the threshold. Otherwise fall back to the
    // CPU-side staging buffer below.
    if (fGpu->getCaps().fBufferLockSupport &&
        size > GR_GEOM_BUFFER_LOCK_THRESHOLD &&
        (!fFrequentResetHint || requestSize > GR_GEOM_BUFFER_LOCK_THRESHOLD)) {
        fBufferPtr = block.fBuffer->lock();
    }

    if (NULL == fBufferPtr) {
        fBufferPtr = fCpuData.reset(size);
    }

    VALIDATE(true);

    return true;
}

void GrBufferAllocPool::destroyBlock() {
    GrAssert(!fBlocks.empty());

    BufferBlock& block = fBlocks.back();
    if (fPreallocBuffersInUse > 0) {
        uint32_t prevPreallocBuffer = (fPreallocBuffersInUse +
                                       fPreallocBufferStartIdx +
                                       (fPreallocBuffers.count() - 1)) %
                                      fPreallocBuffers.count();
        if (block.fBuffer == fPreallocBuffers[prevPreallocBuffer]) {
            --fPreallocBuffersInUse;
        }
    }
    GrAssert(!block.fBuffer->isLocked());
    block.fBuffer->unref();
    fBlocks.pop_back();
    fBufferPtr = NULL;
}

void GrBufferAllocPool::flushCpuData(GrGeometryBuffer* buffer,
                                     size_t flushSize) {
    GrAssert(NULL != buffer);
    GrAssert(!buffer->isLocked());
    GrAssert(fCpuData.get() == fBufferPtr);
    GrAssert(flushSize <= buffer->sizeInBytes());

    if (fGpu->getCaps().fBufferLockSupport &&
        flushSize > GR_GEOM_BUFFER_LOCK_THRESHOLD) {
        void* data = buffer->lock();
        if (NULL != data) {
            memcpy(data, fBufferPtr, flushSize);
            buffer->unlock();
            return;
        }
    }
    buffer->updateData(fBufferPtr, flushSize);
}

GrGeometryBuffer* GrBufferAllocPool::createBuffer(size_t size) {
    if (kIndex_BufferType == fBufferType) {
        return fGpu->createIndexBuffer(size, true);
    } else {
        GrAssert(kVertex_BufferType == fBufferType);
        return fGpu->createVertexBuffer(size, true);
    }
}

////////////////////////////////////////////////////////////////////////////////

GrVertexBufferAllocPool::GrVertexBufferAllocPool(GrGpu* gpu,
                                                 bool frequentResetHint,
                                                 size_t bufferSize,
                                                 int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kVertex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}

void* GrVertexBufferAllocPool::makeSpace(GrVertexLayout layout,
                                         int vertexCount,
                                         const GrVertexBuffer** buffer,
                                         int* startVertex) {

    GrAssert(vertexCount >= 0);
    GrAssert(NULL != buffer);
    GrAssert(NULL != startVertex);

    size_t vSize = GrDrawTarget::VertexSize(layout);
    size_t offset = 0; // assign to suppress warning
    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
    void* ptr = INHERITED::makeSpace(vSize * vertexCount,
                                     vSize,
                                     &geomBuffer,
                                     &offset);

    *buffer = (const GrVertexBuffer*) geomBuffer;
    GrAssert(0 == offset % vSize);
    *startVertex = offset / vSize;
    return ptr;
}

bool GrVertexBufferAllocPool::appendVertices(GrVertexLayout layout,
                                             int vertexCount,
                                             const void* vertices,
                                             const GrVertexBuffer** buffer,
                                             int* startVertex) {
    void* space = makeSpace(layout, vertexCount, buffer, startVertex);
    if (NULL != space) {
        memcpy(space,
               vertices,
               GrDrawTarget::VertexSize(layout) * vertexCount);
        return true;
    } else {
        return false;
    }
}
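
// Caller-side sketch (not part of the original file) of appendVertices():
// copy pre-built vertex data into pooled space and get back the target VB
// and starting vertex. The pool instance, layout, vertex array, and count
// below are hypothetical.
//
//     const GrVertexBuffer* vb = NULL;
//     int startVertex = 0;
//     if (vertexPool.appendVertices(layout, vertexCount, verts,
//                                   &vb, &startVertex)) {
//         // Draw from 'vb' using vertices [startVertex, startVertex + vertexCount).
//     }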

int GrVertexBufferAllocPool::preallocatedBufferVertices(GrVertexLayout layout) const {
    return INHERITED::preallocatedBufferSize() /
            GrDrawTarget::VertexSize(layout);
}

int GrVertexBufferAllocPool::currentBufferVertices(GrVertexLayout layout) const {
    return currentBufferItems(GrDrawTarget::VertexSize(layout));
}

////////////////////////////////////////////////////////////////////////////////

GrIndexBufferAllocPool::GrIndexBufferAllocPool(GrGpu* gpu,
                                               bool frequentResetHint,
                                               size_t bufferSize,
                                               int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kIndex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}

void* GrIndexBufferAllocPool::makeSpace(int indexCount,
                                        const GrIndexBuffer** buffer,
                                        int* startIndex) {

    GrAssert(indexCount >= 0);
    GrAssert(NULL != buffer);
    GrAssert(NULL != startIndex);

    size_t offset = 0; // assign to suppress warning
    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
    void* ptr = INHERITED::makeSpace(indexCount * sizeof(uint16_t),
                                     sizeof(uint16_t),
                                     &geomBuffer,
                                     &offset);

    *buffer = (const GrIndexBuffer*) geomBuffer;
    GrAssert(0 == offset % sizeof(uint16_t));
    *startIndex = offset / sizeof(uint16_t);
    return ptr;
}

bool GrIndexBufferAllocPool::appendIndices(int indexCount,
                                           const void* indices,
                                           const GrIndexBuffer** buffer,
                                           int* startIndex) {
    void* space = makeSpace(indexCount, buffer, startIndex);
    if (NULL != space) {
        memcpy(space, indices, sizeof(uint16_t) * indexCount);
        return true;
    } else {
        return false;
    }
}
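
// Caller-side sketch (not part of the original file) of appendIndices(),
// using the common two-triangles-per-quad index pattern. The pool instance
// and the draw that consumes the indices are hypothetical.
//
//     static const uint16_t gQuadIndices[] = { 0, 1, 2, 0, 2, 3 };
//     const GrIndexBuffer* ib = NULL;
//     int startIndex = 0;
//     if (indexPool.appendIndices(GR_ARRAY_COUNT(gQuadIndices), gQuadIndices,
//                                 &ib, &startIndex)) {
//         // Issue an indexed draw reading 6 indices from 'ib' at 'startIndex'.
//     }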

int GrIndexBufferAllocPool::preallocatedBufferIndices() const {
    return INHERITED::preallocatedBufferSize() / sizeof(uint16_t);
}

int GrIndexBufferAllocPool::currentBufferIndices() const {
    return currentBufferItems(sizeof(uint16_t));
}
    475