/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkDescriptorSetManager.h"

#include "GrVkDescriptorPool.h"
#include "GrVkDescriptorSet.h"
#include "GrVkGpu.h"
#include "GrVkUniformHandler.h"

GrVkDescriptorSetManager::GrVkDescriptorSetManager(GrVkGpu* gpu,
                                                   VkDescriptorType type,
                                                   const GrVkUniformHandler* uniformHandler)
    : fPoolManager(type, gpu, uniformHandler) {
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(uniformHandler);
        for (int i = 0; i < uniformHandler->numSamplers(); ++i) {
            fBindingVisibilities.push_back(uniformHandler->samplerVisibility(i));
        }
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
        // We set the visibility of the first binding to the vertex shader and the second to the
        // fragment shader.
        fBindingVisibilities.push_back(kVertex_GrShaderFlag);
        fBindingVisibilities.push_back(kFragment_GrShaderFlag);
    }
}

GrVkDescriptorSetManager::GrVkDescriptorSetManager(GrVkGpu* gpu,
                                                   VkDescriptorType type,
                                                   const SkTArray<uint32_t>& visibilities)
    : fPoolManager(type, gpu, visibilities) {
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        for (int i = 0; i < visibilities.count(); ++i) {
            fBindingVisibilities.push_back(visibilities[i]);
        }
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
        SkASSERT(2 == visibilities.count() &&
                 kVertex_GrShaderFlag == visibilities[0] &&
                 kFragment_GrShaderFlag == visibilities[1]);
        // We set the visibility of the first binding to the vertex shader and the second to the
        // fragment shader.
        fBindingVisibilities.push_back(kVertex_GrShaderFlag);
        fBindingVisibilities.push_back(kFragment_GrShaderFlag);
    }
}

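// Hands out a descriptor set for the given handle. A set cached on fFreeSets is reused when one
// is available; otherwise a new VkDescriptorSet is allocated through the pool manager and wrapped
// in a GrVkDescriptorSet.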
const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
                                                                    const Handle& handle) {
    const GrVkDescriptorSet* ds = nullptr;
    int count = fFreeSets.count();
    if (count > 0) {
        ds = fFreeSets[count - 1];
        fFreeSets.removeShuffle(count - 1);
    } else {
        VkDescriptorSet vkDS;
        fPoolManager.getNewDescriptorSet(gpu, &vkDS);

        ds = new GrVkDescriptorSet(vkDS, fPoolManager.fPool, handle);
    }
    SkASSERT(ds);
    return ds;
}

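// Returned sets are stashed on fFreeSets so a later getDescriptorSet call can reuse them without
// allocating from the pool.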
void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}

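// Releases Vulkan resources while the device is still valid: the pool manager's layout and pool
// are freed and every cached free set is unreffed.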
void GrVkDescriptorSetManager::release(const GrVkGpu* gpu) {
    fPoolManager.freeGPUResources(gpu);

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unref(gpu);
    }
    fFreeSets.reset();
}

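// Abandon path for a lost device: handles are dropped without making any Vulkan calls.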
void GrVkDescriptorSetManager::abandon() {
    fPoolManager.abandonGPUResources();

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unrefAndAbandon();
    }
    fFreeSets.reset();
}

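// A manager can serve a request if the descriptor type matches and, for combined image samplers,
// every binding has the same shader-stage visibility in the same order.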
bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const GrVkUniformHandler* uniHandler) const {
    SkASSERT(uniHandler);
    if (type != fPoolManager.fDescType) {
        return false;
    }

    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
            return false;
        }
        for (int i = 0; i < uniHandler->numSamplers(); ++i) {
            if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i]) {
                return false;
            }
        }
    }
    return true;
}

bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const SkTArray<uint32_t>& visibilities) const {
    if (type != fPoolManager.fDescType) {
        return false;
    }

    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        if (fBindingVisibilities.count() != visibilities.count()) {
            return false;
        }
        for (int i = 0; i < visibilities.count(); ++i) {
            if (visibilities[i] != fBindingVisibilities[i]) {
                return false;
            }
        }
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////

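// Maps GrShaderFlag visibility bits onto the equivalent VkShaderStageFlags bits.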
VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
    VkShaderStageFlags flags = 0;

    if (visibility & kVertex_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (visibility & kGeometry_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    if (visibility & kFragment_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    return flags;
}

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorType type,
        GrVkGpu* gpu,
        const GrVkUniformHandler* uniformHandler)
    : fDescType(type)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
    this->init(gpu, type, uniformHandler, nullptr);
}

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorType type,
        GrVkGpu* gpu,
        const SkTArray<uint32_t>& visibilities)
    : fDescType(type)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
    this->init(gpu, type, nullptr, &visibilities);
}

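// Builds the VkDescriptorSetLayout for this manager: one combined-image-sampler binding per
// sampler (with stage flags taken from its visibility), or the fixed vertex/fragment uniform
// buffer pair. It then records how many descriptors each set consumes and creates the first pool.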
void GrVkDescriptorSetManager::DescriptorPoolManager::init(GrVkGpu* gpu,
                                                           VkDescriptorType type,
                                                           const GrVkUniformHandler* uniformHandler,
                                                           const SkTArray<uint32_t>* visibilities) {
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(SkToBool(uniformHandler) != SkToBool(visibilities));
        uint32_t numSamplers;
        if (uniformHandler) {
            numSamplers = (uint32_t)uniformHandler->numSamplers();
        } else {
            numSamplers = (uint32_t)visibilities->count();
        }

        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
            new VkDescriptorSetLayoutBinding[numSamplers]);
        for (uint32_t i = 0; i < numSamplers; ++i) {
            uint32_t visibility;
            if (uniformHandler) {
                visibility = uniformHandler->samplerVisibility(i);
            } else {
                visibility = (*visibilities)[i];
            }
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            dsSamplerBindings[i].pImmutableSamplers = nullptr;
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numSamplers;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numSamplers ? dsSamplerBindings.get() : nullptr;

        GR_VK_CALL_ERRCHECK(gpu->vkInterface(),
                            CreateDescriptorSetLayout(gpu->device(),
                                                      &dsSamplerLayoutCreateInfo,
                                                      nullptr,
                                                      &fDescLayout));
        fDescCountPerSet = numSamplers;
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
        // Create Uniform Buffer Descriptor
        // The vertex uniform buffer will have binding 0 and the fragment binding 1.
        VkDescriptorSetLayoutBinding dsUniBindings[kUniformDescPerSet];
        memset(&dsUniBindings, 0, 2 * sizeof(VkDescriptorSetLayoutBinding));
        dsUniBindings[0].binding = GrVkUniformHandler::kVertexBinding;
        dsUniBindings[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBindings[0].descriptorCount = 1;
        dsUniBindings[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
        dsUniBindings[0].pImmutableSamplers = nullptr;
        dsUniBindings[1].binding = GrVkUniformHandler::kFragBinding;
        dsUniBindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBindings[1].descriptorCount = 1;
        dsUniBindings[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
        dsUniBindings[1].pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 2;
        uniformLayoutCreateInfo.pBindings = dsUniBindings;

        GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateDescriptorSetLayout(gpu->device(),
                                                                          &uniformLayoutCreateInfo,
                                                                          nullptr,
                                                                          &fDescLayout));
        fDescCountPerSet = kUniformDescPerSet;
    }

    SkASSERT(fDescCountPerSet < kStartNumDescriptors);
    fMaxDescriptors = kStartNumDescriptors;
    SkASSERT(fMaxDescriptors > 0);
    this->getNewPool(gpu);
}

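// Replaces the current pool. When an old pool exists its ref is dropped and the target descriptor
// count grows by roughly 1.5x, capped at kMaxDescriptors, before a compatible pool is requested
// from the resource provider.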
void GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
    if (fPool) {
        fPool->unref(gpu);
        uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
        if (newPoolSize < kMaxDescriptors) {
            fMaxDescriptors = newPoolSize;
        } else {
            fMaxDescriptors = kMaxDescriptors;
        }
    }
    fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
                                                                         fMaxDescriptors);
    SkASSERT(fPool);
}

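// Allocates a single VkDescriptorSet. If the allocation would push the running descriptor count
// past the current pool's capacity, a new (larger) pool is fetched first and the count restarts.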
void GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    if (!fMaxDescriptors) {
        return;
    }
    fCurrentDescriptorCount += fDescCountPerSet;
    if (fCurrentDescriptorCount > fMaxDescriptors) {
        this->getNewPool(gpu);
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), AllocateDescriptorSets(gpu->device(),
                                                                   &dsAllocateInfo,
                                                                   ds));
}

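// Destroys the descriptor set layout and drops the pool ref while the device is still valid.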
void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(const GrVkGpu* gpu) {
    if (fDescLayout) {
        GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
                                                                  nullptr));
        fDescLayout = VK_NULL_HANDLE;
    }

    if (fPool) {
        fPool->unref(gpu);
        fPool = nullptr;
    }
}

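// Device-lost variant: clears the layout handle and abandons the pool without any Vulkan calls.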
void GrVkDescriptorSetManager::DescriptorPoolManager::abandonGPUResources() {
    fDescLayout = VK_NULL_HANDLE;
    if (fPool) {
        fPool->unrefAndAbandon();
        fPool = nullptr;
    }
}