/* Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015-2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Chris Forbes <chrisf@ijw.co.nz>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */
#ifndef CORE_VALIDATION_TYPES_H_
#define CORE_VALIDATION_TYPES_H_

#ifndef NOEXCEPT
// Check for noexcept support
#if defined(__clang__)
#if __has_feature(cxx_noexcept)
#define HAS_NOEXCEPT
#endif
#else
#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46
#define HAS_NOEXCEPT
#else
#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS
#define HAS_NOEXCEPT
#endif
#endif
#endif

#ifdef HAS_NOEXCEPT
#define NOEXCEPT noexcept
#else
#define NOEXCEPT
#endif
#endif

#include "vk_safe_struct.h"
#include "vulkan/vulkan.h"
#include <atomic>
#include <functional>
#include <map>
#include <string.h>
#include <unordered_map>
#include <unordered_set>
#include <vector>

// Fwd declarations
namespace cvdescriptorset {
class DescriptorSetLayout;
class DescriptorSet;
};

struct GLOBAL_CB_NODE;

class BASE_NODE {
  public:
    // Track when object is being used by an in-flight command buffer
    std::atomic_int in_use;
    // Track command buffers that this object is bound to
    //  binding initialized when cmd referencing object is bound to command buffer
    //  binding removed when command buffer is reset or destroyed
    // When an object is destroyed, any bound cbs are set to INVALID
    std::unordered_set<GLOBAL_CB_NODE *> cb_bindings;

    BASE_NODE() { in_use.store(0); };
};
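
// Illustrative sketch (not part of this header): one way the BASE_NODE bookkeeping
// above can be used by the layer. The helpers below are hypothetical; the real
// tracking code lives in core_validation.cpp.
//
//   // When a command referencing 'node' is recorded into command buffer 'cb':
//   void AddBinding(GLOBAL_CB_NODE *cb, BASE_NODE *node) {
//       node->cb_bindings.insert(cb); // remember which CBs reference this object
//   }
//   // When the command buffer is submitted:
//   void MarkInFlight(BASE_NODE *node) { node->in_use.fetch_add(1); }
//   // When the submission retires (fence signaled / queue waited on):
//   void RetireNode(BASE_NODE *node) { node->in_use.fetch_sub(1); }
//   // Destroying an object while node->in_use.load() != 0 means the app is deleting
//   // something still referenced by in-flight work, which the layer can flag.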

// Generic wrapper for vulkan objects
struct VK_OBJECT {
    uint64_t handle;
    VkDebugReportObjectTypeEXT type;
};

inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }

namespace std {
template <> struct hash<VK_OBJECT> {
    size_t operator()(VK_OBJECT obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
};
}

// Flags describing requirements imposed by the pipeline on a descriptor. These
// can't be checked at pipeline creation time as they depend on the Image or
// ImageView bound.
enum descriptor_req {
    DESCRIPTOR_REQ_VIEW_TYPE_1D = 1 << VK_IMAGE_VIEW_TYPE_1D,
    DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_1D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_2D = 1 << VK_IMAGE_VIEW_TYPE_2D,
    DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_2D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_3D = 1 << VK_IMAGE_VIEW_TYPE_3D,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE = 1 << VK_IMAGE_VIEW_TYPE_CUBE,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,

    DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS = (1 << (VK_IMAGE_VIEW_TYPE_END_RANGE + 1)) - 1,

    DESCRIPTOR_REQ_SINGLE_SAMPLE = 2 << VK_IMAGE_VIEW_TYPE_END_RANGE,
    DESCRIPTOR_REQ_MULTI_SAMPLE = DESCRIPTOR_REQ_SINGLE_SAMPLE << 1,
};
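
// Illustrative sketch (not part of this header): how a descriptor_req mask might be
// checked. At pipeline creation the shader's declared image dimensionality is folded
// into a mask; at draw time the mask is compared against the actually bound
// VkImageView. The helper below is hypothetical.
//
//   bool ViewSatisfiesReqs(const VkImageViewCreateInfo &view_ci, descriptor_req reqs) {
//       if ((reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) && !(reqs & (1 << view_ci.viewType)))
//           return false; // e.g. shader samples a 2D image but a CUBE view is bound
//       return true;      // sample-count bits would be checked against the image here
//   }
//
//   // A pipeline expecting a single-sampled 2D view would record:
//   //   reqs = DESCRIPTOR_REQ_VIEW_TYPE_2D | DESCRIPTOR_REQ_SINGLE_SAMPLE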

struct DESCRIPTOR_POOL_STATE : BASE_NODE {
    VkDescriptorPool pool;
    uint32_t maxSets;       // Max descriptor sets allowed in this pool
    uint32_t availableSets; // Available descriptor sets in this pool

    VkDescriptorPoolCreateInfo createInfo;
    std::unordered_set<cvdescriptorset::DescriptorSet *> sets; // Collection of all sets in this pool
    std::vector<uint32_t> maxDescriptorTypeCount;              // Max # of descriptors of each type in this pool
    std::vector<uint32_t> availableDescriptorTypeCount;        // Available # of descriptors of each type in this pool

    DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo)
        : pool(pool), maxSets(pCreateInfo->maxSets), availableSets(pCreateInfo->maxSets), createInfo(*pCreateInfo),
          maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0), availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0) {
        if (createInfo.poolSizeCount) { // Deep-copy the pPoolSizes array so it outlives the app's pointer
            size_t poolSizeBytes = createInfo.poolSizeCount * sizeof(VkDescriptorPoolSize);
            createInfo.pPoolSizes = new VkDescriptorPoolSize[createInfo.poolSizeCount];
            memcpy((void *)createInfo.pPoolSizes, pCreateInfo->pPoolSizes, poolSizeBytes);
            // Sum the per-type descriptor counts; the same descriptor type can appear in several pool sizes
            for (uint32_t i = 0; i < createInfo.poolSizeCount; ++i) {
                uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
                maxDescriptorTypeCount[typeIndex] += createInfo.pPoolSizes[i].descriptorCount;
                availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex];
            }
        } else {
            createInfo.pPoolSizes = NULL; // Make sure this is NULL so we don't try to clean it up
        }
    }
    ~DESCRIPTOR_POOL_STATE() {
        delete[] createInfo.pPoolSizes;
        // TODO : pSets are currently freed in deletePools function which uses freeShadowUpdateTree function
        //  need to migrate that struct to smart ptrs for auto-cleanup
    }
};
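
// Illustrative sketch (not part of this header): the accounting this state enables at
// vkAllocateDescriptorSets time. Hypothetical helper; 'required' is assumed to hold
// the requested descriptor counts already totaled per VkDescriptorType.
//
//   bool PoolCanSatisfy(const DESCRIPTOR_POOL_STATE *pool, uint32_t set_count,
//                       const std::vector<uint32_t> &required) {
//       if (pool->availableSets < set_count) return false; // would exceed maxSets
//       for (uint32_t type = 0; type < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++type) {
//           if (required[type] > pool->availableDescriptorTypeCount[type])
//               return false; // not enough descriptors of this type remain
//       }
//       return true; // on success the layer would decrement both "available" counters
//   }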

// Generic memory binding struct to track memory bound to an object
struct MEM_BINDING {
    VkDeviceMemory mem;
    VkDeviceSize offset;
    VkDeviceSize size;
};

inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT { return a.mem == b.mem && a.offset == b.offset && a.size == b.size; }

namespace std {
template <> struct hash<MEM_BINDING> {
    size_t operator()(MEM_BINDING mb) const NOEXCEPT {
        auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t &>(mb.mem)) ^ hash<uint64_t>()(mb.offset);
        return intermediate ^ hash<uint64_t>()(mb.size);
    }
};
}

// Superclass for bindable object state (currently images and buffers)
class BINDABLE : public BASE_NODE {
  public:
    bool sparse; // Is this object bound with sparse memory or not?
    // Non-sparse binding data
    MEM_BINDING binding;
    // Sparse binding data, initially just tracking MEM_BINDING per mem object
    //  There's more data for sparse bindings so need better long-term solution
    // TODO : Need to update solution to track all sparse binding data
    std::unordered_set<MEM_BINDING> sparse_bindings;
    BINDABLE() : sparse(false), binding{}, sparse_bindings{} {};
};

class BUFFER_NODE : public BINDABLE {
  public:
    VkBuffer buffer;
    VkBufferCreateInfo createInfo;
    BUFFER_NODE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) {
        if (createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
            sparse = true;
        }
    };

    BUFFER_NODE(BUFFER_NODE const &rh_obj) = delete;
};

class BUFFER_VIEW_STATE : public BASE_NODE {
  public:
    VkBufferView buffer_view;
    VkBufferViewCreateInfo create_info;
    BUFFER_VIEW_STATE(VkBufferView bv, const VkBufferViewCreateInfo *ci) : buffer_view(bv), create_info(*ci){};
    BUFFER_VIEW_STATE(const BUFFER_VIEW_STATE &rh_obj) = delete;
};

struct SAMPLER_STATE : public BASE_NODE {
    VkSampler sampler;
    VkSamplerCreateInfo createInfo;

    SAMPLER_STATE(const VkSampler *ps, const VkSamplerCreateInfo *pci) : sampler(*ps), createInfo(*pci){};
};

class IMAGE_STATE : public BINDABLE {
  public:
    VkImage image;
    VkImageCreateInfo createInfo;
    bool valid;    // For a swapchain image, track validity here since its backing memory has no DEVICE_MEM_INFO
    bool acquired; // If this is a swapchain image, has it been acquired by the app
    IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
        : image(img), createInfo(*pCreateInfo), valid(false), acquired(false) {
        if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
            sparse = true;
        }
    };

    IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete;
};

class IMAGE_VIEW_STATE : public BASE_NODE {
  public:
    VkImageView image_view;
    VkImageViewCreateInfo create_info;
    IMAGE_VIEW_STATE(VkImageView iv, const VkImageViewCreateInfo *ci) : image_view(iv), create_info(*ci){};
    IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
};

struct MemRange {
    VkDeviceSize offset;
    VkDeviceSize size;
};

struct MEMORY_RANGE {
    uint64_t handle;
    bool image;  // True for image, false for buffer
    bool linear; // True for buffers and linear images
    bool valid;  // True if this range is known to be valid
    VkDeviceMemory memory;
    VkDeviceSize start;
    VkDeviceSize size;
    VkDeviceSize end; // Store this pre-computed for simplicity
    // Set of ptrs to every range aliased with this one
    std::unordered_set<MEMORY_RANGE *> aliases;
};
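
// Illustrative sketch (not part of this header): the overlap test that the
// precomputed 'end' field keeps cheap. The half-open [start, end) convention below is
// an assumption; adjust the comparison if 'end' is stored inclusively.
//
//   bool RangesOverlap(const MEMORY_RANGE &a, const MEMORY_RANGE &b) {
//       if (a.memory != b.memory) return false; // different allocations never alias
//       return (a.start < b.end) && (b.start < a.end);
//   }
//   // When two bound ranges overlap, each would be added to the other's 'aliases' set
//   // so a write through one can clear the 'valid' flag of the other.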

// Data struct for tracking memory object
struct DEVICE_MEM_INFO : public BASE_NODE {
    void *object; // Dispatchable object used to create this memory (device or swapchain)
    bool global_valid; // If allocation is mapped, set to "true" to be picked up by subsequently bound ranges
    VkDeviceMemory mem;
    VkMemoryAllocateInfo alloc_info;
    std::unordered_set<VK_OBJECT> obj_bindings;         // objects bound to this memory
    std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges;     // Map of object to its binding range
    // Convenience sets of image/buffer handles to speed up iterating over images or buffers independently
    std::unordered_set<uint64_t> bound_images;
    std::unordered_set<uint64_t> bound_buffers;

    MemRange mem_range;
    void *shadow_copy_base;     // Base of layer's allocation for guard band, data, and alignment space
    void *shadow_copy;          // Pointer to start of guard-band data before mapped region
    uint64_t shadow_pad_size;   // Size of the guard-band data before and after actual data. It MUST be a
                                // multiple of limits.minMemoryMapAlignment
    void *p_driver_data;        // Pointer to application's actual memory

    DEVICE_MEM_INFO(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
        : object(disp_object), global_valid(false), mem(in_mem), alloc_info(*p_alloc_info), mem_range{}, shadow_copy_base(0),
          shadow_copy(0), shadow_pad_size(0), p_driver_data(0){};
};
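
// Illustrative sketch (not part of this header) of the shadow-copy layout implied by
// the fields above for a mapped range; exact padding and fill values are assumptions,
// only the relationship between the pointers is the point.
//
//   shadow_copy_base                                    raw allocation (unaligned)
//   shadow_copy                                         aligned up to minMemoryMapAlignment
//   [shadow_copy, shadow_copy + shadow_pad_size)        leading guard band
//   [shadow_copy + shadow_pad_size, ... + mapped size)  copy handed back to the app by vkMapMemory
//   [..., ... + shadow_pad_size)                        trailing guard band
//
//   p_driver_data keeps the pointer returned by the driver's vkMapMemory so the data
//   can be copied back on flush/unmap and the guard bands checked for overwrites.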

class SWAPCHAIN_NODE {
  public:
    safe_VkSwapchainCreateInfoKHR createInfo;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> images;
    SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo, VkSwapchainKHR swapchain)
        : createInfo(pCreateInfo), swapchain(swapchain) {}
};

enum DRAW_TYPE {
    DRAW = 0,
    DRAW_INDEXED = 1,
    DRAW_INDIRECT = 2,
    DRAW_INDEXED_INDIRECT = 3,
    DRAW_BEGIN_RANGE = DRAW,
    DRAW_END_RANGE = DRAW_INDEXED_INDIRECT,
    NUM_DRAW_TYPES = (DRAW_END_RANGE - DRAW_BEGIN_RANGE + 1),
};

class IMAGE_CMD_BUF_LAYOUT_NODE {
  public:
    IMAGE_CMD_BUF_LAYOUT_NODE() = default;
    IMAGE_CMD_BUF_LAYOUT_NODE(VkImageLayout initialLayoutInput, VkImageLayout layoutInput)
        : initialLayout(initialLayoutInput), layout(layoutInput) {}

    VkImageLayout initialLayout;
    VkImageLayout layout;
};

// Store the DAG.
struct DAGNode {
    uint32_t pass;
    std::vector<uint32_t> prev;
    std::vector<uint32_t> next;
};

struct RENDER_PASS_STATE : public BASE_NODE {
    VkRenderPass renderPass;
    safe_VkRenderPassCreateInfo createInfo;
    std::vector<bool> hasSelfDependency;
    std::vector<DAGNode> subpassToNode;
    std::unordered_map<uint32_t, bool> attachment_first_read;
    std::unordered_map<uint32_t, VkImageLayout> attachment_first_layout;

    RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) : createInfo(pCreateInfo) {}
};

// Cmd Buffer Tracking
enum CMD_TYPE {
    CMD_BINDPIPELINE,
    CMD_BINDPIPELINEDELTA,
    CMD_SETVIEWPORTSTATE,
    CMD_SETSCISSORSTATE,
    CMD_SETLINEWIDTHSTATE,
    CMD_SETDEPTHBIASSTATE,
    CMD_SETBLENDSTATE,
    CMD_SETDEPTHBOUNDSSTATE,
    CMD_SETSTENCILREADMASKSTATE,
    CMD_SETSTENCILWRITEMASKSTATE,
    CMD_SETSTENCILREFERENCESTATE,
    CMD_BINDDESCRIPTORSETS,
    CMD_BINDINDEXBUFFER,
    CMD_BINDVERTEXBUFFER,
    CMD_DRAW,
    CMD_DRAWINDEXED,
    CMD_DRAWINDIRECT,
    CMD_DRAWINDEXEDINDIRECT,
    CMD_DISPATCH,
    CMD_DISPATCHINDIRECT,
    CMD_COPYBUFFER,
    CMD_COPYIMAGE,
    CMD_BLITIMAGE,
    CMD_COPYBUFFERTOIMAGE,
    CMD_COPYIMAGETOBUFFER,
    CMD_CLONEIMAGEDATA,
    CMD_UPDATEBUFFER,
    CMD_FILLBUFFER,
    CMD_CLEARCOLORIMAGE,
    CMD_CLEARATTACHMENTS,
    CMD_CLEARDEPTHSTENCILIMAGE,
    CMD_RESOLVEIMAGE,
    CMD_SETEVENT,
    CMD_RESETEVENT,
    CMD_WAITEVENTS,
    CMD_PIPELINEBARRIER,
    CMD_BEGINQUERY,
    CMD_ENDQUERY,
    CMD_RESETQUERYPOOL,
    CMD_COPYQUERYPOOLRESULTS,
    CMD_WRITETIMESTAMP,
    CMD_PUSHCONSTANTS,
    CMD_INITATOMICCOUNTERS,
    CMD_LOADATOMICCOUNTERS,
    CMD_SAVEATOMICCOUNTERS,
    CMD_BEGINRENDERPASS,
    CMD_NEXTSUBPASS,
    CMD_ENDRENDERPASS,
    CMD_EXECUTECOMMANDS,
    CMD_END, // Should be last command in any RECORDED cmd buffer
};

// Data structure for holding sequence of cmds in cmd buffer
struct CMD_NODE {
    CMD_TYPE type;
    uint64_t cmdNumber;
};

enum CB_STATE {
    CB_NEW,       // Newly created CB w/o any cmds
    CB_RECORDING, // BeginCB has been called on this CB
    CB_RECORDED,  // EndCB has been called on this CB
    CB_INVALID    // CB had a bound descriptor set destroyed or updated
};

// CB Status -- used to track status of various bindings on cmd buffer objects
typedef VkFlags CBStatusFlags;
enum CBStatusFlagBits {
    // clang-format off
    CBSTATUS_NONE                   = 0x00000000,   // No status is set
    CBSTATUS_LINE_WIDTH_SET         = 0x00000001,   // Line width has been set
    CBSTATUS_DEPTH_BIAS_SET         = 0x00000002,   // Depth bias has been set
    CBSTATUS_BLEND_CONSTANTS_SET    = 0x00000004,   // Blend constants state has been set
    CBSTATUS_DEPTH_BOUNDS_SET       = 0x00000008,   // Depth bounds state object has been set
    CBSTATUS_STENCIL_READ_MASK_SET  = 0x00000010,   // Stencil read mask has been set
    CBSTATUS_STENCIL_WRITE_MASK_SET = 0x00000020,   // Stencil write mask has been set
    CBSTATUS_STENCIL_REFERENCE_SET  = 0x00000040,   // Stencil reference has been set
    CBSTATUS_INDEX_BUFFER_BOUND     = 0x00000080,   // Index buffer has been set
    CBSTATUS_ALL_STATE_SET          = 0x0000007F,   // All state set (intentionally exclude index buffer)
    // clang-format on
};
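
// Illustrative sketch (not part of this header): how the status flags above are
// typically maintained. Hypothetical helpers; the real updates happen in the
// vkCmdSet* entry points and at draw-time validation.
//
//   // Recording vkCmdSetLineWidth on a command buffer:
//   void OnSetLineWidth(GLOBAL_CB_NODE *cb) { cb->status |= CBSTATUS_LINE_WIDTH_SET; }
//
//   // At draw time, every dynamic state the bound pipeline leaves dynamic must be set:
//   bool DynamicStateReady(const GLOBAL_CB_NODE *cb, CBStatusFlags required) {
//       return (cb->status & required) == required;
//   }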

struct QueryObject {
    VkQueryPool pool;
    uint32_t index;
};

inline bool operator==(const QueryObject &query1, const QueryObject &query2) {
    return (query1.pool == query2.pool && query1.index == query2.index);
}

namespace std {
template <> struct hash<QueryObject> {
    size_t operator()(QueryObject query) const throw() {
        return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index);
    }
};
}
struct DRAW_DATA { std::vector<VkBuffer> buffers; };

struct ImageSubresourcePair {
    VkImage image;
    bool hasSubresource;
    VkImageSubresource subresource;
};

inline bool operator==(const ImageSubresourcePair &img1, const ImageSubresourcePair &img2) {
    if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource)
        return false;
    return !img1.hasSubresource ||
           (img1.subresource.aspectMask == img2.subresource.aspectMask && img1.subresource.mipLevel == img2.subresource.mipLevel &&
            img1.subresource.arrayLayer == img2.subresource.arrayLayer);
}

namespace std {
template <> struct hash<ImageSubresourcePair> {
    size_t operator()(ImageSubresourcePair img) const throw() {
        size_t hashVal = hash<uint64_t>()(reinterpret_cast<uint64_t &>(img.image));
        hashVal ^= hash<bool>()(img.hasSubresource);
        if (img.hasSubresource) {
            hashVal ^= hash<uint32_t>()(reinterpret_cast<uint32_t &>(img.subresource.aspectMask));
            hashVal ^= hash<uint32_t>()(img.subresource.mipLevel);
            hashVal ^= hash<uint32_t>()(img.subresource.arrayLayer);
        }
        return hashVal;
    }
};
}

// Store layouts and pushconstants for PipelineLayout
struct PIPELINE_LAYOUT_NODE {
    VkPipelineLayout layout;
    std::vector<cvdescriptorset::DescriptorSetLayout const *> set_layouts;
    std::vector<VkPushConstantRange> push_constant_ranges;

    PIPELINE_LAYOUT_NODE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{} {}

    void reset() {
        layout = VK_NULL_HANDLE;
        set_layouts.clear();
        push_constant_ranges.clear();
    }
};

class PIPELINE_STATE : public BASE_NODE {
  public:
    VkPipeline pipeline;
    safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
    safe_VkComputePipelineCreateInfo computePipelineCI;
    // Flag of which shader stages are active for this pipeline
    uint32_t active_shaders;
    uint32_t duplicate_shaders;
    // Capture which slots (set#->bindings) are actually used by the shaders of this pipeline
    std::unordered_map<uint32_t, std::map<uint32_t, descriptor_req>> active_slots;
    // Vtx input info (if any)
    std::vector<VkVertexInputBindingDescription> vertexBindingDescriptions;
    std::vector<VkVertexInputAttributeDescription> vertexAttributeDescriptions;
    std::vector<VkPipelineColorBlendAttachmentState> attachments;
    bool blendConstantsEnabled; // Blend constants enabled for any attachments
    // Store RPCI b/c renderPass may be destroyed after Pipeline creation
    safe_VkRenderPassCreateInfo render_pass_ci;
    PIPELINE_LAYOUT_NODE pipeline_layout;

    // Default constructor
    PIPELINE_STATE()
        : pipeline{}, graphicsPipelineCI{}, computePipelineCI{}, active_shaders(0), duplicate_shaders(0), active_slots(),
          vertexBindingDescriptions(), vertexAttributeDescriptions(), attachments(), blendConstantsEnabled(false), render_pass_ci(),
          pipeline_layout() {}

    void initGraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo) {
        graphicsPipelineCI.initialize(pCreateInfo);
        // Make sure compute pipeline is null
        VkComputePipelineCreateInfo emptyComputeCI = {};
        computePipelineCI.initialize(&emptyComputeCI);
        for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
            const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
            this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
            this->active_shaders |= pPSSCI->stage;
        }
        if (pCreateInfo->pVertexInputState) {
            const VkPipelineVertexInputStateCreateInfo *pVICI = pCreateInfo->pVertexInputState;
            if (pVICI->vertexBindingDescriptionCount) {
                this->vertexBindingDescriptions = std::vector<VkVertexInputBindingDescription>(
                    pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
            }
            if (pVICI->vertexAttributeDescriptionCount) {
                this->vertexAttributeDescriptions = std::vector<VkVertexInputAttributeDescription>(
                    pVICI->pVertexAttributeDescriptions,
                    pVICI->pVertexAttributeDescriptions + pVICI->vertexAttributeDescriptionCount);
            }
        }
        if (pCreateInfo->pColorBlendState) {
            const VkPipelineColorBlendStateCreateInfo *pCBCI = pCreateInfo->pColorBlendState;
            if (pCBCI->attachmentCount) {
                this->attachments = std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments,
                                                                                     pCBCI->pAttachments + pCBCI->attachmentCount);
            }
        }
    }
    void initComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo) {
        computePipelineCI.initialize(pCreateInfo);
        // Make sure gfx pipeline is null
        VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
        graphicsPipelineCI.initialize(&emptyGraphicsCI);
        switch (computePipelineCI.stage.stage) {
        case VK_SHADER_STAGE_COMPUTE_BIT:
            this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
            break;
        default:
            // TODO : Flag error
            break;
        }
    }
};

// Track last states that are bound per pipeline bind point (Gfx & Compute)
struct LAST_BOUND_STATE {
    PIPELINE_STATE *pipeline_state;
    PIPELINE_LAYOUT_NODE pipeline_layout;
    // Track each set that has been bound
    // Ordered bound set tracking where index is set# that given set is bound to
    std::vector<cvdescriptorset::DescriptorSet *> boundDescriptorSets;
    // one dynamic offset per dynamic descriptor bound to this CB
    std::vector<std::vector<uint32_t>> dynamicOffsets;

    void reset() {
        pipeline_state = nullptr;
        pipeline_layout.reset();
        boundDescriptorSets.clear();
        dynamicOffsets.clear();
    }
};
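
// Illustrative sketch (not part of this header): how a vkCmdBindDescriptorSets call
// might be folded into LAST_BOUND_STATE. Hypothetical helper; the dynamic offsets are
// assumed to be pre-split into one vector per set being bound.
//
//   void BindSets(LAST_BOUND_STATE &last, uint32_t first_set,
//                 const std::vector<cvdescriptorset::DescriptorSet *> &sets,
//                 const std::vector<std::vector<uint32_t>> &offsets_per_set) {
//       if (last.boundDescriptorSets.size() < first_set + sets.size()) {
//           last.boundDescriptorSets.resize(first_set + sets.size(), nullptr);
//           last.dynamicOffsets.resize(first_set + sets.size());
//       }
//       for (size_t i = 0; i < sets.size(); ++i) {
//           last.boundDescriptorSets[first_set + i] = sets[i]; // index == set number
//           last.dynamicOffsets[first_set + i] = offsets_per_set[i];
//       }
//   }
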
// Cmd Buffer Wrapper Struct - TODO : This desperately needs its own class
struct GLOBAL_CB_NODE : public BASE_NODE {
    VkCommandBuffer commandBuffer;
    VkCommandBufferAllocateInfo createInfo;
    VkCommandBufferBeginInfo beginInfo;
    VkCommandBufferInheritanceInfo inheritanceInfo;
    VkDevice device;                    // device this CB belongs to
    uint64_t numCmds;                   // number of cmds in this CB
    uint64_t drawCount[NUM_DRAW_TYPES]; // Count of each type of draw in this CB
    CB_STATE state;                     // Track cmd buffer update state
    uint64_t submitCount;               // Number of times CB has been submitted
    CBStatusFlags status;               // Track status of various bindings on cmd buffer
    std::vector<CMD_NODE> cmds;         // vector of commands bound to this command buffer
    // Currently storing "lastBound" objects on per-CB basis
    //  long-term may want to create caches of "lastBound" states and could have
    //  each individual CMD_NODE referencing its own "lastBound" state
    // Store last bound state for Gfx & Compute pipeline bind points
    LAST_BOUND_STATE lastBound[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

    uint32_t viewportMask;
    uint32_t scissorMask;
    VkRenderPassBeginInfo activeRenderPassBeginInfo;
    RENDER_PASS_STATE *activeRenderPass;
    VkSubpassContents activeSubpassContents;
    uint32_t activeSubpass;
    VkFramebuffer activeFramebuffer;
    std::unordered_set<VkFramebuffer> framebuffers;
    // Unified data structs to track objects bound to this command buffer as well as object
    //  dependencies that have been broken : either destroyed objects, or updated descriptor sets
    std::unordered_set<VK_OBJECT> object_bindings;
    std::vector<VK_OBJECT> broken_bindings;

    std::unordered_set<VkEvent> waitedEvents;
    std::vector<VkEvent> writeEventsBeforeWait;
    std::vector<VkEvent> events;
    std::unordered_map<QueryObject, std::unordered_set<VkEvent>> waitedEventsBeforeQueryReset;
    std::unordered_map<QueryObject, bool> queryToStateMap; // false = unavailable, true = available
    std::unordered_set<QueryObject> activeQueries;
    std::unordered_set<QueryObject> startedQueries;
    std::unordered_map<ImageSubresourcePair, IMAGE_CMD_BUF_LAYOUT_NODE> imageLayoutMap;
    std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> imageSubresourceMap;
    std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;
    std::vector<DRAW_DATA> drawData;
    DRAW_DATA currentDrawData;
    VkCommandBuffer primaryCommandBuffer;
    // Track images and buffers that are updated by this CB at the point of a draw
    std::unordered_set<VkImageView> updateImages;
    std::unordered_set<VkBuffer> updateBuffers;
    // If cmd buffer is primary, track secondary command buffers pending execution
    std::unordered_set<VkCommandBuffer> secondaryCommandBuffers;
    // MTMTODO : Scrub these data fields and merge active sets w/ lastBound as appropriate
    std::vector<std::function<bool()>> validate_functions;
    std::unordered_set<VkDeviceMemory> memObjs;
    std::vector<std::function<bool(VkQueue)>> eventUpdates;
    std::vector<std::function<bool(VkQueue)>> queryUpdates;
};
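
// Illustrative sketch (not part of this header): why eventUpdates/queryUpdates above
// are stored as std::function<bool(VkQueue)>. Effects such as vkCmdSetEvent only take
// hold when the command buffer actually executes, so recording captures a closure
// that is replayed at vkQueueSubmit time. Names below are hypothetical.
//
//   // While recording vkCmdSetEvent on 'cb_node':
//   cb_node->eventUpdates.emplace_back([=](VkQueue queue) {
//       // update the tracked state of the event for this queue; return true to skip
//       return false;
//   });
//
//   // At vkQueueSubmit, after per-CB validation:
//   bool skip = false;
//   for (auto &fn : cb_node->eventUpdates) skip |= fn(queue);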

struct SEMAPHORE_WAIT {
    VkSemaphore semaphore;
    VkQueue queue;
    uint64_t seq;
};

struct CB_SUBMISSION {
    CB_SUBMISSION(std::vector<VkCommandBuffer> const &cbs, std::vector<SEMAPHORE_WAIT> const &waitSemaphores,
                  std::vector<VkSemaphore> const &signalSemaphores, VkFence fence)
        : cbs(cbs), waitSemaphores(waitSemaphores), signalSemaphores(signalSemaphores), fence(fence) {}

    std::vector<VkCommandBuffer> cbs;
    std::vector<SEMAPHORE_WAIT> waitSemaphores;
    std::vector<VkSemaphore> signalSemaphores;
    VkFence fence;
};

// Fwd declarations of layer_data and helpers to look-up/validate state from layer_data maps
namespace core_validation {
struct layer_data;
cvdescriptorset::DescriptorSet *getSetNode(const layer_data *, VkDescriptorSet);
cvdescriptorset::DescriptorSetLayout const *getDescriptorSetLayout(layer_data const *, VkDescriptorSetLayout);
DESCRIPTOR_POOL_STATE *getDescriptorPoolState(const layer_data *, const VkDescriptorPool);
BUFFER_NODE *getBufferNode(const layer_data *, VkBuffer);
IMAGE_STATE *getImageState(const layer_data *, VkImage);
DEVICE_MEM_INFO *getMemObjInfo(const layer_data *, VkDeviceMemory);
BUFFER_VIEW_STATE *getBufferViewState(const layer_data *, VkBufferView);
SAMPLER_STATE *getSamplerState(const layer_data *, VkSampler);
IMAGE_VIEW_STATE *getImageViewState(const layer_data *, VkImageView);
VkSwapchainKHR getSwapchainFromImage(const layer_data *, VkImage);
SWAPCHAIN_NODE *getSwapchainNode(const layer_data *, VkSwapchainKHR);
void invalidateCommandBuffers(std::unordered_set<GLOBAL_CB_NODE *>, VK_OBJECT);
bool ValidateMemoryIsBoundToBuffer(const layer_data *, const BUFFER_NODE *, const char *);
bool ValidateMemoryIsBoundToImage(const layer_data *, const IMAGE_STATE *, const char *);
void AddCommandBufferBindingSampler(GLOBAL_CB_NODE *, SAMPLER_STATE *);
void AddCommandBufferBindingImage(const layer_data *, GLOBAL_CB_NODE *, IMAGE_STATE *);
void AddCommandBufferBindingImageView(const layer_data *, GLOBAL_CB_NODE *, IMAGE_VIEW_STATE *);
void AddCommandBufferBindingBuffer(const layer_data *, GLOBAL_CB_NODE *, BUFFER_NODE *);
void AddCommandBufferBindingBufferView(const layer_data *, GLOBAL_CB_NODE *, BUFFER_VIEW_STATE *);
}

#endif // CORE_VALIDATION_TYPES_H_