// Home | History | Annotate | Download | only in layers
      1 /* Copyright (c) 2015-2016 The Khronos Group Inc.
      2  * Copyright (c) 2015-2016 Valve Corporation
      3  * Copyright (c) 2015-2016 LunarG, Inc.
      4  * Copyright (C) 2015-2016 Google Inc.
      5  *
      6  * Licensed under the Apache License, Version 2.0 (the "License");
      7  * you may not use this file except in compliance with the License.
      8  * You may obtain a copy of the License at
      9  *
     10  *     http://www.apache.org/licenses/LICENSE-2.0
     11  *
     12  * Unless required by applicable law or agreed to in writing, software
     13  * distributed under the License is distributed on an "AS IS" BASIS,
     14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15  * See the License for the specific language governing permissions and
     16  * limitations under the License.
     17  *
     18  * Author: Courtney Goeltzenleuchter <courtneygo (at) google.com>
     19  * Author: Tobin Ehlis <tobine (at) google.com>
     20  * Author: Chris Forbes <chrisf (at) ijw.co.nz>
     21  * Author: Mark Lobodzinski <mark (at) lunarg.com>
     22  */
     23 #ifndef CORE_VALIDATION_TYPES_H_
     24 #define CORE_VALIDATION_TYPES_H_
     25 
#include "vk_safe_struct.h"
#include "vulkan/vulkan.h"
#include "vk_validation_error_messages.h"
#include "vk_layer_logging.h"
#include "vk_object_types.h"
#include "vk_extension_helper.h"
#include <atomic>
#include <functional>
#include <list>
#include <map>
#include <memory>
#include <string.h>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
     41 
     42 // Fwd declarations
     43 namespace cvdescriptorset {
     44 class DescriptorSetLayout;
     45 class DescriptorSet;
     46 }  // namespace cvdescriptorset
     47 
     48 struct GLOBAL_CB_NODE;
     49 
// Tracks an app's progress through the standard two-call Vulkan enumeration
// pattern (first call queries a count, second call retrieves the data).
enum CALL_STATE {
    UNCALLED,       // Function has not been called
    QUERY_COUNT,    // Function called once to query a count
    QUERY_DETAILS,  // Function called w/ a count to query details
};
     55 
     56 class BASE_NODE {
     57    public:
     58     // Track when object is being used by an in-flight command buffer
     59     std::atomic_int in_use;
     60     // Track command buffers that this object is bound to
     61     //  binding initialized when cmd referencing object is bound to command buffer
     62     //  binding removed when command buffer is reset or destroyed
     63     // When an object is destroyed, any bound cbs are set to INVALID
     64     std::unordered_set<GLOBAL_CB_NODE *> cb_bindings;
     65 
     66     BASE_NODE() { in_use.store(0); };
     67 };
     68 
// Track command pools and their command buffers
struct COMMAND_POOL_NODE : public BASE_NODE {
    VkCommandPoolCreateFlags createFlags;  // Flags the pool was created with
    uint32_t queueFamilyIndex;             // Queue family index the pool was created for
    // Cmd buffers allocated from this pool
    std::unordered_set<VkCommandBuffer> commandBuffers;
};
     76 
// Generic wrapper for vulkan objects: the raw 64-bit handle plus the
// layer-internal enum identifying what kind of object it is.
struct VK_OBJECT {
    uint64_t handle;
    VulkanObjectType type;
};
     82 
     83 inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }
     84 
     85 namespace std {
     86 template <>
     87 struct hash<VK_OBJECT> {
     88     size_t operator()(VK_OBJECT obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
     89 };
     90 }  // namespace std
     91 
// Cached physical-device data: the core device properties plus the
// per-queue-family properties array.
class PHYS_DEV_PROPERTIES_NODE {
   public:
    VkPhysicalDeviceProperties properties;
    std::vector<VkQueueFamilyProperties> queue_family_properties;
};
     97 
// Flags describing requirements imposed by the pipeline on a descriptor. These
// can't be checked at pipeline creation time as they depend on the Image or
// ImageView bound.
enum descriptor_req {
    // One bit per VkImageViewType enumerator: the view type(s) the shader expects.
    DESCRIPTOR_REQ_VIEW_TYPE_1D = 1 << VK_IMAGE_VIEW_TYPE_1D,
    DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_1D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_2D = 1 << VK_IMAGE_VIEW_TYPE_2D,
    DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_2D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_3D = 1 << VK_IMAGE_VIEW_TYPE_3D,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE = 1 << VK_IMAGE_VIEW_TYPE_CUBE,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,

    // Mask covering all of the view-type bits above.
    DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS = (1 << (VK_IMAGE_VIEW_TYPE_END_RANGE + 1)) - 1,

    // Sample-count requirement bits, allocated just above the view-type bits.
    DESCRIPTOR_REQ_SINGLE_SAMPLE = 2 << VK_IMAGE_VIEW_TYPE_END_RANGE,
    DESCRIPTOR_REQ_MULTI_SAMPLE = DESCRIPTOR_REQ_SINGLE_SAMPLE << 1,
};
    115 
    116 struct DESCRIPTOR_POOL_STATE : BASE_NODE {
    117     VkDescriptorPool pool;
    118     uint32_t maxSets;        // Max descriptor sets allowed in this pool
    119     uint32_t availableSets;  // Available descriptor sets in this pool
    120 
    121     safe_VkDescriptorPoolCreateInfo createInfo;
    122     std::unordered_set<cvdescriptorset::DescriptorSet *> sets;  // Collection of all sets in this pool
    123     std::vector<uint32_t> maxDescriptorTypeCount;               // Max # of descriptors of each type in this pool
    124     std::vector<uint32_t> availableDescriptorTypeCount;         // Available # of descriptors of each type in this pool
    125 
    126     DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo)
    127         : pool(pool),
    128           maxSets(pCreateInfo->maxSets),
    129           availableSets(pCreateInfo->maxSets),
    130           createInfo(pCreateInfo),
    131           maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0),
    132           availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0) {
    133         // Collect maximums per descriptor type.
    134         for (uint32_t i = 0; i < createInfo.poolSizeCount; ++i) {
    135             uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
    136             // Same descriptor types can appear several times
    137             maxDescriptorTypeCount[typeIndex] += createInfo.pPoolSizes[i].descriptorCount;
    138             availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex];
    139         }
    140     }
    141 };
    142 
// Generic memory binding struct to track objects bound to objects
struct MEM_BINDING {
    VkDeviceMemory mem;   // Memory object the resource is bound to
    VkDeviceSize offset;  // Byte offset of the binding within mem
    VkDeviceSize size;    // Byte size of the binding
};
    149 
    150 inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT { return a.mem == b.mem && a.offset == b.offset && a.size == b.size; }
    151 
namespace std {
// Hash specialization so MEM_BINDING can be stored in unordered containers
// (see BINDABLE::sparse_bindings below).
template <>
struct hash<MEM_BINDING> {
    size_t operator()(MEM_BINDING mb) const NOEXCEPT {
        // The handle is reinterpreted in place as a uint64_t lvalue so it can
        // be fed to the integer hasher, then XOR-combined with offset and size.
        auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t &>(mb.mem)) ^ hash<uint64_t>()(mb.offset);
        return intermediate ^ hash<uint64_t>()(mb.size);
    }
};
}  // namespace std
    161 
    162 // Superclass for bindable object state (currently images and buffers)
    163 class BINDABLE : public BASE_NODE {
    164    public:
    165     bool sparse;  // Is this object being bound with sparse memory or not?
    166     // Non-sparse binding data
    167     MEM_BINDING binding;
    168     // Memory requirements for this BINDABLE
    169     VkMemoryRequirements requirements;
    170     // bool to track if memory requirements were checked
    171     bool memory_requirements_checked;
    172     // Sparse binding data, initially just tracking MEM_BINDING per mem object
    173     //  There's more data for sparse bindings so need better long-term solution
    174     // TODO : Need to update solution to track all sparse binding data
    175     std::unordered_set<MEM_BINDING> sparse_bindings;
    176 
    177     std::unordered_set<VkDeviceMemory> bound_memory_set_;
    178 
    179     BINDABLE()
    180         : sparse(false), binding{}, requirements{}, memory_requirements_checked(false), sparse_bindings{}, bound_memory_set_{} {};
    181 
    182     // Update the cached set of memory bindings.
    183     // Code that changes binding.mem or sparse_bindings must call UpdateBoundMemorySet()
    184     void UpdateBoundMemorySet() {
    185         bound_memory_set_.clear();
    186         if (!sparse) {
    187             bound_memory_set_.insert(binding.mem);
    188         } else {
    189             for (auto sb : sparse_bindings) {
    190                 bound_memory_set_.insert(sb.mem);
    191             }
    192         }
    193     }
    194 
    195     // Return unordered set of memory objects that are bound
    196     // Instead of creating a set from scratch each query, return the cached one
    197     const std::unordered_set<VkDeviceMemory> &GetBoundMemory() const { return bound_memory_set_; }
    198 };
    199 
    200 class BUFFER_STATE : public BINDABLE {
    201    public:
    202     VkBuffer buffer;
    203     VkBufferCreateInfo createInfo;
    204     BUFFER_STATE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) {
    205         if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
    206             uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
    207             for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
    208                 pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
    209             }
    210             createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
    211         }
    212 
    213         if (createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
    214             sparse = true;
    215         }
    216     };
    217 
    218     BUFFER_STATE(BUFFER_STATE const &rh_obj) = delete;
    219 
    220     ~BUFFER_STATE() {
    221         if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
    222             delete[] createInfo.pQueueFamilyIndices;
    223             createInfo.pQueueFamilyIndices = nullptr;
    224         }
    225     };
    226 };
    227 
// State tracker for a VkBufferView; keeps a copy of the create info.
class BUFFER_VIEW_STATE : public BASE_NODE {
   public:
    VkBufferView buffer_view;
    VkBufferViewCreateInfo create_info;
    BUFFER_VIEW_STATE(VkBufferView bv, const VkBufferViewCreateInfo *ci) : buffer_view(bv), create_info(*ci){};
    // Copying of state objects is disallowed.
    BUFFER_VIEW_STATE(const BUFFER_VIEW_STATE &rh_obj) = delete;
};
    235 
// State tracker for a VkSampler; keeps a copy of the create info.
struct SAMPLER_STATE : public BASE_NODE {
    VkSampler sampler;
    VkSamplerCreateInfo createInfo;

    SAMPLER_STATE(const VkSampler *ps, const VkSamplerCreateInfo *pci) : sampler(*ps), createInfo(*pci){};
};
    242 
    243 class IMAGE_STATE : public BINDABLE {
    244    public:
    245     VkImage image;
    246     VkImageCreateInfo createInfo;
    247     bool valid;                   // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEM_INFO
    248     bool acquired;                // If this is a swapchain image, has it been acquired by the app.
    249     bool shared_presentable;      // True for a front-buffered swapchain image
    250     bool layout_locked;           // A front-buffered image that has been presented can never have layout transitioned
    251     bool get_sparse_reqs_called;  // Track if GetImageSparseMemoryRequirements() has been called for this image
    252     bool sparse_metadata_required;  // Track if sparse metadata aspect is required for this image
    253     bool sparse_metadata_bound;     // Track if sparse metadata aspect is bound to this image
    254     std::vector<VkSparseImageMemoryRequirements> sparse_requirements;
    255     IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
    256         : image(img),
    257           createInfo(*pCreateInfo),
    258           valid(false),
    259           acquired(false),
    260           shared_presentable(false),
    261           layout_locked(false),
    262           get_sparse_reqs_called(false),
    263           sparse_metadata_required(false),
    264           sparse_metadata_bound(false),
    265           sparse_requirements{} {
    266         if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
    267             uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
    268             for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
    269                 pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
    270             }
    271             createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
    272         }
    273 
    274         if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
    275             sparse = true;
    276         }
    277     };
    278 
    279     IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete;
    280 
    281     ~IMAGE_STATE() {
    282         if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
    283             delete[] createInfo.pQueueFamilyIndices;
    284             createInfo.pQueueFamilyIndices = nullptr;
    285         }
    286     };
    287 };
    288 
// State tracker for a VkImageView; keeps a copy of the create info.
class IMAGE_VIEW_STATE : public BASE_NODE {
   public:
    VkImageView image_view;
    VkImageViewCreateInfo create_info;
    IMAGE_VIEW_STATE(VkImageView iv, const VkImageViewCreateInfo *ci) : image_view(iv), create_info(*ci){};
    // Copying of state objects is disallowed.
    IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
};
    296 
// Offset/size pair describing a range within a memory allocation
// (used by DEVICE_MEM_INFO::mem_range below).
struct MemRange {
    VkDeviceSize offset;
    VkDeviceSize size;
};
    301 
// A single object's binding range within a memory object, with links to any
// other ranges it aliases.
struct MEMORY_RANGE {
    uint64_t handle;  // Handle of the image or buffer owning this range
    bool image;   // True for image, false for buffer
    bool linear;  // True for buffers and linear images
    bool valid;   // True if this range is known to be valid
    VkDeviceMemory memory;
    VkDeviceSize start;
    VkDeviceSize size;
    VkDeviceSize end;  // Store this pre-computed for simplicity
    // Set of ptrs to every range aliased with this one
    std::unordered_set<MEMORY_RANGE *> aliases;
};
    314 
    315 // Data struct for tracking memory object
    316 struct DEVICE_MEM_INFO : public BASE_NODE {
    317     void *object;       // Dispatchable object used to create this memory (device of swapchain)
    318     bool global_valid;  // If allocation is mapped or external, set to "true" to be picked up by subsequently bound ranges
    319     VkDeviceMemory mem;
    320     VkMemoryAllocateInfo alloc_info;
    321     std::unordered_set<VK_OBJECT> obj_bindings;               // objects bound to this memory
    322     std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges;  // Map of object to its binding range
    323     // Convenience vectors image/buff handles to speed up iterating over images or buffers independently
    324     std::unordered_set<uint64_t> bound_images;
    325     std::unordered_set<uint64_t> bound_buffers;
    326 
    327     MemRange mem_range;
    328     void *shadow_copy_base;    // Base of layer's allocation for guard band, data, and alignment space
    329     void *shadow_copy;         // Pointer to start of guard-band data before mapped region
    330     uint64_t shadow_pad_size;  // Size of the guard-band data before and after actual data. It MUST be a
    331                                // multiple of limits.minMemoryMapAlignment
    332     void *p_driver_data;       // Pointer to application's actual memory
    333 
    334     DEVICE_MEM_INFO(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
    335         : object(disp_object),
    336           global_valid(false),
    337           mem(in_mem),
    338           alloc_info(*p_alloc_info),
    339           mem_range{},
    340           shadow_copy_base(0),
    341           shadow_copy(0),
    342           shadow_pad_size(0),
    343           p_driver_data(0){};
    344 };
    345 
// State tracker for a VkSwapchainKHR and the images retrieved from it.
class SWAPCHAIN_NODE {
   public:
    safe_VkSwapchainCreateInfoKHR createInfo;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> images;  // Images belonging to this swapchain
    bool replaced = false;        // presumably set when this swapchain is superseded (oldSwapchain) -- confirm at call sites
    bool shared_presentable = false;  // Swapchain uses a shared-presentable mode
    CALL_STATE vkGetSwapchainImagesKHRState = UNCALLED;  // Progress through the count/details query pattern
    uint32_t get_swapchain_image_count = 0;              // Image count reported by the count-query step
    SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo, VkSwapchainKHR swapchain)
        : createInfo(pCreateInfo), swapchain(swapchain) {}
};
    358 
    359 class IMAGE_CMD_BUF_LAYOUT_NODE {
    360    public:
    361     IMAGE_CMD_BUF_LAYOUT_NODE() = default;
    362     IMAGE_CMD_BUF_LAYOUT_NODE(VkImageLayout initialLayoutInput, VkImageLayout layoutInput)
    363         : initialLayout(initialLayoutInput), layout(layoutInput) {}
    364 
    365     VkImageLayout initialLayout;
    366     VkImageLayout layout;
    367 };
    368 
// Store the DAG.
// One node per subpass: edges to the subpasses it depends on (prev) and the
// subpasses that depend on it (next).
struct DAGNode {
    uint32_t pass;               // Subpass index this node represents
    std::vector<uint32_t> prev;  // Incoming dependency edges
    std::vector<uint32_t> next;  // Outgoing dependency edges
};
    375 
    376 struct RENDER_PASS_STATE : public BASE_NODE {
    377     VkRenderPass renderPass;
    378     safe_VkRenderPassCreateInfo createInfo;
    379     std::vector<bool> hasSelfDependency;
    380     std::vector<DAGNode> subpassToNode;
    381     std::vector<int32_t> subpass_to_dependency_index;  // srcSubpass to dependency index of self dep, or -1 if none
    382     std::unordered_map<uint32_t, bool> attachment_first_read;
    383 
    384     RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) : createInfo(pCreateInfo) {}
    385 };
    386 
// vkCmd tracking -- complete as of header 1.0.68
// please keep in "none, then sorted" order
// Note: grepping vulkan.h for VKAPI_CALL.*vkCmd will return all functions except vkEndCommandBuffer

// One enumerator per vkCmd* entry point (plus CMD_NONE and CMD_ENDCOMMANDBUFFER),
// used to record/validate which commands were added to a command buffer.
enum CMD_TYPE {
    CMD_NONE,
    CMD_BEGINQUERY,
    CMD_BEGINRENDERPASS,
    CMD_BINDDESCRIPTORSETS,
    CMD_BINDINDEXBUFFER,
    CMD_BINDPIPELINE,
    CMD_BINDVERTEXBUFFERS,
    CMD_BLITIMAGE,
    CMD_CLEARATTACHMENTS,
    CMD_CLEARCOLORIMAGE,
    CMD_CLEARDEPTHSTENCILIMAGE,
    CMD_COPYBUFFER,
    CMD_COPYBUFFERTOIMAGE,
    CMD_COPYIMAGE,
    CMD_COPYIMAGETOBUFFER,
    CMD_COPYQUERYPOOLRESULTS,
    CMD_DEBUGMARKERBEGINEXT,
    CMD_DEBUGMARKERENDEXT,
    CMD_DEBUGMARKERINSERTEXT,
    CMD_DISPATCH,
    CMD_DISPATCHBASEKHX,
    CMD_DISPATCHINDIRECT,
    CMD_DRAW,
    CMD_DRAWINDEXED,
    CMD_DRAWINDEXEDINDIRECT,
    CMD_DRAWINDEXEDINDIRECTCOUNTAMD,
    CMD_DRAWINDIRECT,
    CMD_DRAWINDIRECTCOUNTAMD,
    CMD_ENDCOMMANDBUFFER,  // Should be the last command in any RECORDED cmd buffer
    CMD_ENDQUERY,
    CMD_ENDRENDERPASS,
    CMD_EXECUTECOMMANDS,
    CMD_FILLBUFFER,
    CMD_NEXTSUBPASS,
    CMD_PIPELINEBARRIER,
    CMD_PROCESSCOMMANDSNVX,
    CMD_PUSHCONSTANTS,
    CMD_PUSHDESCRIPTORSETKHR,
    CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR,
    CMD_RESERVESPACEFORCOMMANDSNVX,
    CMD_RESETEVENT,
    CMD_RESETQUERYPOOL,
    CMD_RESOLVEIMAGE,
    CMD_SETBLENDCONSTANTS,
    CMD_SETDEPTHBIAS,
    CMD_SETDEPTHBOUNDS,
    CMD_SETDEVICEMASKKHX,
    CMD_SETDISCARDRECTANGLEEXT,
    CMD_SETEVENT,
    CMD_SETLINEWIDTH,
    CMD_SETSAMPLELOCATIONSEXT,
    CMD_SETSCISSOR,
    CMD_SETSTENCILCOMPAREMASK,
    CMD_SETSTENCILREFERENCE,
    CMD_SETSTENCILWRITEMASK,
    CMD_SETVIEWPORT,
    CMD_SETVIEWPORTWSCALINGNV,
    CMD_UPDATEBUFFER,
    CMD_WAITEVENTS,
    CMD_WRITETIMESTAMP,
};
    453 
// Lifecycle state of a command buffer.
enum CB_STATE {
    CB_NEW,                 // Newly created CB w/o any cmds
    CB_RECORDING,           // BeginCB has been called on this CB
    CB_RECORDED,            // EndCB has been called on this CB
    CB_INVALID_COMPLETE,    // had a complete recording, but was since invalidated
    CB_INVALID_INCOMPLETE,  // fouled before recording was completed
};
    461 
// CB Status -- used to track status of various bindings on cmd buffer objects
typedef VkFlags CBStatusFlags;
enum CBStatusFlagBits {
    // clang-format off
    CBSTATUS_NONE                   = 0x00000000,   // No status is set
    CBSTATUS_LINE_WIDTH_SET         = 0x00000001,   // Line width has been set
    CBSTATUS_DEPTH_BIAS_SET         = 0x00000002,   // Depth bias has been set
    CBSTATUS_BLEND_CONSTANTS_SET    = 0x00000004,   // Blend constants state has been set
    CBSTATUS_DEPTH_BOUNDS_SET       = 0x00000008,   // Depth bounds state object has been set
    CBSTATUS_STENCIL_READ_MASK_SET  = 0x00000010,   // Stencil read mask has been set
    CBSTATUS_STENCIL_WRITE_MASK_SET = 0x00000020,   // Stencil write mask has been set
    CBSTATUS_STENCIL_REFERENCE_SET  = 0x00000040,   // Stencil reference has been set
    CBSTATUS_VIEWPORT_SET           = 0x00000080,   // Viewport has been set
    CBSTATUS_SCISSOR_SET            = 0x00000100,   // Scissor has been set
    CBSTATUS_INDEX_BUFFER_BOUND     = 0x00000200,   // Index buffer has been set
    CBSTATUS_ALL_STATE_SET          = 0x000001FF,   // All state set (intentionally exclude index buffer)
    // clang-format on
};
    480 
    481 struct TEMPLATE_STATE {
    482     VkDescriptorUpdateTemplateKHR desc_update_template;
    483     safe_VkDescriptorUpdateTemplateCreateInfoKHR create_info;
    484 
    485     TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo)
    486         : desc_update_template(update_template), create_info(*pCreateInfo) {}
    487 };
    488 
// Identifies a single query: a query pool handle plus the slot index within it.
struct QueryObject {
    VkQueryPool pool;
    uint32_t index;
};
    493 
    494 inline bool operator==(const QueryObject &query1, const QueryObject &query2) {
    495     return (query1.pool == query2.pool && query1.index == query2.index);
    496 }
    497 
    498 namespace std {
    499 template <>
    500 struct hash<QueryObject> {
    501     size_t operator()(QueryObject query) const throw() {
    502         return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index);
    503     }
    504 };
    505 }  // namespace std
// Buffers associated with a draw -- presumably the bound vertex buffers;
// confirm against the recording code that fills this in.
struct DRAW_DATA {
    std::vector<VkBuffer> buffers;
};
    509 
// Key type pairing an image with an optional specific subresource.
// When hasSubresource is false the pair refers to the image as a whole and the
// subresource field's contents are ignored (see operator== and hash below).
struct ImageSubresourcePair {
    VkImage image;
    bool hasSubresource;
    VkImageSubresource subresource;
};
    515 
    516 inline bool operator==(const ImageSubresourcePair &img1, const ImageSubresourcePair &img2) {
    517     if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource) return false;
    518     return !img1.hasSubresource ||
    519            (img1.subresource.aspectMask == img2.subresource.aspectMask && img1.subresource.mipLevel == img2.subresource.mipLevel &&
    520             img1.subresource.arrayLayer == img2.subresource.arrayLayer);
    521 }
    522 
    523 namespace std {
    524 template <>
    525 struct hash<ImageSubresourcePair> {
    526     size_t operator()(ImageSubresourcePair img) const throw() {
    527         size_t hashVal = hash<uint64_t>()(reinterpret_cast<uint64_t &>(img.image));
    528         hashVal ^= hash<bool>()(img.hasSubresource);
    529         if (img.hasSubresource) {
    530             hashVal ^= hash<uint32_t>()(reinterpret_cast<uint32_t &>(img.subresource.aspectMask));
    531             hashVal ^= hash<uint32_t>()(img.subresource.mipLevel);
    532             hashVal ^= hash<uint32_t>()(img.subresource.arrayLayer);
    533         }
    534         return hashVal;
    535     }
    536 };
    537 }  // namespace std
    538 
// Store layouts and pushconstants for PipelineLayout
struct PIPELINE_LAYOUT_NODE {
    VkPipelineLayout layout;
    std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts;
    std::vector<VkPushConstantRange> push_constant_ranges;

    PIPELINE_LAYOUT_NODE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{} {}

    // Return to the default-constructed (empty) state.
    void reset() {
        layout = VK_NULL_HANDLE;
        set_layouts.clear();
        push_constant_ranges.clear();
    }
};
    553 
    554 class PIPELINE_STATE : public BASE_NODE {
    555    public:
    556     VkPipeline pipeline;
    557     safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
    558     // Hold shared ptr to RP in case RP itself is destroyed
    559     std::shared_ptr<RENDER_PASS_STATE> rp_state;
    560     safe_VkComputePipelineCreateInfo computePipelineCI;
    561     // Flag of which shader stages are active for this pipeline
    562     uint32_t active_shaders;
    563     uint32_t duplicate_shaders;
    564     // Capture which slots (set#->bindings) are actually used by the shaders of this pipeline
    565     std::unordered_map<uint32_t, std::map<uint32_t, descriptor_req>> active_slots;
    566     // Vtx input info (if any)
    567     std::vector<VkVertexInputBindingDescription> vertexBindingDescriptions;
    568     std::vector<VkPipelineColorBlendAttachmentState> attachments;
    569     bool blendConstantsEnabled;  // Blend constants enabled for any attachments
    570     PIPELINE_LAYOUT_NODE pipeline_layout;
    571 
    572     // Default constructor
    573     PIPELINE_STATE()
    574         : pipeline{},
    575           graphicsPipelineCI{},
    576           rp_state(nullptr),
    577           computePipelineCI{},
    578           active_shaders(0),
    579           duplicate_shaders(0),
    580           active_slots(),
    581           vertexBindingDescriptions(),
    582           attachments(),
    583           blendConstantsEnabled(false),
    584           pipeline_layout() {}
    585 
    586     void initGraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate) {
    587         bool uses_color_attachment = false;
    588         bool uses_depthstencil_attachment = false;
    589         if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
    590             const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];
    591 
    592             for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
    593                 if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
    594                     uses_color_attachment = true;
    595                     break;
    596                 }
    597             }
    598 
    599             if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
    600                 uses_depthstencil_attachment = true;
    601             }
    602         }
    603         graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
    604         // Make sure compute pipeline is null
    605         VkComputePipelineCreateInfo emptyComputeCI = {};
    606         computePipelineCI.initialize(&emptyComputeCI);
    607         for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
    608             const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
    609             this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
    610             this->active_shaders |= pPSSCI->stage;
    611         }
    612         if (graphicsPipelineCI.pVertexInputState) {
    613             const auto pVICI = graphicsPipelineCI.pVertexInputState;
    614             if (pVICI->vertexBindingDescriptionCount) {
    615                 this->vertexBindingDescriptions = std::vector<VkVertexInputBindingDescription>(
    616                     pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
    617             }
    618         }
    619         if (graphicsPipelineCI.pColorBlendState) {
    620             const auto pCBCI = graphicsPipelineCI.pColorBlendState;
    621             if (pCBCI->attachmentCount) {
    622                 this->attachments = std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments,
    623                                                                                      pCBCI->pAttachments + pCBCI->attachmentCount);
    624             }
    625         }
    626         rp_state = rpstate;
    627     }
    628 
    629     void initComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo) {
    630         computePipelineCI.initialize(pCreateInfo);
    631         // Make sure gfx pipeline is null
    632         VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
    633         graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
    634         switch (computePipelineCI.stage.stage) {
    635             case VK_SHADER_STAGE_COMPUTE_BIT:
    636                 this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
    637                 break;
    638             default:
    639                 // TODO : Flag error
    640                 break;
    641         }
    642     }
    643 };
    644 
    645 // Track last states that are bound per pipeline bind point (Gfx & Compute)
    646 struct LAST_BOUND_STATE {
    647     PIPELINE_STATE *pipeline_state;
    648     PIPELINE_LAYOUT_NODE pipeline_layout;
    649     // Track each set that has been bound
    650     // Ordered bound set tracking where index is set# that given set is bound to
    651     std::vector<cvdescriptorset::DescriptorSet *> boundDescriptorSets;
    652     std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set;
    653     // one dynamic offset per dynamic descriptor bound to this CB
    654     std::vector<std::vector<uint32_t>> dynamicOffsets;
    655 
    656     void reset() {
    657         pipeline_state = nullptr;
    658         pipeline_layout.reset();
    659         boundDescriptorSets.clear();
    660         push_descriptor_set = nullptr;
    661         dynamicOffsets.clear();
    662     }
    663 };
// Cmd Buffer Wrapper Struct - TODO : This desperately needs its own class
struct GLOBAL_CB_NODE : public BASE_NODE {
    VkCommandBuffer commandBuffer;                   // Handle of the command buffer this node tracks
    VkCommandBufferAllocateInfo createInfo = {};     // Allocate info this CB was created with
    VkCommandBufferBeginInfo beginInfo;              // Begin info from the most recent vkBeginCommandBuffer
    VkCommandBufferInheritanceInfo inheritanceInfo;  // Inheritance info (meaningful for secondary CBs)
    VkDevice device;  // device this CB belongs to
    bool hasDrawCmd;  // True once any draw command has been recorded into this CB
    CB_STATE state;        // Track cmd buffer update state
    uint64_t submitCount;  // Number of times CB has been submitted
    typedef uint64_t ImageLayoutUpdateCount;
    ImageLayoutUpdateCount image_layout_change_count;  // The sequence number for changes to image layout (for cached validation)
    CBStatusFlags status;                              // Track status of various bindings on cmd buffer
    CBStatusFlags static_status;                       // All state bits provided by current graphics pipeline
                                                       // rather than dynamic state
    // Currently storing "lastBound" objects on per-CB basis
    //  long-term may want to create caches of "lastBound" states and could have
    //  each individual CMD_NODE referencing its own "lastBound" state
    // Store last bound state for Gfx & Compute pipeline bind points
    LAST_BOUND_STATE lastBound[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

    uint32_t viewportMask;  // Bitmask of viewport indices set on this CB -- presumably via vkCmdSetViewport; TODO confirm
    uint32_t scissorMask;   // Bitmask of scissor indices set on this CB -- presumably via vkCmdSetScissor; TODO confirm
    VkRenderPassBeginInfo activeRenderPassBeginInfo;  // Begin info of the render pass instance being recorded
    RENDER_PASS_STATE *activeRenderPass;              // State of the active render pass (null outside a render pass)
    VkSubpassContents activeSubpassContents;          // Contents type (inline vs secondary CBs) of the active subpass
    uint32_t activeSubpass;                           // Index of the subpass currently being recorded
    VkFramebuffer activeFramebuffer;                  // Framebuffer of the active render pass instance
    std::unordered_set<VkFramebuffer> framebuffers;   // All framebuffers referenced by this CB
    // Unified data structs to track objects bound to this command buffer as well as object
    //  dependencies that have been broken : either destroyed objects, or updated descriptor sets
    std::unordered_set<VK_OBJECT> object_bindings;
    std::vector<VK_OBJECT> broken_bindings;

    std::unordered_set<VkEvent> waitedEvents;     // Events this CB waits on (vkCmdWaitEvents)
    std::vector<VkEvent> writeEventsBeforeWait;   // Events written (set/reset) before a wait was recorded
    std::vector<VkEvent> events;                  // All events referenced by this CB, in recorded order
    std::unordered_map<QueryObject, std::unordered_set<VkEvent>> waitedEventsBeforeQueryReset;
    std::unordered_map<QueryObject, bool> queryToStateMap;  // 0 is unavailable, 1 is available
    std::unordered_set<QueryObject> activeQueries;   // Queries begun but not yet ended in this CB
    std::unordered_set<QueryObject> startedQueries;  // Queries that have been begun at some point in this CB
    std::unordered_map<ImageSubresourcePair, IMAGE_CMD_BUF_LAYOUT_NODE> imageLayoutMap;  // Per-subresource layout tracking
    std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;  // Stage mask recorded with each event set/reset
    std::vector<DRAW_DATA> drawData;   // Per-draw bound-buffer snapshots
    DRAW_DATA currentDrawData;         // Buffers bound as of the next draw
    bool vertex_buffer_used;  // Track for perf warning to make sure any bound vtx buffer used
    VkCommandBuffer primaryCommandBuffer;  // For a secondary CB, the primary it was last executed in -- TODO confirm
    // Track images and buffers that are updated by this CB at the point of a draw
    std::unordered_set<VkImageView> updateImages;
    std::unordered_set<VkBuffer> updateBuffers;
    // If primary, the secondary command buffers we will call.
    // If secondary, the primary command buffers we will be called by.
    std::unordered_set<GLOBAL_CB_NODE *> linkedCommandBuffers;
    // Validation functions run at primary CB queue submit time
    std::vector<std::function<bool()>> queue_submit_functions;
    // Validation functions run when secondary CB is executed in primary
    std::vector<std::function<bool(VkFramebuffer)>> cmd_execute_commands_functions;
    std::unordered_set<VkDeviceMemory> memObjs;              // Memory objects referenced by this CB
    std::vector<std::function<bool(VkQueue)>> eventUpdates;  // Deferred event-state updates run at submit time
    std::vector<std::function<bool(VkQueue)>> queryUpdates;  // Deferred query-state updates run at submit time
    std::unordered_set<cvdescriptorset::DescriptorSet *> validated_descriptor_sets;  // Sets already validated for this CB
};
    726 
// Records a wait on a semaphore along with the queue/submission expected to
// signal it, so submission ordering can be validated and retired.
struct SEMAPHORE_WAIT {
    VkSemaphore semaphore;  // Semaphore being waited on
    VkQueue queue;          // Queue whose submission signals the semaphore -- presumably; TODO confirm against callers
    uint64_t seq;           // Sequence number of that signaling submission on `queue`
};
    732 
    733 struct CB_SUBMISSION {
    734     CB_SUBMISSION(std::vector<VkCommandBuffer> const &cbs, std::vector<SEMAPHORE_WAIT> const &waitSemaphores,
    735                   std::vector<VkSemaphore> const &signalSemaphores, std::vector<VkSemaphore> const &externalSemaphores,
    736                   VkFence fence)
    737         : cbs(cbs),
    738           waitSemaphores(waitSemaphores),
    739           signalSemaphores(signalSemaphores),
    740           externalSemaphores(externalSemaphores),
    741           fence(fence) {}
    742 
    743     std::vector<VkCommandBuffer> cbs;
    744     std::vector<SEMAPHORE_WAIT> waitSemaphores;
    745     std::vector<VkSemaphore> signalSemaphores;
    746     std::vector<VkSemaphore> externalSemaphores;
    747     VkFence fence;
    748 };
    749 
// Tracked layout state for an image (subresource): the last known layout plus
// the image's format, kept together for layout-transition validation.
struct IMAGE_LAYOUT_NODE {
    VkImageLayout layout;  // Most recently recorded/known layout
    VkFormat format;       // Format of the image this layout entry describes
};
    754 
    755 // CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
    756 // The end goal is to have all checks guarded by a bool. The bools are all "false" by default meaning that all checks
    757 // are enabled. At CreateInstance time, the user can use the VK_EXT_validation_flags extension to pass in enum values
    758 // of VkValidationCheckEXT that will selectively disable checks.
    759 struct CHECK_DISABLED {
    760     bool command_buffer_state;
    761     bool create_descriptor_set_layout;
    762     bool destroy_buffer_view;       // Skip validation at DestroyBufferView time
    763     bool destroy_image_view;        // Skip validation at DestroyImageView time
    764     bool destroy_pipeline;          // Skip validation at DestroyPipeline time
    765     bool destroy_descriptor_pool;   // Skip validation at DestroyDescriptorPool time
    766     bool destroy_framebuffer;       // Skip validation at DestroyFramebuffer time
    767     bool destroy_renderpass;        // Skip validation at DestroyRenderpass time
    768     bool destroy_image;             // Skip validation at DestroyImage time
    769     bool destroy_sampler;           // Skip validation at DestroySampler time
    770     bool destroy_command_pool;      // Skip validation at DestroyCommandPool time
    771     bool destroy_event;             // Skip validation at DestroyEvent time
    772     bool free_memory;               // Skip validation at FreeMemory time
    773     bool object_in_use;             // Skip all object in_use checking
    774     bool idle_descriptor_set;       // Skip check to verify that descriptor set is no in-use
    775     bool push_constant_range;       // Skip push constant range checks
    776     bool free_descriptor_sets;      // Skip validation prior to vkFreeDescriptorSets()
    777     bool allocate_descriptor_sets;  // Skip validation prior to vkAllocateDescriptorSets()
    778     bool update_descriptor_sets;    // Skip validation prior to vkUpdateDescriptorSets()
    779     bool wait_for_fences;
    780     bool get_fence_state;
    781     bool queue_wait_idle;
    782     bool device_wait_idle;
    783     bool destroy_fence;
    784     bool destroy_semaphore;
    785     bool destroy_query_pool;
    786     bool get_query_pool_results;
    787     bool destroy_buffer;
    788     bool shader_validation;  // Skip validation for shaders
    789 
    790     void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
    791 };
    792 
// Per-attachment record for a framebuffer: the attachment's image-view state
// together with the underlying image handle.
struct MT_FB_ATTACHMENT_INFO {
    IMAGE_VIEW_STATE *view_state;  // State of the attachment's image view (not owned here)
    VkImage image;                 // Image backing that view
};
    797 
    798 class FRAMEBUFFER_STATE : public BASE_NODE {
    799    public:
    800     VkFramebuffer framebuffer;
    801     safe_VkFramebufferCreateInfo createInfo;
    802     std::shared_ptr<RENDER_PASS_STATE> rp_state;
    803     std::vector<MT_FB_ATTACHMENT_INFO> attachments;
    804     FRAMEBUFFER_STATE(VkFramebuffer fb, const VkFramebufferCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate)
    805         : framebuffer(fb), createInfo(pCreateInfo), rp_state(rpstate){};
    806 };
    807 
    808 struct shader_module;
    809 struct DeviceExtensions;
    810 
    811 // Fwd declarations of layer_data and helpers to look-up/validate state from layer_data maps
    812 namespace core_validation {
    813 struct layer_data;
    814 cvdescriptorset::DescriptorSet *GetSetNode(const layer_data *, VkDescriptorSet);
    815 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(layer_data const *, VkDescriptorSetLayout);
    816 DESCRIPTOR_POOL_STATE *GetDescriptorPoolState(const layer_data *, const VkDescriptorPool);
    817 BUFFER_STATE *GetBufferState(const layer_data *, VkBuffer);
    818 IMAGE_STATE *GetImageState(const layer_data *, VkImage);
    819 DEVICE_MEM_INFO *GetMemObjInfo(const layer_data *, VkDeviceMemory);
    820 BUFFER_VIEW_STATE *GetBufferViewState(const layer_data *, VkBufferView);
    821 SAMPLER_STATE *GetSamplerState(const layer_data *, VkSampler);
    822 IMAGE_VIEW_STATE *GetImageViewState(const layer_data *, VkImageView);
    823 SWAPCHAIN_NODE *GetSwapchainNode(const layer_data *, VkSwapchainKHR);
    824 GLOBAL_CB_NODE *GetCBNode(layer_data const *my_data, const VkCommandBuffer cb);
    825 RENDER_PASS_STATE *GetRenderPassState(layer_data const *dev_data, VkRenderPass renderpass);
    826 std::shared_ptr<RENDER_PASS_STATE> GetRenderPassStateSharedPtr(layer_data const *dev_data, VkRenderPass renderpass);
    827 FRAMEBUFFER_STATE *GetFramebufferState(const layer_data *my_data, VkFramebuffer framebuffer);
    828 COMMAND_POOL_NODE *GetCommandPoolNode(layer_data *dev_data, VkCommandPool pool);
    829 shader_module const *GetShaderModuleState(layer_data const *dev_data, VkShaderModule module);
    830 const PHYS_DEV_PROPERTIES_NODE *GetPhysDevProperties(const layer_data *device_data);
    831 const VkPhysicalDeviceFeatures *GetEnabledFeatures(const layer_data *device_data);
    832 const DeviceExtensions *GetEnabledExtensions(const layer_data *device_data);
    833 
    834 void invalidateCommandBuffers(const layer_data *, std::unordered_set<GLOBAL_CB_NODE *> const &, VK_OBJECT);
    835 bool ValidateMemoryIsBoundToBuffer(const layer_data *, const BUFFER_STATE *, const char *, UNIQUE_VALIDATION_ERROR_CODE);
    836 bool ValidateMemoryIsBoundToImage(const layer_data *, const IMAGE_STATE *, const char *, UNIQUE_VALIDATION_ERROR_CODE);
    837 void AddCommandBufferBindingSampler(GLOBAL_CB_NODE *, SAMPLER_STATE *);
    838 void AddCommandBufferBindingImage(const layer_data *, GLOBAL_CB_NODE *, IMAGE_STATE *);
    839 void AddCommandBufferBindingImageView(const layer_data *, GLOBAL_CB_NODE *, IMAGE_VIEW_STATE *);
    840 void AddCommandBufferBindingBuffer(const layer_data *, GLOBAL_CB_NODE *, BUFFER_STATE *);
    841 void AddCommandBufferBindingBufferView(const layer_data *, GLOBAL_CB_NODE *, BUFFER_VIEW_STATE *);
    842 bool ValidateObjectNotInUse(const layer_data *dev_data, BASE_NODE *obj_node, VK_OBJECT obj_struct, const char *caller_name,
    843                             UNIQUE_VALIDATION_ERROR_CODE error_code);
    844 void invalidateCommandBuffers(const layer_data *dev_data, std::unordered_set<GLOBAL_CB_NODE *> const &cb_nodes, VK_OBJECT obj);
    845 void RemoveImageMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info);
    846 void RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info);
    847 bool ClearMemoryObjectBindings(layer_data *dev_data, uint64_t handle, VulkanObjectType type);
    848 bool ValidateCmdQueueFlags(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node, const char *caller_name, VkQueueFlags flags,
    849                            UNIQUE_VALIDATION_ERROR_CODE error_code);
    850 bool ValidateCmd(layer_data *my_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd, const char *caller_name);
    851 bool insideRenderPass(const layer_data *my_data, const GLOBAL_CB_NODE *pCB, const char *apiName,
    852                       UNIQUE_VALIDATION_ERROR_CODE msgCode);
    853 void SetImageMemoryValid(layer_data *dev_data, IMAGE_STATE *image_state, bool valid);
    854 bool outsideRenderPass(const layer_data *my_data, GLOBAL_CB_NODE *pCB, const char *apiName, UNIQUE_VALIDATION_ERROR_CODE msgCode);
    855 void SetLayout(GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const IMAGE_CMD_BUF_LAYOUT_NODE &node);
    856 void SetLayout(GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const VkImageLayout &layout);
    857 bool ValidateImageMemoryIsValid(layer_data *dev_data, IMAGE_STATE *image_state, const char *functionName);
    858 bool ValidateImageSampleCount(layer_data *dev_data, IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count,
    859                               const char *location, UNIQUE_VALIDATION_ERROR_CODE msgCode);
    860 bool rangesIntersect(layer_data const *dev_data, MEMORY_RANGE const *range1, VkDeviceSize offset, VkDeviceSize end);
    861 bool ValidateBufferMemoryIsValid(layer_data *dev_data, BUFFER_STATE *buffer_state, const char *functionName);
    862 void SetBufferMemoryValid(layer_data *dev_data, BUFFER_STATE *buffer_state, bool valid);
    863 bool ValidateCmdSubpassState(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd_type);
    864 bool ValidateCmd(layer_data *dev_data, const GLOBAL_CB_NODE *cb_state, const CMD_TYPE cmd, const char *caller_name);
    865 
    866 // Prototypes for layer_data accessor functions.  These should be in their own header file at some point
    867 VkFormatProperties GetFormatProperties(core_validation::layer_data *device_data, VkFormat format);
    868 VkResult GetImageFormatProperties(core_validation::layer_data *device_data, const VkImageCreateInfo *image_ci,
    869                                   VkImageFormatProperties *image_format_properties);
    870 const debug_report_data *GetReportData(const layer_data *);
    871 const VkPhysicalDeviceProperties *GetPhysicalDeviceProperties(layer_data *);
    872 const CHECK_DISABLED *GetDisables(layer_data *);
    873 std::unordered_map<VkImage, std::unique_ptr<IMAGE_STATE>> *GetImageMap(core_validation::layer_data *);
    874 std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *GetImageSubresourceMap(layer_data *);
    875 std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> *GetImageLayoutMap(layer_data *);
    876 std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const *GetImageLayoutMap(layer_data const *);
    877 std::unordered_map<VkBuffer, std::unique_ptr<BUFFER_STATE>> *GetBufferMap(layer_data *device_data);
    878 std::unordered_map<VkBufferView, std::unique_ptr<BUFFER_VIEW_STATE>> *GetBufferViewMap(layer_data *device_data);
    879 std::unordered_map<VkImageView, std::unique_ptr<IMAGE_VIEW_STATE>> *GetImageViewMap(layer_data *device_data);
    880 const DeviceExtensions *GetDeviceExtensions(const layer_data *);
    881 }  // namespace core_validation
    882 
    883 #endif  // CORE_VALIDATION_TYPES_H_
    884