1 /* Copyright (c) 2015-2017 The Khronos Group Inc. 2 * Copyright (c) 2015-2017 Valve Corporation 3 * Copyright (c) 2015-2017 LunarG, Inc. 4 * Copyright (C) 2015-2017 Google Inc. 5 * 6 * Licensed under the Apache License, Version 2.0 (the "License"); 7 * you may not use this file except in compliance with the License. 8 * You may obtain a copy of the License at 9 * 10 * http://www.apache.org/licenses/LICENSE-2.0 11 * 12 * Unless required by applicable law or agreed to in writing, software 13 * distributed under the License is distributed on an "AS IS" BASIS, 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 * See the License for the specific language governing permissions and 16 * limitations under the License. 17 * 18 * Author: Mark Lobodzinski <mark (at) lunarg.com> 19 * Author: Dave Houlton <daveh (at) lunarg.com> 20 */ 21 22 // Allow use of STL min and max functions in Windows 23 #define NOMINMAX 24 25 #include <inttypes.h> 26 #include <sstream> 27 #include <string> 28 29 #include "vk_enum_string_helper.h" 30 #include "vk_layer_data.h" 31 #include "vk_layer_utils.h" 32 #include "vk_layer_logging.h" 33 34 #include "buffer_validation.h" 35 36 void SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const VkImageLayout &layout) { 37 if (pCB->imageLayoutMap.find(imgpair) != pCB->imageLayoutMap.end()) { 38 pCB->imageLayoutMap[imgpair].layout = layout; 39 } else { 40 assert(imgpair.hasSubresource); 41 IMAGE_CMD_BUF_LAYOUT_NODE node; 42 if (!FindCmdBufLayout(device_data, pCB, imgpair.image, imgpair.subresource, node)) { 43 node.initialLayout = layout; 44 } 45 SetLayout(device_data, pCB, imgpair, {node.initialLayout, layout}); 46 } 47 } 48 template <class OBJECT, class LAYOUT> 49 void SetLayout(layer_data *device_data, OBJECT *pObject, VkImage image, VkImageSubresource range, const LAYOUT &layout) { 50 ImageSubresourcePair imgpair = {image, true, range}; 51 SetLayout(device_data, pObject, imgpair, layout, 
VK_IMAGE_ASPECT_COLOR_BIT); 52 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT); 53 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT); 54 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT); 55 } 56 57 template <class OBJECT, class LAYOUT> 58 void SetLayout(layer_data *device_data, OBJECT *pObject, ImageSubresourcePair imgpair, const LAYOUT &layout, 59 VkImageAspectFlags aspectMask) { 60 if (imgpair.subresource.aspectMask & aspectMask) { 61 imgpair.subresource.aspectMask = aspectMask; 62 SetLayout(device_data, pObject, imgpair, layout); 63 } 64 } 65 66 // Set the layout in supplied map 67 void SetLayout(std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair, 68 VkImageLayout layout) { 69 imageLayoutMap[imgpair].layout = layout; 70 } 71 72 bool FindLayoutVerifyNode(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, ImageSubresourcePair imgpair, 73 IMAGE_CMD_BUF_LAYOUT_NODE &node, const VkImageAspectFlags aspectMask) { 74 const debug_report_data *report_data = core_validation::GetReportData(device_data); 75 76 if (!(imgpair.subresource.aspectMask & aspectMask)) { 77 return false; 78 } 79 VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask; 80 imgpair.subresource.aspectMask = aspectMask; 81 auto imgsubIt = pCB->imageLayoutMap.find(imgpair); 82 if (imgsubIt == pCB->imageLayoutMap.end()) { 83 return false; 84 } 85 if (node.layout != VK_IMAGE_LAYOUT_MAX_ENUM && node.layout != imgsubIt->second.layout) { 86 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image), 87 __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS", 88 "Cannot query for VkImage 0x%" PRIx64 " layout when combined aspect mask %d has multiple layout types: %s and %s", 89 HandleToUint64(imgpair.image), oldAspectMask, string_VkImageLayout(node.layout), 90 string_VkImageLayout(imgsubIt->second.layout)); 91 
} 92 if (node.initialLayout != VK_IMAGE_LAYOUT_MAX_ENUM && node.initialLayout != imgsubIt->second.initialLayout) { 93 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image), 94 __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS", 95 "Cannot query for VkImage 0x%" PRIx64 96 " layout when combined aspect mask %d has multiple initial layout types: %s and %s", 97 HandleToUint64(imgpair.image), oldAspectMask, string_VkImageLayout(node.initialLayout), 98 string_VkImageLayout(imgsubIt->second.initialLayout)); 99 } 100 node = imgsubIt->second; 101 return true; 102 } 103 104 bool FindLayoutVerifyLayout(layer_data const *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout, 105 const VkImageAspectFlags aspectMask) { 106 if (!(imgpair.subresource.aspectMask & aspectMask)) { 107 return false; 108 } 109 const debug_report_data *report_data = core_validation::GetReportData(device_data); 110 VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask; 111 imgpair.subresource.aspectMask = aspectMask; 112 auto imgsubIt = (*core_validation::GetImageLayoutMap(device_data)).find(imgpair); 113 if (imgsubIt == (*core_validation::GetImageLayoutMap(device_data)).end()) { 114 return false; 115 } 116 if (layout != VK_IMAGE_LAYOUT_MAX_ENUM && layout != imgsubIt->second.layout) { 117 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image), 118 __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS", 119 "Cannot query for VkImage 0x%" PRIx64 " layout when combined aspect mask %d has multiple layout types: %s and %s", 120 HandleToUint64(imgpair.image), oldAspectMask, string_VkImageLayout(layout), 121 string_VkImageLayout(imgsubIt->second.layout)); 122 } 123 layout = imgsubIt->second.layout; 124 return true; 125 } 126 127 // Find layout(s) on the command buffer level 128 bool FindCmdBufLayout(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, VkImage image, 
VkImageSubresource range, 129 IMAGE_CMD_BUF_LAYOUT_NODE &node) { 130 ImageSubresourcePair imgpair = {image, true, range}; 131 node = IMAGE_CMD_BUF_LAYOUT_NODE(VK_IMAGE_LAYOUT_MAX_ENUM, VK_IMAGE_LAYOUT_MAX_ENUM); 132 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_COLOR_BIT); 133 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_DEPTH_BIT); 134 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_STENCIL_BIT); 135 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_METADATA_BIT); 136 if (node.layout == VK_IMAGE_LAYOUT_MAX_ENUM) { 137 imgpair = {image, false, VkImageSubresource()}; 138 auto imgsubIt = pCB->imageLayoutMap.find(imgpair); 139 if (imgsubIt == pCB->imageLayoutMap.end()) return false; 140 // TODO: This is ostensibly a find function but it changes state here 141 node = imgsubIt->second; 142 } 143 return true; 144 } 145 146 // Find layout(s) on the global level 147 bool FindGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout) { 148 layout = VK_IMAGE_LAYOUT_MAX_ENUM; 149 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT); 150 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT); 151 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT); 152 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT); 153 if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) { 154 imgpair = {imgpair.image, false, VkImageSubresource()}; 155 auto imgsubIt = (*core_validation::GetImageLayoutMap(device_data)).find(imgpair); 156 if (imgsubIt == (*core_validation::GetImageLayoutMap(device_data)).end()) return false; 157 layout = imgsubIt->second.layout; 158 } 159 return true; 160 } 161 162 bool FindLayouts(layer_data *device_data, VkImage image, std::vector<VkImageLayout> &layouts) { 163 auto sub_data = (*core_validation::GetImageSubresourceMap(device_data)).find(image); 164 if 
(sub_data == (*core_validation::GetImageSubresourceMap(device_data)).end()) return false; 165 auto image_state = GetImageState(device_data, image); 166 if (!image_state) return false; 167 bool ignoreGlobal = false; 168 // TODO: Make this robust for >1 aspect mask. Now it will just say ignore potential errors in this case. 169 if (sub_data->second.size() >= (image_state->createInfo.arrayLayers * image_state->createInfo.mipLevels + 1)) { 170 ignoreGlobal = true; 171 } 172 for (auto imgsubpair : sub_data->second) { 173 if (ignoreGlobal && !imgsubpair.hasSubresource) continue; 174 auto img_data = (*core_validation::GetImageLayoutMap(device_data)).find(imgsubpair); 175 if (img_data != (*core_validation::GetImageLayoutMap(device_data)).end()) { 176 layouts.push_back(img_data->second.layout); 177 } 178 } 179 return true; 180 } 181 bool FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair, 182 VkImageLayout &layout, const VkImageAspectFlags aspectMask) { 183 if (!(imgpair.subresource.aspectMask & aspectMask)) { 184 return false; 185 } 186 imgpair.subresource.aspectMask = aspectMask; 187 auto imgsubIt = imageLayoutMap.find(imgpair); 188 if (imgsubIt == imageLayoutMap.end()) { 189 return false; 190 } 191 layout = imgsubIt->second.layout; 192 return true; 193 } 194 195 // find layout in supplied map 196 bool FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair, 197 VkImageLayout &layout) { 198 layout = VK_IMAGE_LAYOUT_MAX_ENUM; 199 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT); 200 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT); 201 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT); 202 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT); 203 if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) { 204 imgpair = {imgpair.image, false, VkImageSubresource()}; 205 
auto imgsubIt = imageLayoutMap.find(imgpair); 206 if (imgsubIt == imageLayoutMap.end()) return false; 207 layout = imgsubIt->second.layout; 208 } 209 return true; 210 } 211 212 // Set the layout on the global level 213 void SetGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, const VkImageLayout &layout) { 214 VkImage &image = imgpair.image; 215 (*core_validation::GetImageLayoutMap(device_data))[imgpair].layout = layout; 216 auto &image_subresources = (*core_validation::GetImageSubresourceMap(device_data))[image]; 217 auto subresource = std::find(image_subresources.begin(), image_subresources.end(), imgpair); 218 if (subresource == image_subresources.end()) { 219 image_subresources.push_back(imgpair); 220 } 221 } 222 223 // Set the layout on the cmdbuf level 224 void SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const IMAGE_CMD_BUF_LAYOUT_NODE &node) { 225 pCB->imageLayoutMap[imgpair] = node; 226 } 227 // Set image layout for given VkImageSubresourceRange struct 228 void SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state, 229 VkImageSubresourceRange image_subresource_range, const VkImageLayout &layout) { 230 assert(image_state); 231 cb_node->image_layout_change_count++; // Change the version of this data to force revalidation 232 for (uint32_t level_index = 0; level_index < image_subresource_range.levelCount; ++level_index) { 233 uint32_t level = image_subresource_range.baseMipLevel + level_index; 234 for (uint32_t layer_index = 0; layer_index < image_subresource_range.layerCount; layer_index++) { 235 uint32_t layer = image_subresource_range.baseArrayLayer + layer_index; 236 VkImageSubresource sub = {image_subresource_range.aspectMask, level, layer}; 237 // TODO: If ImageView was created with depth or stencil, transition both layouts as the aspectMask is ignored and both 238 // are used. 
Verify that the extra implicit layout is OK for descriptor set layout validation 239 if (image_subresource_range.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) { 240 if (FormatIsDepthAndStencil(image_state->createInfo.format)) { 241 sub.aspectMask |= (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT); 242 } 243 } 244 SetLayout(device_data, cb_node, image_state->image, sub, layout); 245 } 246 } 247 } 248 // Set image layout for given VkImageSubresourceLayers struct 249 void SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state, 250 VkImageSubresourceLayers image_subresource_layers, const VkImageLayout &layout) { 251 // Transfer VkImageSubresourceLayers into VkImageSubresourceRange struct 252 VkImageSubresourceRange image_subresource_range; 253 image_subresource_range.aspectMask = image_subresource_layers.aspectMask; 254 image_subresource_range.baseArrayLayer = image_subresource_layers.baseArrayLayer; 255 image_subresource_range.layerCount = image_subresource_layers.layerCount; 256 image_subresource_range.baseMipLevel = image_subresource_layers.mipLevel; 257 image_subresource_range.levelCount = 1; 258 SetImageLayout(device_data, cb_node, image_state, image_subresource_range, layout); 259 } 260 // Set image layout for all slices of an image view 261 void SetImageViewLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImageView imageView, const VkImageLayout &layout) { 262 auto view_state = GetImageViewState(device_data, imageView); 263 assert(view_state); 264 265 SetImageLayout(device_data, cb_node, GetImageState(device_data, view_state->create_info.image), 266 view_state->create_info.subresourceRange, layout); 267 } 268 269 bool VerifyFramebufferAndRenderPassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, 270 const VkRenderPassBeginInfo *pRenderPassBegin, 271 const FRAMEBUFFER_STATE *framebuffer_state) { 272 bool skip = false; 273 auto const pRenderPassInfo = 
GetRenderPassState(device_data, pRenderPassBegin->renderPass)->createInfo.ptr(); 274 auto const &framebufferInfo = framebuffer_state->createInfo; 275 const auto report_data = core_validation::GetReportData(device_data); 276 if (pRenderPassInfo->attachmentCount != framebufferInfo.attachmentCount) { 277 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 278 HandleToUint64(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS", 279 "You cannot start a render pass using a framebuffer with a different number of attachments."); 280 } 281 for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) { 282 const VkImageView &image_view = framebufferInfo.pAttachments[i]; 283 auto view_state = GetImageViewState(device_data, image_view); 284 assert(view_state); 285 const VkImage &image = view_state->create_info.image; 286 const VkImageSubresourceRange &subRange = view_state->create_info.subresourceRange; 287 auto initial_layout = pRenderPassInfo->pAttachments[i].initialLayout; 288 // TODO: Do not iterate over every possibility - consolidate where possible 289 for (uint32_t j = 0; j < subRange.levelCount; j++) { 290 uint32_t level = subRange.baseMipLevel + j; 291 for (uint32_t k = 0; k < subRange.layerCount; k++) { 292 uint32_t layer = subRange.baseArrayLayer + k; 293 VkImageSubresource sub = {subRange.aspectMask, level, layer}; 294 IMAGE_CMD_BUF_LAYOUT_NODE node; 295 if (!FindCmdBufLayout(device_data, pCB, image, sub, node)) { 296 // Missing layouts will be added during state update 297 continue; 298 } 299 if (initial_layout != VK_IMAGE_LAYOUT_UNDEFINED && initial_layout != node.layout) { 300 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 301 __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS", 302 "You cannot start a render pass using attachment %u where the render pass initial layout is %s " 303 "and the previous known layout of the attachment is %s. 
The layouts must match, or the render " 304 "pass initial layout for the attachment must be VK_IMAGE_LAYOUT_UNDEFINED", 305 i, string_VkImageLayout(initial_layout), string_VkImageLayout(node.layout)); 306 } 307 } 308 } 309 } 310 return skip; 311 } 312 313 void TransitionAttachmentRefLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, FRAMEBUFFER_STATE *pFramebuffer, 314 VkAttachmentReference ref) { 315 if (ref.attachment != VK_ATTACHMENT_UNUSED) { 316 auto image_view = pFramebuffer->createInfo.pAttachments[ref.attachment]; 317 SetImageViewLayout(device_data, pCB, image_view, ref.layout); 318 } 319 } 320 321 void TransitionSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, const RENDER_PASS_STATE *render_pass_state, 322 const int subpass_index, FRAMEBUFFER_STATE *framebuffer_state) { 323 assert(render_pass_state); 324 325 if (framebuffer_state) { 326 auto const &subpass = render_pass_state->createInfo.pSubpasses[subpass_index]; 327 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) { 328 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pInputAttachments[j]); 329 } 330 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) { 331 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pColorAttachments[j]); 332 } 333 if (subpass.pDepthStencilAttachment) { 334 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, *subpass.pDepthStencilAttachment); 335 } 336 } 337 } 338 339 bool ValidateImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE const *pCB, const VkImageMemoryBarrier *mem_barrier, 340 uint32_t level, uint32_t layer, VkImageAspectFlags aspect) { 341 if (!(mem_barrier->subresourceRange.aspectMask & aspect)) { 342 return false; 343 } 344 VkImageSubresource sub = {aspect, level, layer}; 345 IMAGE_CMD_BUF_LAYOUT_NODE node; 346 if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) { 347 return false; 348 } 349 bool skip = false; 350 if (mem_barrier->oldLayout == 
VK_IMAGE_LAYOUT_UNDEFINED) { 351 // TODO: Set memory invalid which is in mem_tracker currently 352 } else if (node.layout != mem_barrier->oldLayout) { 353 skip |= log_msg(core_validation::GetReportData(device_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, 354 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pCB->commandBuffer), __LINE__, 355 DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 356 "For image 0x%" PRIx64 " you cannot transition the layout of aspect %d from %s when current layout is %s.", 357 HandleToUint64(mem_barrier->image), aspect, string_VkImageLayout(mem_barrier->oldLayout), 358 string_VkImageLayout(node.layout)); 359 } 360 return skip; 361 } 362 363 // Transition the layout state for renderpass attachments based on the BeginRenderPass() call. This includes: 364 // 1. Transition into initialLayout state 365 // 2. Transition from initialLayout to layout used in subpass 0 366 void TransitionBeginRenderPassLayouts(layer_data *device_data, GLOBAL_CB_NODE *cb_state, const RENDER_PASS_STATE *render_pass_state, 367 FRAMEBUFFER_STATE *framebuffer_state) { 368 // First transition into initialLayout 369 auto const rpci = render_pass_state->createInfo.ptr(); 370 for (uint32_t i = 0; i < rpci->attachmentCount; ++i) { 371 VkImageView image_view = framebuffer_state->createInfo.pAttachments[i]; 372 SetImageViewLayout(device_data, cb_state, image_view, rpci->pAttachments[i].initialLayout); 373 } 374 // Now transition for first subpass (index 0) 375 TransitionSubpassLayouts(device_data, cb_state, render_pass_state, 0, framebuffer_state); 376 } 377 378 void TransitionImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkImageMemoryBarrier *mem_barrier, 379 uint32_t level, uint32_t layer, VkImageAspectFlags aspect) { 380 if (!(mem_barrier->subresourceRange.aspectMask & aspect)) { 381 return; 382 } 383 VkImageSubresource sub = {aspect, level, layer}; 384 IMAGE_CMD_BUF_LAYOUT_NODE node; 385 if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, 
node)) { 386 pCB->image_layout_change_count++; // Change the version of this data to force revalidation 387 SetLayout(device_data, pCB, mem_barrier->image, sub, 388 IMAGE_CMD_BUF_LAYOUT_NODE(mem_barrier->oldLayout, mem_barrier->newLayout)); 389 return; 390 } 391 if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) { 392 // TODO: Set memory invalid 393 } 394 SetLayout(device_data, pCB, mem_barrier->image, sub, mem_barrier->newLayout); 395 } 396 397 bool VerifyAspectsPresent(VkImageAspectFlags aspect_mask, VkFormat format) { 398 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != 0) { 399 if (!FormatIsColor(format)) return false; 400 } 401 if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != 0) { 402 if (!FormatHasDepth(format)) return false; 403 } 404 if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != 0) { 405 if (!FormatHasStencil(format)) return false; 406 } 407 return true; 408 } 409 410 // Verify an ImageMemoryBarrier's old/new ImageLayouts are compatible with the Image's ImageUsageFlags. 411 bool ValidateBarrierLayoutToImageUsage(layer_data *device_data, const VkImageMemoryBarrier *img_barrier, bool new_not_old, 412 VkImageUsageFlags usage_flags, const char *func_name) { 413 const auto report_data = core_validation::GetReportData(device_data); 414 bool skip = false; 415 const VkImageLayout layout = (new_not_old) ? 
img_barrier->newLayout : img_barrier->oldLayout; 416 UNIQUE_VALIDATION_ERROR_CODE msg_code = VALIDATION_ERROR_UNDEFINED; // sentinel value meaning "no error" 417 418 switch (layout) { 419 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: 420 if ((usage_flags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0) { 421 msg_code = VALIDATION_ERROR_0a000970; 422 } 423 break; 424 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: 425 if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) { 426 msg_code = VALIDATION_ERROR_0a000972; 427 } 428 break; 429 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: 430 if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) { 431 msg_code = VALIDATION_ERROR_0a000974; 432 } 433 break; 434 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: 435 if ((usage_flags & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) == 0) { 436 msg_code = VALIDATION_ERROR_0a000976; 437 } 438 break; 439 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: 440 if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) == 0) { 441 msg_code = VALIDATION_ERROR_0a000978; 442 } 443 break; 444 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: 445 if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) == 0) { 446 msg_code = VALIDATION_ERROR_0a00097a; 447 } 448 break; 449 default: 450 // Other VkImageLayout values do not have VUs defined in this context. 451 break; 452 } 453 454 if (msg_code != VALIDATION_ERROR_UNDEFINED) { 455 skip |= 456 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 457 HandleToUint64(img_barrier->image), __LINE__, msg_code, "DS", 458 "%s: Image barrier 0x%p %sLayout=%s is not compatible with image 0x%" PRIx64 " usage flags 0x%" PRIx32 ". %s", 459 func_name, static_cast<const void *>(img_barrier), ((new_not_old) ? 
"new" : "old"), 460 string_VkImageLayout(layout), HandleToUint64(img_barrier->image), usage_flags, validation_error_map[msg_code]); 461 } 462 return skip; 463 } 464 465 // Verify image barriers are compatible with the images they reference. 466 bool ValidateBarriersToImages(layer_data *device_data, GLOBAL_CB_NODE const *cb_state, uint32_t imageMemoryBarrierCount, 467 const VkImageMemoryBarrier *pImageMemoryBarriers, const char *func_name) { 468 bool skip = false; 469 470 for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) { 471 auto img_barrier = &pImageMemoryBarriers[i]; 472 if (!img_barrier) continue; 473 474 auto image_state = GetImageState(device_data, img_barrier->image); 475 if (image_state) { 476 VkImageUsageFlags usage_flags = image_state->createInfo.usage; 477 skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, false, usage_flags, func_name); 478 skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, true, usage_flags, func_name); 479 480 // Make sure layout is able to be transitioned, currently only presented shared presentable images are locked 481 if (image_state->layout_locked) { 482 // TODO: Add unique id for error when available 483 skip |= log_msg( 484 core_validation::GetReportData(device_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, 485 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 0, "DS", 486 "Attempting to transition shared presentable image 0x%" PRIx64 487 " from layout %s to layout %s, but image has already been presented and cannot have its layout transitioned.", 488 HandleToUint64(img_barrier->image), string_VkImageLayout(img_barrier->oldLayout), 489 string_VkImageLayout(img_barrier->newLayout)); 490 } 491 } 492 493 VkImageCreateInfo *image_create_info = &(GetImageState(device_data, img_barrier->image)->createInfo); 494 // For a Depth/Stencil image both aspects MUST be set 495 if (FormatIsDepthAndStencil(image_create_info->format)) { 496 auto const aspect_mask = img_barrier->subresourceRange.aspectMask; 497 
auto const ds_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; 498 if ((aspect_mask & ds_mask) != (ds_mask)) { 499 skip |= log_msg( 500 core_validation::GetReportData(device_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, 501 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(img_barrier->image), __LINE__, VALIDATION_ERROR_0a00096e, 502 "DS", 503 "%s: Image barrier 0x%p references image 0x%" PRIx64 504 " of format %s that must have the depth and stencil aspects set, but its aspectMask is 0x%" PRIx32 ". %s", 505 func_name, static_cast<const void *>(img_barrier), HandleToUint64(img_barrier->image), 506 string_VkFormat(image_create_info->format), aspect_mask, validation_error_map[VALIDATION_ERROR_0a00096e]); 507 } 508 } 509 uint32_t level_count = ResolveRemainingLevels(&img_barrier->subresourceRange, image_create_info->mipLevels); 510 uint32_t layer_count = ResolveRemainingLayers(&img_barrier->subresourceRange, image_create_info->arrayLayers); 511 512 for (uint32_t j = 0; j < level_count; j++) { 513 uint32_t level = img_barrier->subresourceRange.baseMipLevel + j; 514 for (uint32_t k = 0; k < layer_count; k++) { 515 uint32_t layer = img_barrier->subresourceRange.baseArrayLayer + k; 516 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_COLOR_BIT); 517 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_DEPTH_BIT); 518 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_STENCIL_BIT); 519 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_METADATA_BIT); 520 } 521 } 522 } 523 return skip; 524 } 525 526 void TransitionImageLayouts(layer_data *device_data, VkCommandBuffer cmdBuffer, uint32_t memBarrierCount, 527 const VkImageMemoryBarrier *pImgMemBarriers) { 528 GLOBAL_CB_NODE *pCB = GetCBNode(device_data, cmdBuffer); 529 530 for (uint32_t i = 0; i < memBarrierCount; ++i) { 
531 auto mem_barrier = &pImgMemBarriers[i]; 532 if (!mem_barrier) continue; 533 534 VkImageCreateInfo *image_create_info = &(GetImageState(device_data, mem_barrier->image)->createInfo); 535 uint32_t level_count = ResolveRemainingLevels(&mem_barrier->subresourceRange, image_create_info->mipLevels); 536 uint32_t layer_count = ResolveRemainingLayers(&mem_barrier->subresourceRange, image_create_info->arrayLayers); 537 538 for (uint32_t j = 0; j < level_count; j++) { 539 uint32_t level = mem_barrier->subresourceRange.baseMipLevel + j; 540 for (uint32_t k = 0; k < layer_count; k++) { 541 uint32_t layer = mem_barrier->subresourceRange.baseArrayLayer + k; 542 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_COLOR_BIT); 543 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_DEPTH_BIT); 544 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_STENCIL_BIT); 545 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_METADATA_BIT); 546 } 547 } 548 } 549 } 550 551 bool VerifyImageLayout(layer_data const *device_data, GLOBAL_CB_NODE const *cb_node, IMAGE_STATE *image_state, 552 VkImageSubresourceLayers subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout, 553 const char *caller, UNIQUE_VALIDATION_ERROR_CODE msg_code, bool *error) { 554 const auto report_data = core_validation::GetReportData(device_data); 555 const auto image = image_state->image; 556 bool skip = false; 557 558 for (uint32_t i = 0; i < subLayers.layerCount; ++i) { 559 uint32_t layer = i + subLayers.baseArrayLayer; 560 VkImageSubresource sub = {subLayers.aspectMask, subLayers.mipLevel, layer}; 561 IMAGE_CMD_BUF_LAYOUT_NODE node; 562 if (FindCmdBufLayout(device_data, cb_node, image, sub, node)) { 563 if (node.layout != explicit_layout) { 564 *error = true; 565 // TODO: Improve log message in the next pass 566 skip |= log_msg( 567 report_data, 
VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 568 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 569 "%s: Cannot use image 0x%" PRIx64 " with specific layout %s that doesn't match the actual current layout %s.", 570 caller, HandleToUint64(image), string_VkImageLayout(explicit_layout), string_VkImageLayout(node.layout)); 571 } 572 } 573 } 574 // If optimal_layout is not UNDEFINED, check that layout matches optimal for this case 575 if ((VK_IMAGE_LAYOUT_UNDEFINED != optimal_layout) && (explicit_layout != optimal_layout)) { 576 if (VK_IMAGE_LAYOUT_GENERAL == explicit_layout) { 577 if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) { 578 // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning. 579 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 580 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(cb_node->commandBuffer), __LINE__, 581 DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 582 "%s: For optimal performance image 0x%" PRIx64 " layout should be %s instead of GENERAL.", caller, 583 HandleToUint64(image), string_VkImageLayout(optimal_layout)); 584 } 585 } else if (GetDeviceExtensions(device_data)->vk_khr_shared_presentable_image) { 586 if (image_state->shared_presentable) { 587 if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != explicit_layout) { 588 // TODO: Add unique error id when available. 
589 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 590 __LINE__, msg_code, "DS", 591 "Layout for shared presentable image is %s but must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.", 592 string_VkImageLayout(optimal_layout)); 593 } 594 } 595 } else { 596 *error = true; 597 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 598 HandleToUint64(cb_node->commandBuffer), __LINE__, msg_code, "DS", 599 "%s: Layout for image 0x%" PRIx64 " is %s but can only be %s or VK_IMAGE_LAYOUT_GENERAL. %s", caller, 600 HandleToUint64(image), string_VkImageLayout(explicit_layout), string_VkImageLayout(optimal_layout), 601 validation_error_map[msg_code]); 602 } 603 } 604 return skip; 605 } 606 607 void TransitionFinalSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin, 608 FRAMEBUFFER_STATE *framebuffer_state) { 609 auto renderPass = GetRenderPassState(device_data, pRenderPassBegin->renderPass); 610 if (!renderPass) return; 611 612 const VkRenderPassCreateInfo *pRenderPassInfo = renderPass->createInfo.ptr(); 613 if (framebuffer_state) { 614 for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) { 615 auto image_view = framebuffer_state->createInfo.pAttachments[i]; 616 SetImageViewLayout(device_data, pCB, image_view, pRenderPassInfo->pAttachments[i].finalLayout); 617 } 618 } 619 } 620 621 bool PreCallValidateCreateImage(layer_data *device_data, const VkImageCreateInfo *pCreateInfo, 622 const VkAllocationCallbacks *pAllocator, VkImage *pImage) { 623 bool skip = false; 624 const debug_report_data *report_data = core_validation::GetReportData(device_data); 625 626 if (pCreateInfo->format == VK_FORMAT_UNDEFINED) { 627 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 628 VALIDATION_ERROR_09e0075e, "IMAGE", "vkCreateImage: VkFormat for image must not be 
VK_FORMAT_UNDEFINED. %s", 629 validation_error_map[VALIDATION_ERROR_09e0075e]); 630 631 return skip; 632 } 633 634 bool optimal_tiling = (VK_IMAGE_TILING_OPTIMAL == pCreateInfo->tiling); 635 const char *tiling_string = string_VkImageTiling(pCreateInfo->tiling); 636 const char *format_string = string_VkFormat(pCreateInfo->format); 637 VkFormatProperties properties = GetFormatProperties(device_data, pCreateInfo->format); 638 VkFormatFeatureFlags features = (optimal_tiling ? properties.optimalTilingFeatures : properties.linearTilingFeatures); 639 640 if (0 == features) { 641 std::stringstream ss; 642 UNIQUE_VALIDATION_ERROR_CODE vuid = (optimal_tiling ? VALIDATION_ERROR_09e007ac : VALIDATION_ERROR_09e007a2); 643 ss << "vkCreateImage format parameter " << format_string << " is an unsupported format"; 644 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, vuid, 645 "IMAGE", "%s. %s", ss.str().c_str(), validation_error_map[vuid]); 646 return skip; 647 } 648 649 if ((pCreateInfo->usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) { 650 std::stringstream ss; 651 UNIQUE_VALIDATION_ERROR_CODE vuid = (optimal_tiling ? VALIDATION_ERROR_09e007ae : VALIDATION_ERROR_09e007a4); 652 ss << "vkCreateImage: usage bit VK_IMAGE_USAGE_SAMPLED_BIT is not supported for format " << format_string << " with tiling " 653 << tiling_string; 654 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, vuid, 655 "IMAGE", "%s. %s", ss.str().c_str(), validation_error_map[vuid]); 656 } 657 658 if ((pCreateInfo->usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) { 659 std::stringstream ss; 660 UNIQUE_VALIDATION_ERROR_CODE vuid = (optimal_tiling ? 
VALIDATION_ERROR_09e007b0 : VALIDATION_ERROR_09e007a6); 661 ss << "vkCreateImage: usage bit VK_IMAGE_USAGE_STORAGE_BIT is not supported for format " << format_string << " with tiling " 662 << tiling_string; 663 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, vuid, 664 "IMAGE", "%s. %s", ss.str().c_str(), validation_error_map[vuid]); 665 } 666 667 // TODO: Add checks for EXTENDED_USAGE images to validate images are compatible 668 // For EXTENDED_USAGE images, format can match any image COMPATIBLE with original image 669 if (!GetDeviceExtensions(device_data)->vk_khr_maintenance2 || !(pCreateInfo->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR)) { 670 // Validate that format supports usage as color attachment 671 if ((pCreateInfo->usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && 672 (0 == (features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))) { 673 UNIQUE_VALIDATION_ERROR_CODE vuid = (optimal_tiling ? VALIDATION_ERROR_09e007b2 : VALIDATION_ERROR_09e007a8); 674 std::stringstream ss; 675 ss << "vkCreateImage: usage bit VK_IMAGE_USAGE_COLOR_ATTACHMENT is not supported for format " << format_string 676 << " with tiling " << tiling_string; 677 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, vuid, 678 "IMAGE", "%s. %s", ss.str().c_str(), validation_error_map[vuid]); 679 } 680 681 // Validate that format supports usage as depth/stencil attachment 682 if ((pCreateInfo->usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && 683 (0 == (features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))) { 684 UNIQUE_VALIDATION_ERROR_CODE vuid = (optimal_tiling ? 
VALIDATION_ERROR_09e007b4 : VALIDATION_ERROR_09e007aa); 685 std::stringstream ss; 686 ss << "vkCreateImage: usage bit VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT is not supported for format " << format_string 687 << " with tiling " << tiling_string; 688 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, vuid, 689 "IMAGE", "%s. %s", ss.str().c_str(), validation_error_map[vuid]); 690 } 691 } 692 693 if ((pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) && (VK_IMAGE_TYPE_2D != pCreateInfo->imageType)) { 694 std::stringstream ss; 695 ss << "vkCreateImage: Image type must be VK_IMAGE_TYPE_2D when VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT flag bit is set"; 696 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 697 VALIDATION_ERROR_09e0076a, "IMAGE", "%s. %s", ss.str().c_str(), 698 validation_error_map[VALIDATION_ERROR_09e0076a]); 699 } 700 701 const VkPhysicalDeviceLimits *device_limits = &(GetPhysicalDeviceProperties(device_data)->limits); 702 VkImageFormatProperties format_limits; // Format limits may exceed general device limits 703 VkResult err = GetImageFormatProperties(device_data, pCreateInfo, &format_limits); 704 if (VK_SUCCESS != err) { 705 std::stringstream ss; 706 ss << "vkCreateImage: The combination of format, type, tiling, usage and flags supplied in the VkImageCreateInfo struct is " 707 "reported by vkGetPhysicalDeviceImageFormatProperties() as unsupported"; 708 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 709 VALIDATION_ERROR_09e00758, "IMAGE", "%s. 
%s", ss.str().c_str(), 710 validation_error_map[VALIDATION_ERROR_09e00758]); 711 return skip; 712 } 713 714 if ((VK_IMAGE_TYPE_1D == pCreateInfo->imageType) && 715 (pCreateInfo->extent.width > std::max(device_limits->maxImageDimension1D, format_limits.maxExtent.width))) { 716 std::stringstream ss; 717 ss << "vkCreateImage: 1D image width exceeds maximum supported width for format " << format_string; 718 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 719 VALIDATION_ERROR_09e0076e, "IMAGE", "%s. %s", ss.str().c_str(), 720 validation_error_map[VALIDATION_ERROR_09e0076e]); 721 } 722 723 if (VK_IMAGE_TYPE_2D == pCreateInfo->imageType) { 724 if (0 == (pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)) { 725 if (pCreateInfo->extent.width > std::max(device_limits->maxImageDimension2D, format_limits.maxExtent.width) || 726 pCreateInfo->extent.height > std::max(device_limits->maxImageDimension2D, format_limits.maxExtent.height)) { 727 std::stringstream ss; 728 ss << "vkCreateImage: 2D image extent exceeds maximum supported width or height for format " << format_string; 729 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 730 VALIDATION_ERROR_09e00770, "IMAGE", "%s. %s", ss.str().c_str(), 731 validation_error_map[VALIDATION_ERROR_09e00770]); 732 } 733 } else { 734 if (pCreateInfo->extent.width > std::max(device_limits->maxImageDimensionCube, format_limits.maxExtent.width) || 735 pCreateInfo->extent.height > std::max(device_limits->maxImageDimensionCube, format_limits.maxExtent.height)) { 736 std::stringstream ss; 737 ss << "vkCreateImage: 2D image extent exceeds maximum supported width or height for cube-compatible images with " 738 "format " 739 << format_string; 740 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 741 VALIDATION_ERROR_09e00772, "IMAGE", "%s. 
%s", ss.str().c_str(), 742 validation_error_map[VALIDATION_ERROR_09e00772]); 743 } 744 } 745 } 746 747 if (VK_IMAGE_TYPE_3D == pCreateInfo->imageType) { 748 if ((pCreateInfo->extent.width > std::max(device_limits->maxImageDimension3D, format_limits.maxExtent.width)) || 749 (pCreateInfo->extent.height > std::max(device_limits->maxImageDimension3D, format_limits.maxExtent.height)) || 750 (pCreateInfo->extent.depth > std::max(device_limits->maxImageDimension3D, format_limits.maxExtent.depth))) { 751 std::stringstream ss; 752 ss << "vkCreateImage: 3D image extent exceeds maximum supported width, height, or depth for format " << format_string; 753 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 754 VALIDATION_ERROR_09e00776, "IMAGE", "%s. %s", ss.str().c_str(), 755 validation_error_map[VALIDATION_ERROR_09e00776]); 756 } 757 } 758 759 // NOTE: As of 1/30/2018 the spec VU language is as in the commented code below. I believe this is an 760 // error in the spec, and have submitted Gitlab Vulkan issue #1151 to have it changed to match the 761 // implementation shown. DJH 762 // 763 // if ((pCreateInfo->mipLevels > format_limits.maxMipLevels) && 764 // (std::max({ pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->extent.depth }) > 765 // device_limits->maxImageDimension3D)) { 766 if (pCreateInfo->mipLevels > format_limits.maxMipLevels) { 767 std::stringstream ss; 768 ss << "vkCreateImage: Image mip levels exceed image format maxMipLevels for format " << format_string; 769 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 770 VALIDATION_ERROR_09e0077e, "IMAGE", "%s. 
%s", ss.str().c_str(), 771 validation_error_map[VALIDATION_ERROR_09e0077e]); 772 } 773 774 VkImageUsageFlags attach_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | 775 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT; 776 if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.width > device_limits->maxFramebufferWidth)) { 777 std::stringstream ss; 778 ss << "vkCreateImage: Image usage flags include a frame buffer attachment bit and image width exceeds device " 779 "maxFramebufferWidth"; 780 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 781 VALIDATION_ERROR_09e00788, "IMAGE", "%s. %s", ss.str().c_str(), 782 validation_error_map[VALIDATION_ERROR_09e00788]); 783 } 784 785 if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.height > device_limits->maxFramebufferHeight)) { 786 std::stringstream ss; 787 ss << "vkCreateImage: Image usage flags include a frame buffer attachment bit and image height exceeds device " 788 "maxFramebufferHeight"; 789 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 790 VALIDATION_ERROR_09e0078a, "IMAGE", "%s. 
%s", ss.str().c_str(), 791 validation_error_map[VALIDATION_ERROR_09e0078a]); 792 } 793 794 uint64_t total_size = (uint64_t)pCreateInfo->extent.width * (uint64_t)pCreateInfo->extent.height * 795 (uint64_t)pCreateInfo->extent.depth * (uint64_t)pCreateInfo->arrayLayers * 796 (uint64_t)pCreateInfo->samples * (uint64_t)FormatSize(pCreateInfo->format); 797 798 // Round up to imageGranularity boundary 799 VkDeviceSize imageGranularity = GetPhysicalDeviceProperties(device_data)->limits.bufferImageGranularity; 800 uint64_t ig_mask = imageGranularity - 1; 801 total_size = (total_size + ig_mask) & ~ig_mask; 802 803 if (total_size > format_limits.maxResourceSize) { 804 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__, 805 IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image", 806 "CreateImage resource size exceeds allowable maximum Image resource size = 0x%" PRIxLEAST64 807 ", maximum resource size = 0x%" PRIxLEAST64 " ", 808 total_size, format_limits.maxResourceSize); 809 } 810 811 if (pCreateInfo->arrayLayers > format_limits.maxArrayLayers) { 812 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__, 813 VALIDATION_ERROR_09e00780, "Image", 814 "CreateImage arrayLayers=%d exceeds allowable maximum supported by format of %d. %s", 815 pCreateInfo->arrayLayers, format_limits.maxArrayLayers, validation_error_map[VALIDATION_ERROR_09e00780]); 816 } 817 818 if ((pCreateInfo->samples & format_limits.sampleCounts) == 0) { 819 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__, 820 VALIDATION_ERROR_09e0078e, "Image", "CreateImage samples %s is not supported by format 0x%.8X. 
%s", 821 string_VkSampleCountFlagBits(pCreateInfo->samples), format_limits.sampleCounts, 822 validation_error_map[VALIDATION_ERROR_09e0078e]); 823 } 824 825 if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) && (!GetEnabledFeatures(device_data)->sparseBinding)) { 826 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 827 VALIDATION_ERROR_09e00792, "DS", 828 "vkCreateImage(): the sparseBinding device feature is disabled: Images cannot be created with the " 829 "VK_IMAGE_CREATE_SPARSE_BINDING_BIT set. %s", 830 validation_error_map[VALIDATION_ERROR_09e00792]); 831 } 832 833 if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures(device_data)->sparseResidencyAliased)) { 834 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 835 DRAWSTATE_INVALID_FEATURE, "DS", 836 "vkCreateImage(): the sparseResidencyAliased device feature is disabled: Images cannot be created with the " 837 "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT set."); 838 } 839 840 if (GetDeviceExtensions(device_data)->vk_khr_maintenance2) { 841 if (pCreateInfo->flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR) { 842 if (!(FormatIsCompressed_BC(pCreateInfo->format) || FormatIsCompressed_ASTC_LDR(pCreateInfo->format) || 843 FormatIsCompressed_ETC2_EAC(pCreateInfo->format))) { 844 // TODO: Add Maintenance2 VUID 845 skip |= 846 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 847 VALIDATION_ERROR_UNDEFINED, "DS", 848 "vkCreateImage(): If pCreateInfo->flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, " 849 "format must be block, ETC or ASTC compressed, but is %s", 850 string_VkFormat(pCreateInfo->format)); 851 } 852 if (!(pCreateInfo->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)) { 853 // TODO: Add Maintenance2 VUID 854 skip |= 855 log_msg(report_data, 
VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 856 VALIDATION_ERROR_UNDEFINED, "DS", 857 "vkCreateImage(): If pCreateInfo->flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, " 858 "flags must also contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT."); 859 } 860 } 861 } 862 863 return skip; 864 } 865 866 void PostCallRecordCreateImage(layer_data *device_data, const VkImageCreateInfo *pCreateInfo, VkImage *pImage) { 867 IMAGE_LAYOUT_NODE image_state; 868 image_state.layout = pCreateInfo->initialLayout; 869 image_state.format = pCreateInfo->format; 870 GetImageMap(device_data)->insert(std::make_pair(*pImage, std::unique_ptr<IMAGE_STATE>(new IMAGE_STATE(*pImage, pCreateInfo)))); 871 ImageSubresourcePair subpair{*pImage, false, VkImageSubresource()}; 872 (*core_validation::GetImageSubresourceMap(device_data))[*pImage].push_back(subpair); 873 (*core_validation::GetImageLayoutMap(device_data))[subpair] = image_state; 874 } 875 876 bool PreCallValidateDestroyImage(layer_data *device_data, VkImage image, IMAGE_STATE **image_state, VK_OBJECT *obj_struct) { 877 const CHECK_DISABLED *disabled = core_validation::GetDisables(device_data); 878 *image_state = core_validation::GetImageState(device_data, image); 879 *obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage}; 880 if (disabled->destroy_image) return false; 881 bool skip = false; 882 if (*image_state) { 883 skip |= core_validation::ValidateObjectNotInUse(device_data, *image_state, *obj_struct, "vkDestroyImage", 884 VALIDATION_ERROR_252007d0); 885 } 886 return skip; 887 } 888 889 void PostCallRecordDestroyImage(layer_data *device_data, VkImage image, IMAGE_STATE *image_state, VK_OBJECT obj_struct) { 890 core_validation::invalidateCommandBuffers(device_data, image_state->cb_bindings, obj_struct); 891 // Clean up memory mapping, bindings and range references for image 892 for (auto mem_binding : image_state->GetBoundMemory()) { 893 auto mem_info = 
core_validation::GetMemObjInfo(device_data, mem_binding); 894 if (mem_info) { 895 core_validation::RemoveImageMemoryRange(obj_struct.handle, mem_info); 896 } 897 } 898 core_validation::ClearMemoryObjectBindings(device_data, obj_struct.handle, kVulkanObjectTypeImage); 899 // Remove image from imageMap 900 core_validation::GetImageMap(device_data)->erase(image); 901 std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *imageSubresourceMap = 902 core_validation::GetImageSubresourceMap(device_data); 903 904 const auto &sub_entry = imageSubresourceMap->find(image); 905 if (sub_entry != imageSubresourceMap->end()) { 906 for (const auto &pair : sub_entry->second) { 907 core_validation::GetImageLayoutMap(device_data)->erase(pair); 908 } 909 imageSubresourceMap->erase(sub_entry); 910 } 911 } 912 913 bool ValidateImageAttributes(layer_data *device_data, IMAGE_STATE *image_state, VkImageSubresourceRange range) { 914 bool skip = false; 915 const debug_report_data *report_data = core_validation::GetReportData(device_data); 916 917 if (range.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) { 918 char const str[] = "vkCmdClearColorImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_COLOR_BIT"; 919 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 920 HandleToUint64(image_state->image), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", str); 921 } 922 923 if (FormatIsDepthOrStencil(image_state->createInfo.format)) { 924 char const str[] = "vkCmdClearColorImage called with depth/stencil image."; 925 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 926 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_1880000e, "IMAGE", "%s. 
%s", str, 927 validation_error_map[VALIDATION_ERROR_1880000e]); 928 } else if (FormatIsCompressed(image_state->createInfo.format)) { 929 char const str[] = "vkCmdClearColorImage called with compressed image."; 930 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 931 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_1880000e, "IMAGE", "%s. %s", str, 932 validation_error_map[VALIDATION_ERROR_1880000e]); 933 } 934 935 if (!(image_state->createInfo.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) { 936 char const str[] = "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT."; 937 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 938 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_18800004, "IMAGE", "%s. %s", str, 939 validation_error_map[VALIDATION_ERROR_18800004]); 940 } 941 return skip; 942 } 943 944 uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) { 945 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS 946 uint32_t mip_level_count = range->levelCount; 947 if (range->levelCount == VK_REMAINING_MIP_LEVELS) { 948 mip_level_count = mip_levels - range->baseMipLevel; 949 } 950 return mip_level_count; 951 } 952 953 uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) { 954 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS 955 uint32_t array_layer_count = range->layerCount; 956 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) { 957 array_layer_count = layers - range->baseArrayLayer; 958 } 959 return array_layer_count; 960 } 961 962 bool VerifyClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *image_state, 963 VkImageSubresourceRange range, VkImageLayout dest_image_layout, const char *func_name) { 964 bool skip = false; 965 const debug_report_data 
*report_data = core_validation::GetReportData(device_data); 966 967 uint32_t level_count = ResolveRemainingLevels(&range, image_state->createInfo.mipLevels); 968 uint32_t layer_count = ResolveRemainingLayers(&range, image_state->createInfo.arrayLayers); 969 970 if (dest_image_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) { 971 if (dest_image_layout == VK_IMAGE_LAYOUT_GENERAL) { 972 if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) { 973 // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning. 974 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 975 HandleToUint64(image_state->image), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 976 "%s: Layout for cleared image should be TRANSFER_DST_OPTIMAL instead of GENERAL.", func_name); 977 } 978 } else if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR == dest_image_layout) { 979 if (!GetDeviceExtensions(device_data)->vk_khr_shared_presentable_image) { 980 // TODO: Add unique error id when available. 
981 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 982 HandleToUint64(image_state->image), __LINE__, 0, "DS", 983 "Must enable VK_KHR_shared_presentable_image extension before creating images with a layout type " 984 "of VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR."); 985 986 } else { 987 if (image_state->shared_presentable) { 988 skip |= log_msg( 989 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 990 HandleToUint64(image_state->image), __LINE__, 0, "DS", 991 "Layout for shared presentable cleared image is %s but can only be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.", 992 string_VkImageLayout(dest_image_layout)); 993 } 994 } 995 } else { 996 UNIQUE_VALIDATION_ERROR_CODE error_code = VALIDATION_ERROR_1880000a; 997 if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) { 998 error_code = VALIDATION_ERROR_18a00018; 999 } else { 1000 assert(strcmp(func_name, "vkCmdClearColorImage()") == 0); 1001 } 1002 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1003 HandleToUint64(image_state->image), __LINE__, error_code, "DS", 1004 "%s: Layout for cleared image is %s but can only be TRANSFER_DST_OPTIMAL or GENERAL. 
%s", func_name, 1005 string_VkImageLayout(dest_image_layout), validation_error_map[error_code]); 1006 } 1007 } 1008 1009 for (uint32_t level_index = 0; level_index < level_count; ++level_index) { 1010 uint32_t level = level_index + range.baseMipLevel; 1011 for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) { 1012 uint32_t layer = layer_index + range.baseArrayLayer; 1013 VkImageSubresource sub = {range.aspectMask, level, layer}; 1014 IMAGE_CMD_BUF_LAYOUT_NODE node; 1015 if (FindCmdBufLayout(device_data, cb_node, image_state->image, sub, node)) { 1016 if (node.layout != dest_image_layout) { 1017 UNIQUE_VALIDATION_ERROR_CODE error_code = VALIDATION_ERROR_18800008; 1018 if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) { 1019 error_code = VALIDATION_ERROR_18a00016; 1020 } else { 1021 assert(strcmp(func_name, "vkCmdClearColorImage()") == 0); 1022 } 1023 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, 1024 __LINE__, error_code, "DS", 1025 "%s: Cannot clear an image whose layout is %s and doesn't match the current layout %s. 
%s", 1026 func_name, string_VkImageLayout(dest_image_layout), string_VkImageLayout(node.layout), 1027 validation_error_map[error_code]); 1028 } 1029 } 1030 } 1031 } 1032 1033 return skip; 1034 } 1035 1036 void RecordClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImage image, VkImageSubresourceRange range, 1037 VkImageLayout dest_image_layout) { 1038 VkImageCreateInfo *image_create_info = &(GetImageState(device_data, image)->createInfo); 1039 uint32_t level_count = ResolveRemainingLevels(&range, image_create_info->mipLevels); 1040 uint32_t layer_count = ResolveRemainingLayers(&range, image_create_info->arrayLayers); 1041 1042 for (uint32_t level_index = 0; level_index < level_count; ++level_index) { 1043 uint32_t level = level_index + range.baseMipLevel; 1044 for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) { 1045 uint32_t layer = layer_index + range.baseArrayLayer; 1046 VkImageSubresource sub = {range.aspectMask, level, layer}; 1047 IMAGE_CMD_BUF_LAYOUT_NODE node; 1048 if (!FindCmdBufLayout(device_data, cb_node, image, sub, node)) { 1049 SetLayout(device_data, cb_node, image, sub, IMAGE_CMD_BUF_LAYOUT_NODE(dest_image_layout, dest_image_layout)); 1050 } 1051 } 1052 } 1053 } 1054 1055 bool PreCallValidateCmdClearColorImage(layer_data *dev_data, VkCommandBuffer commandBuffer, VkImage image, 1056 VkImageLayout imageLayout, uint32_t rangeCount, const VkImageSubresourceRange *pRanges) { 1057 bool skip = false; 1058 // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state 1059 auto cb_node = GetCBNode(dev_data, commandBuffer); 1060 auto image_state = GetImageState(dev_data, image); 1061 if (cb_node && image_state) { 1062 skip |= ValidateMemoryIsBoundToImage(dev_data, image_state, "vkCmdClearColorImage()", VALIDATION_ERROR_18800006); 1063 skip |= ValidateCmdQueueFlags(dev_data, cb_node, "vkCmdClearColorImage()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, 1064 VALIDATION_ERROR_18802415); 1065 skip |= ValidateCmd(dev_data, 
cb_node, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()"); 1066 skip |= insideRenderPass(dev_data, cb_node, "vkCmdClearColorImage()", VALIDATION_ERROR_18800017); 1067 for (uint32_t i = 0; i < rangeCount; ++i) { 1068 std::string param_name = "pRanges[" + std::to_string(i) + "]"; 1069 skip |= ValidateCmdClearColorSubresourceRange(dev_data, image_state, pRanges[i], param_name.c_str()); 1070 skip |= ValidateImageAttributes(dev_data, image_state, pRanges[i]); 1071 skip |= VerifyClearImageLayout(dev_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearColorImage()"); 1072 } 1073 } 1074 return skip; 1075 } 1076 1077 // This state recording routine is shared between ClearColorImage and ClearDepthStencilImage 1078 void PreCallRecordCmdClearImage(layer_data *dev_data, VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, 1079 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) { 1080 auto cb_node = GetCBNode(dev_data, commandBuffer); 1081 auto image_state = GetImageState(dev_data, image); 1082 if (cb_node && image_state) { 1083 AddCommandBufferBindingImage(dev_data, cb_node, image_state); 1084 std::function<bool()> function = [=]() { 1085 SetImageMemoryValid(dev_data, image_state, true); 1086 return false; 1087 }; 1088 cb_node->queue_submit_functions.push_back(function); 1089 for (uint32_t i = 0; i < rangeCount; ++i) { 1090 RecordClearImageLayout(dev_data, cb_node, image, pRanges[i], imageLayout); 1091 } 1092 } 1093 } 1094 1095 bool PreCallValidateCmdClearDepthStencilImage(layer_data *device_data, VkCommandBuffer commandBuffer, VkImage image, 1096 VkImageLayout imageLayout, uint32_t rangeCount, 1097 const VkImageSubresourceRange *pRanges) { 1098 bool skip = false; 1099 const debug_report_data *report_data = core_validation::GetReportData(device_data); 1100 1101 // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state 1102 auto cb_node = GetCBNode(device_data, commandBuffer); 1103 auto image_state = GetImageState(device_data, image); 1104 
if (cb_node && image_state) { 1105 skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCmdClearDepthStencilImage()", VALIDATION_ERROR_18a00014); 1106 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearDepthStencilImage()", VK_QUEUE_GRAPHICS_BIT, 1107 VALIDATION_ERROR_18a02415); 1108 skip |= ValidateCmd(device_data, cb_node, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()"); 1109 skip |= insideRenderPass(device_data, cb_node, "vkCmdClearDepthStencilImage()", VALIDATION_ERROR_18a00017); 1110 for (uint32_t i = 0; i < rangeCount; ++i) { 1111 std::string param_name = "pRanges[" + std::to_string(i) + "]"; 1112 skip |= ValidateCmdClearDepthSubresourceRange(device_data, image_state, pRanges[i], param_name.c_str()); 1113 skip |= 1114 VerifyClearImageLayout(device_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearDepthStencilImage()"); 1115 // Image aspect must be depth or stencil or both 1116 if (((pRanges[i].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) && 1117 ((pRanges[i].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) { 1118 char const str[] = 1119 "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_DEPTH_BIT " 1120 "and/or VK_IMAGE_ASPECT_STENCIL_BIT"; 1121 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1122 HandleToUint64(commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", str); 1123 } 1124 } 1125 if (image_state && !FormatIsDepthOrStencil(image_state->createInfo.format)) { 1126 char const str[] = "vkCmdClearDepthStencilImage called without a depth/stencil image."; 1127 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1128 HandleToUint64(image), __LINE__, VALIDATION_ERROR_18a0001c, "IMAGE", "%s. 
%s", str, 1129 validation_error_map[VALIDATION_ERROR_18a0001c]); 1130 } 1131 if (VK_IMAGE_USAGE_TRANSFER_DST_BIT != (VK_IMAGE_USAGE_TRANSFER_DST_BIT & image_state->createInfo.usage)) { 1132 char const str[] = 1133 "vkCmdClearDepthStencilImage() called with an image that was not created with the VK_IMAGE_USAGE_TRANSFER_DST_BIT " 1134 "set."; 1135 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1136 HandleToUint64(image), __LINE__, VALIDATION_ERROR_18a00012, "IMAGE", "%s. %s", str, 1137 validation_error_map[VALIDATION_ERROR_18a00012]); 1138 } 1139 VkFormatProperties props = GetFormatProperties(device_data, image_state->createInfo.format); 1140 VkImageTiling tiling = image_state->createInfo.tiling; 1141 VkFormatFeatureFlags flags = (tiling == VK_IMAGE_TILING_LINEAR ? props.linearTilingFeatures : props.optimalTilingFeatures); 1142 if ((GetDeviceExtensions(device_data)->vk_khr_maintenance1) && 1143 (VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR != (flags & VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR))) { 1144 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1145 HandleToUint64(image), __LINE__, VALIDATION_ERROR_18a00010, "IMAGE", 1146 "vkCmdClearDepthStencilImage() called with an image of format %s and tiling %s that does not support " 1147 "VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR. 
%s", 1148 string_VkFormat(image_state->createInfo.format), string_VkImageTiling(image_state->createInfo.tiling), 1149 validation_error_map[VALIDATION_ERROR_18a00010]); 1150 } 1151 } 1152 return skip; 1153 } 1154 1155 // Returns true if [x, xoffset] and [y, yoffset] overlap 1156 static bool RangesIntersect(int32_t start, uint32_t start_offset, int32_t end, uint32_t end_offset) { 1157 bool result = false; 1158 uint32_t intersection_min = std::max(static_cast<uint32_t>(start), static_cast<uint32_t>(end)); 1159 uint32_t intersection_max = std::min(static_cast<uint32_t>(start) + start_offset, static_cast<uint32_t>(end) + end_offset); 1160 1161 if (intersection_max > intersection_min) { 1162 result = true; 1163 } 1164 return result; 1165 } 1166 1167 // Returns true if two VkImageCopy structures overlap 1168 static bool RegionIntersects(const VkImageCopy *src, const VkImageCopy *dst, VkImageType type) { 1169 bool result = false; 1170 if ((src->srcSubresource.mipLevel == dst->dstSubresource.mipLevel) && 1171 (RangesIntersect(src->srcSubresource.baseArrayLayer, src->srcSubresource.layerCount, dst->dstSubresource.baseArrayLayer, 1172 dst->dstSubresource.layerCount))) { 1173 result = true; 1174 switch (type) { 1175 case VK_IMAGE_TYPE_3D: 1176 result &= RangesIntersect(src->srcOffset.z, src->extent.depth, dst->dstOffset.z, dst->extent.depth); 1177 // Intentionally fall through to 2D case 1178 case VK_IMAGE_TYPE_2D: 1179 result &= RangesIntersect(src->srcOffset.y, src->extent.height, dst->dstOffset.y, dst->extent.height); 1180 // Intentionally fall through to 1D case 1181 case VK_IMAGE_TYPE_1D: 1182 result &= RangesIntersect(src->srcOffset.x, src->extent.width, dst->dstOffset.x, dst->extent.width); 1183 break; 1184 default: 1185 // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation 1186 assert(false); 1187 } 1188 } 1189 return result; 1190 } 1191 1192 // Returns non-zero if offset and extent exceed image extents 1193 static const uint32_t x_bit = 1; 
1194 static const uint32_t y_bit = 2; 1195 static const uint32_t z_bit = 4; 1196 static uint32_t ExceedsBounds(const VkOffset3D *offset, const VkExtent3D *extent, const VkExtent3D *image_extent) { 1197 uint32_t result = 0; 1198 // Extents/depths cannot be negative but checks left in for clarity 1199 if ((offset->z + extent->depth > image_extent->depth) || (offset->z < 0) || 1200 ((offset->z + static_cast<int32_t>(extent->depth)) < 0)) { 1201 result |= z_bit; 1202 } 1203 if ((offset->y + extent->height > image_extent->height) || (offset->y < 0) || 1204 ((offset->y + static_cast<int32_t>(extent->height)) < 0)) { 1205 result |= y_bit; 1206 } 1207 if ((offset->x + extent->width > image_extent->width) || (offset->x < 0) || 1208 ((offset->x + static_cast<int32_t>(extent->width)) < 0)) { 1209 result |= x_bit; 1210 } 1211 return result; 1212 } 1213 1214 // Test if two VkExtent3D structs are equivalent 1215 static inline bool IsExtentEqual(const VkExtent3D *extent, const VkExtent3D *other_extent) { 1216 bool result = true; 1217 if ((extent->width != other_extent->width) || (extent->height != other_extent->height) || 1218 (extent->depth != other_extent->depth)) { 1219 result = false; 1220 } 1221 return result; 1222 } 1223 1224 // For image copies between compressed/uncompressed formats, the extent is provided in source image texels 1225 // Destination image texel extents must be adjusted by block size for the dest validation checks 1226 VkExtent3D GetAdjustedDestImageExtent(VkFormat src_format, VkFormat dst_format, VkExtent3D extent) { 1227 VkExtent3D adjusted_extent = extent; 1228 if ((FormatIsCompressed(src_format) && (!FormatIsCompressed(dst_format)))) { 1229 VkExtent3D block_size = FormatCompressedTexelBlockExtent(src_format); 1230 adjusted_extent.width /= block_size.width; 1231 adjusted_extent.height /= block_size.height; 1232 adjusted_extent.depth /= block_size.depth; 1233 } else if ((!FormatIsCompressed(src_format) && (FormatIsCompressed(dst_format)))) { 1234 
VkExtent3D block_size = FormatCompressedTexelBlockExtent(dst_format); 1235 adjusted_extent.width *= block_size.width; 1236 adjusted_extent.height *= block_size.height; 1237 adjusted_extent.depth *= block_size.depth; 1238 } 1239 return adjusted_extent; 1240 } 1241 1242 // Returns the effective extent of an image subresource, adjusted for mip level and array depth. 1243 static inline VkExtent3D GetImageSubresourceExtent(const IMAGE_STATE *img, const VkImageSubresourceLayers *subresource) { 1244 const uint32_t mip = subresource->mipLevel; 1245 1246 // Return zero extent if mip level doesn't exist 1247 if (mip >= img->createInfo.mipLevels) { 1248 return VkExtent3D{0, 0, 0}; 1249 } 1250 1251 // Don't allow mip adjustment to create 0 dim, but pass along a 0 if that's what subresource specified 1252 VkExtent3D extent = img->createInfo.extent; 1253 extent.width = (0 == extent.width ? 0 : std::max(1U, extent.width >> mip)); 1254 extent.height = (0 == extent.height ? 0 : std::max(1U, extent.height >> mip)); 1255 extent.depth = (0 == extent.depth ? 0 : std::max(1U, extent.depth >> mip)); 1256 1257 // Image arrays have an effective z extent that isn't diminished by mip level 1258 if (VK_IMAGE_TYPE_3D != img->createInfo.imageType) { 1259 extent.depth = img->createInfo.arrayLayers; 1260 } 1261 1262 return extent; 1263 } 1264 1265 // Test if the extent argument has all dimensions set to 0. 1266 static inline bool IsExtentAllZeroes(const VkExtent3D *extent) { 1267 return ((extent->width == 0) && (extent->height == 0) && (extent->depth == 0)); 1268 } 1269 1270 // Test if the extent argument has any dimensions set to 0. 1271 static inline bool IsExtentSizeZero(const VkExtent3D *extent) { 1272 return ((extent->width == 0) || (extent->height == 0) || (extent->depth == 0)); 1273 } 1274 1275 // Returns the image transfer granularity for a specific image scaled by compressed block size if necessary. 
// Returns the queue family's minImageTransferGranularity for this command buffer's pool,
// scaled up by the compressed texel block dimensions when the image format is compressed.
static inline VkExtent3D GetScaledItg(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img) {
    // Default to (0, 0, 0) granularity in case we can't find the real granularity for the physical device.
    VkExtent3D granularity = {0, 0, 0};
    auto pPool = GetCommandPoolNode(device_data, cb_node->createInfo.commandPool);
    if (pPool) {
        granularity =
            GetPhysDevProperties(device_data)->queue_family_properties[pPool->queueFamilyIndex].minImageTransferGranularity;
        if (FormatIsCompressed(img->createInfo.format)) {
            auto block_size = FormatCompressedTexelBlockExtent(img->createInfo.format);
            // NOTE(review): only width and height are scaled; depth is left unscaled -- presumably
            // because compressed texel blocks have depth 1. Confirm against the format tables.
            granularity.width *= block_size.width;
            granularity.height *= block_size.height;
        }
    }
    return granularity;
}

// Test elements of a VkExtent3D structure against alignment constraints contained in another VkExtent3D structure.
// Returns true when every dimension of 'extent' is a multiple of the corresponding 'granularity' dimension
// (per SafeModulo's handling of zero divisors -- presumably treated as aligned; verify in vk_layer_utils).
static inline bool IsExtentAligned(const VkExtent3D *extent, const VkExtent3D *granularity) {
    bool valid = true;
    if ((SafeModulo(extent->depth, granularity->depth) != 0) || (SafeModulo(extent->width, granularity->width) != 0) ||
        (SafeModulo(extent->height, granularity->height) != 0)) {
        valid = false;
    }
    return valid;
}

// Check elements of a VkOffset3D structure against a queue family's Image Transfer Granularity values.
// 'i' is the region index and 'member' the field name, both used only for the error message.
// Returns true if a validation error was logged.
static inline bool CheckItgOffset(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkOffset3D *offset,
                                  const VkExtent3D *granularity, const uint32_t i, const char *function, const char *member) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    // Re-express the (absolute value of the) offset as an extent so the extent helpers can be reused
    VkExtent3D offset_extent = {};
    offset_extent.width = static_cast<uint32_t>(abs(offset->x));
    offset_extent.height = static_cast<uint32_t>(abs(offset->y));
    offset_extent.depth = static_cast<uint32_t>(abs(offset->z));
    if (IsExtentAllZeroes(granularity)) {
        // If the queue family image transfer granularity is (0, 0, 0), then the offset must always be (0, 0, 0)
        if (IsExtentAllZeroes(&offset_extent) == false) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                            "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) must be (x=0, y=0, z=0) when the command buffer's queue family "
                            "image transfer granularity is (w=0, h=0, d=0).",
                            function, i, member, offset->x, offset->y, offset->z);
        }
    } else {
        // If the queue family image transfer granularity is not (0, 0, 0), then the offset dimensions must always be even
        // integer multiples of the image transfer granularity.
        if (IsExtentAligned(&offset_extent, granularity) == false) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                            "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) dimensions must be even integer multiples of this command "
                            "buffer's queue family image transfer granularity (w=%d, h=%d, d=%d).",
                            function, i, member, offset->x, offset->y, offset->z, granularity->width, granularity->height,
                            granularity->depth);
        }
    }
    return skip;
}

// Check elements of a VkExtent3D structure against a queue family's Image Transfer Granularity values.
// Each relevant axis must either be granularity-aligned or have offset + extent reach the subresource edge.
// Returns true if a validation error was logged.
static inline bool CheckItgExtent(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkExtent3D *extent,
                                  const VkOffset3D *offset, const VkExtent3D *granularity, const VkExtent3D *subresource_extent,
                                  const VkImageType image_type, const uint32_t i, const char *function, const char *member) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    if (IsExtentAllZeroes(granularity)) {
        // If the queue family image transfer granularity is (0, 0, 0), then the extent must always match the image
        // subresource extent.
        if (IsExtentEqual(extent, subresource_extent) == false) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                            "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d) "
                            "when the command buffer's queue family image transfer granularity is (w=0, h=0, d=0).",
                            function, i, member, extent->width, extent->height, extent->depth, subresource_extent->width,
                            subresource_extent->height, subresource_extent->depth);
        }
    } else {
        // If the queue family image transfer granularity is not (0, 0, 0), then the extent dimensions must always be even
        // integer multiples of the image transfer granularity or the offset + extent dimensions must always match the image
        // subresource extent dimensions.
        VkExtent3D offset_extent_sum = {};
        offset_extent_sum.width = static_cast<uint32_t>(abs(offset->x)) + extent->width;
        offset_extent_sum.height = static_cast<uint32_t>(abs(offset->y)) + extent->height;
        offset_extent_sum.depth = static_cast<uint32_t>(abs(offset->z)) + extent->depth;
        bool x_ok = true;
        bool y_ok = true;
        bool z_ok = true;
        // Fall-through switch: only the axes that exist for this image type are constrained
        switch (image_type) {
            case VK_IMAGE_TYPE_3D:
                z_ok = ((0 == SafeModulo(extent->depth, granularity->depth)) ||
                        (subresource_extent->depth == offset_extent_sum.depth));
                // Intentionally fall through to 2D case
            case VK_IMAGE_TYPE_2D:
                y_ok = ((0 == SafeModulo(extent->height, granularity->height)) ||
                        (subresource_extent->height == offset_extent_sum.height));
                // Intentionally fall through to 1D case
            case VK_IMAGE_TYPE_1D:
                x_ok = ((0 == SafeModulo(extent->width, granularity->width)) ||
                        (subresource_extent->width == offset_extent_sum.width));
                break;
            default:
                // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
                assert(false);
        }
        if (!(x_ok && y_ok && z_ok)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                            "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) dimensions must be even integer multiples of this command "
                            "buffer's queue family image transfer granularity (w=%d, h=%d, d=%d) or offset (x=%d, y=%d, z=%d) + "
                            "extent (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d).",
                            function, i, member, extent->width, extent->height, extent->depth, granularity->width,
                            granularity->height, granularity->depth, offset->x, offset->y, offset->z, extent->width, extent->height,
                            extent->depth, subresource_extent->width, subresource_extent->height, subresource_extent->depth);
        }
    }
    return skip;
}

// Check a uint32_t width or stride value against a queue family's Image Transfer Granularity width value.
// Returns true if a validation error was logged.
static inline bool CheckItgInt(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const uint32_t value,
                               const uint32_t granularity, const uint32_t i, const char *function, const char *member) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    bool skip = false;
    if (SafeModulo(value, granularity) != 0) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                        "%s: pRegion[%d].%s (%d) must be an even integer multiple of this command buffer's queue family image "
                        "transfer granularity width (%d).",
                        function, i, member, value, granularity);
    }
    return skip;
}

// Check a VkDeviceSize value against a queue family's Image Transfer Granularity width value.
// Returns true if a validation error was logged.
static inline bool CheckItgSize(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkDeviceSize value,
                                const uint32_t granularity, const uint32_t i, const char *function, const char *member) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    if (SafeModulo(value, granularity) != 0) {
        skip |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
            "%s: pRegion[%d].%s (%" PRIdLEAST64
            ") must be an even integer multiple of this command buffer's queue family image transfer granularity width (%d).",
            function, i, member, value, granularity);
    }
    return skip;
}

// Check valid usage Image Transfer Granularity requirements for elements of a VkBufferImageCopy structure
bool ValidateCopyBufferImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
                                                            const IMAGE_STATE *img, const VkBufferImageCopy *region,
                                                            const uint32_t i, const char *function) {
    bool skip = false;
    if (FormatIsCompressed(img->createInfo.format) == true) {
        // TODO: Add granularity checking for compressed formats

        // bufferRowLength must be a multiple of the compressed texel block width
        // bufferImageHeight must be a multiple of the compressed texel block height
        // all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block
        // bufferOffset must be a multiple of the compressed texel block size in bytes
        // imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x)
        //     must equal the image subresource width
        // imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y)
        //     must equal the image subresource height
        // imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z)
        //     must equal the image subresource depth
    } else {
        VkExtent3D granularity = GetScaledItg(device_data, cb_node, img);
        skip |= CheckItgSize(device_data, cb_node, region->bufferOffset, granularity.width, i, function, "bufferOffset");
        skip |= CheckItgInt(device_data, cb_node, region->bufferRowLength, granularity.width, i, function, "bufferRowLength");
        // NOTE(review): bufferImageHeight is checked against granularity.width, not granularity.height --
        // confirm against the image-transfer-granularity valid-usage rules in the spec.
        skip |= CheckItgInt(device_data, cb_node, region->bufferImageHeight, granularity.width, i, function, "bufferImageHeight");
        skip |= CheckItgOffset(device_data, cb_node, &region->imageOffset, &granularity, i, function, "imageOffset");
        VkExtent3D subresource_extent = GetImageSubresourceExtent(img, &region->imageSubresource);
        skip |= CheckItgExtent(device_data, cb_node, &region->imageExtent, &region->imageOffset, &granularity, &subresource_extent,
                               img->createInfo.imageType, i, function, "imageExtent");
    }
    return skip;
}

// Check valid usage Image Transfer Granularity requirements for elements of a VkImageCopy structure
bool ValidateCopyImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
                                                      const IMAGE_STATE *src_img, const IMAGE_STATE *dst_img,
                                                      const VkImageCopy *region, const uint32_t i, const char *function) {
    bool skip = false;
    // Source image checks
    VkExtent3D granularity = GetScaledItg(device_data, cb_node, src_img);
    skip |= CheckItgOffset(device_data, cb_node, &region->srcOffset, &granularity, i, function, "srcOffset");
    VkExtent3D subresource_extent = GetImageSubresourceExtent(src_img, &region->srcSubresource);
    const VkExtent3D extent = region->extent;
    skip |= CheckItgExtent(device_data, cb_node, &extent, &region->srcOffset, &granularity, &subresource_extent,
                           src_img->createInfo.imageType, i, function, "extent");

    // Destination image checks
    granularity = GetScaledItg(device_data, cb_node, dst_img);
    skip |= CheckItgOffset(device_data, cb_node, &region->dstOffset, &granularity, i, function, "dstOffset");
    // Adjust dest extent, if necessary (compressed<->uncompressed copies measure extent in source texels)
    const VkExtent3D dest_effective_extent =
        GetAdjustedDestImageExtent(src_img->createInfo.format, dst_img->createInfo.format, extent);
    subresource_extent = GetImageSubresourceExtent(dst_img, &region->dstSubresource);
    skip |= CheckItgExtent(device_data, cb_node, &dest_effective_extent, &region->dstOffset, &granularity, &subresource_extent,
                           dst_img->createInfo.imageType, i, function, "extent");
    return skip;
}

// Validate contents of a VkImageCopy struct: per-region checks of offsets, extents, and subresources
// for vkCmdCopyImage, on both source and destination images. Returns true if any error was logged.
bool ValidateImageCopyData(const layer_data *device_data, const debug_report_data *report_data, const uint32_t regionCount,
                           const VkImageCopy *ic_regions, const IMAGE_STATE *src_state, const IMAGE_STATE *dst_state) {
    bool skip = false;

    for (uint32_t i = 0; i < regionCount; i++) {
        const VkImageCopy region = ic_regions[i];

        // For comp<->uncomp copies, the copy extent for the dest image must be adjusted
        const VkExtent3D src_copy_extent = region.extent;
        const VkExtent3D dst_copy_extent =
            GetAdjustedDestImageExtent(src_state->createInfo.format, dst_state->createInfo.format, region.extent);

        bool slice_override = false;
        uint32_t depth_slices = 0;

        // Special case for copying between a 1D/2D array and a 3D image
        // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up.
        if ((VK_IMAGE_TYPE_3D == src_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != dst_state->createInfo.imageType)) {
            depth_slices = region.dstSubresource.layerCount;  // Slice count from 2D subresource
            slice_override = (depth_slices != 1);
        } else if ((VK_IMAGE_TYPE_3D == dst_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != src_state->createInfo.imageType)) {
            depth_slices = region.srcSubresource.layerCount;  // Slice count from 2D subresource
            slice_override = (depth_slices != 1);
        }

        // Do all checks on source image
        //
        if (src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
            if ((0 != region.srcOffset.y) || (1 != src_copy_extent.height)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c00124, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] srcOffset.y is %d and extent.height is %d. For 1D images these must "
                                "be 0 and 1, respectively. %s",
                                i, region.srcOffset.y, src_copy_extent.height, validation_error_map[VALIDATION_ERROR_09c00124]);
            }
        }

        if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (src_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) {
            if ((0 != region.srcOffset.z) || (1 != src_copy_extent.depth)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c00128, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d and extent.depth is %d. For 1D and 2D images "
                                "these must be 0 and 1, respectively. %s",
                                i, region.srcOffset.z, src_copy_extent.depth, validation_error_map[VALIDATION_ERROR_09c00128]);
            }
        }

        // VU01199 changed with mnt1: with maintenance1 only a 3D *source* constrains srcSubresource layers
        if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) {
            if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
                if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
                    skip |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and srcSubresource.layerCount "
                                "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s",
                                i, region.srcSubresource.baseArrayLayer, region.srcSubresource.layerCount,
                                validation_error_map[VALIDATION_ERROR_09c0011a]);
                }
            }
        } else {  // Pre maint 1
            if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
                if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                    HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE",
                                    "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and "
                                    "srcSubresource.layerCount is %d. For copies with either source or dest of type "
                                    "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively. %s",
                                    i, region.srcSubresource.baseArrayLayer, region.srcSubresource.layerCount,
                                    validation_error_map[VALIDATION_ERROR_09c0011a]);
                }
            }
        }

        // TODO: this VU is redundant with VU01224. Gitlab issue 812 submitted to get it removed from the spec.
        if ((region.srcSubresource.baseArrayLayer >= src_state->createInfo.arrayLayers) ||
            (region.srcSubresource.baseArrayLayer + region.srcSubresource.layerCount > src_state->createInfo.arrayLayers)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c0012a, "IMAGE",
                            "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer (%d) must be less than the source image's "
                            "arrayLayers (%d), and the sum of baseArrayLayer and srcSubresource.layerCount (%d) must be less than "
                            "or equal to the source image's arrayLayers. %s",
                            i, region.srcSubresource.baseArrayLayer, src_state->createInfo.arrayLayers,
                            region.srcSubresource.layerCount, validation_error_map[VALIDATION_ERROR_09c0012a]);
        }

        // Checks that apply only to compressed images
        if (FormatIsCompressed(src_state->createInfo.format)) {
            const VkExtent3D block_size = FormatCompressedTexelBlockExtent(src_state->createInfo.format);

            // image offsets must be multiples of block dimensions
            if ((SafeModulo(region.srcOffset.x, block_size.width) != 0) ||
                (SafeModulo(region.srcOffset.y, block_size.height) != 0) ||
                (SafeModulo(region.srcOffset.z, block_size.depth) != 0)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c0013a, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] srcOffset (%d, %d) must be multiples of the compressed image's "
                                "texel width & height (%d, %d). %s.",
                                i, region.srcOffset.x, region.srcOffset.y, block_size.width, block_size.height,
                                validation_error_map[VALIDATION_ERROR_09c0013a]);
            }

            // Extent width must be a multiple of block width, or extent+offset width must equal subresource width
            const VkExtent3D mip_extent = GetImageSubresourceExtent(src_state, &(region.srcSubresource));
            if ((SafeModulo(src_copy_extent.width, block_size.width) != 0) &&
                (src_copy_extent.width + region.srcOffset.x != mip_extent.width)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c0013c, "IMAGE",
                            "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
                            "width (%d), or when added to srcOffset.x (%d) must equal the image subresource width (%d). %s.",
                            i, src_copy_extent.width, block_size.width, region.srcOffset.x, mip_extent.width,
                            validation_error_map[VALIDATION_ERROR_09c0013c]);
            }

            // Extent height must be a multiple of block height, or extent+offset height must equal subresource height
            if ((SafeModulo(src_copy_extent.height, block_size.height) != 0) &&
                (src_copy_extent.height + region.srcOffset.y != mip_extent.height)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c0013e, "IMAGE",
                            "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block "
                            "height (%d), or when added to srcOffset.y (%d) must equal the image subresource height (%d). %s.",
                            i, src_copy_extent.height, block_size.height, region.srcOffset.y, mip_extent.height,
                            validation_error_map[VALIDATION_ERROR_09c0013e]);
            }

            // Extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
            // NOTE(review): the message text below says "extent width" but the value logged is the depth --
            // the wording appears inconsistent with the check; flag for message cleanup.
            uint32_t copy_depth = (slice_override ? depth_slices : src_copy_extent.depth);
            if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + region.srcOffset.z != mip_extent.depth)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(src_state->image), __LINE__, VALIDATION_ERROR_09c00140, "IMAGE",
                            "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
                            "depth (%d), or when added to srcOffset.z (%d) must equal the image subresource depth (%d). %s.",
                            i, src_copy_extent.depth, block_size.depth, region.srcOffset.z, mip_extent.depth,
                            validation_error_map[VALIDATION_ERROR_09c00140]);
            }
        }  // Compressed

        // Do all checks on dest image
        //
        if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
            if ((0 != region.dstOffset.y) || (1 != dst_copy_extent.height)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c00130, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] dstOffset.y is %d and dst_copy_extent.height is %d. For 1D images "
                                "these must be 0 and 1, respectively. %s",
                                i, region.dstOffset.y, dst_copy_extent.height, validation_error_map[VALIDATION_ERROR_09c00130]);
            }
        }

        if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (dst_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) {
            if ((0 != region.dstOffset.z) || (1 != dst_copy_extent.depth)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c00134, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d and dst_copy_extent.depth is %d. For 1D and 2D "
                                "images these must be 0 and 1, respectively. %s",
                                i, region.dstOffset.z, dst_copy_extent.depth, validation_error_map[VALIDATION_ERROR_09c00134]);
            }
        }

        // NOTE(review): this unconditional 3D-dest check duplicates the maintenance1 branch immediately
        // below, so a violation is reported twice -- candidate for consolidation.
        if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
            if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
                                "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s",
                                i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount,
                                validation_error_map[VALIDATION_ERROR_09c0011a]);
            }
        }
        // VU01199 changed with mnt1
        if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) {
            if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
                if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
                    skip |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
                                "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s",
                                i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount,
                                validation_error_map[VALIDATION_ERROR_09c0011a]);
                }
            }
        } else {  // Pre maint 1
            if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
                if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                    HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE",
                                    "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and "
                                    "dstSubresource.layerCount is %d. For copies with either source or dest of type "
                                    "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively. %s",
                                    i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount,
                                    validation_error_map[VALIDATION_ERROR_09c0011a]);
                }
            }
        }

        // TODO: this VU is redundant with VU01224. Gitlab issue 812 submitted to get it removed from the spec.
        if ((region.dstSubresource.baseArrayLayer >= dst_state->createInfo.arrayLayers) ||
            (region.dstSubresource.baseArrayLayer + region.dstSubresource.layerCount > dst_state->createInfo.arrayLayers)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c00136, "IMAGE",
                            "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer (%d) must be less than the dest image's "
                            "arrayLayers (%d), and the sum of baseArrayLayer and dstSubresource.layerCount (%d) must be less than "
                            "or equal to the dest image's arrayLayers. %s",
                            i, region.dstSubresource.baseArrayLayer, dst_state->createInfo.arrayLayers,
                            region.dstSubresource.layerCount, validation_error_map[VALIDATION_ERROR_09c00136]);
        }

        // Checks that apply only to compressed images
        if (FormatIsCompressed(dst_state->createInfo.format)) {
            const VkExtent3D block_size = FormatCompressedTexelBlockExtent(dst_state->createInfo.format);

            // image offsets must be multiples of block dimensions
            if ((SafeModulo(region.dstOffset.x, block_size.width) != 0) ||
                (SafeModulo(region.dstOffset.y, block_size.height) != 0) ||
                (SafeModulo(region.dstOffset.z, block_size.depth) != 0)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c00144, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] dstOffset (%d, %d) must be multiples of the compressed image's "
                                "texel width & height (%d, %d). %s.",
                                i, region.dstOffset.x, region.dstOffset.y, block_size.width, block_size.height,
                                validation_error_map[VALIDATION_ERROR_09c00144]);
            }

            // Extent width must be a multiple of block width, or dst_copy_extent+offset width must equal subresource width
            const VkExtent3D mip_extent = GetImageSubresourceExtent(dst_state, &(region.dstSubresource));
            if ((SafeModulo(dst_copy_extent.width, block_size.width) != 0) &&
                (dst_copy_extent.width + region.dstOffset.x != mip_extent.width)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c00146, "IMAGE",
                            "vkCmdCopyImage(): pRegion[%d] dst_copy_extent width (%d) must be a multiple of the compressed texture "
                            "block width (%d), or when added to dstOffset.x (%d) must equal the image subresource width (%d). %s.",
                            i, dst_copy_extent.width, block_size.width, region.dstOffset.x, mip_extent.width,
                            validation_error_map[VALIDATION_ERROR_09c00146]);
            }

            // Extent height must be a multiple of block height, or dst_copy_extent+offset height must equal subresource height
            if ((SafeModulo(dst_copy_extent.height, block_size.height) != 0) &&
                (dst_copy_extent.height + region.dstOffset.y != mip_extent.height)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c00148, "IMAGE",
                                "vkCmdCopyImage(): pRegion[%d] dst_copy_extent height (%d) must be a multiple of the compressed "
                                "texture block height (%d), or when added to dstOffset.y (%d) must equal the image subresource "
                                "height (%d). %s.",
                                i, dst_copy_extent.height, block_size.height, region.dstOffset.y, mip_extent.height,
                                validation_error_map[VALIDATION_ERROR_09c00148]);
            }

            // Extent depth must be a multiple of block depth, or dst_copy_extent+offset depth must equal subresource depth
            uint32_t copy_depth = (slice_override ?
depth_slices : dst_copy_extent.depth); 1735 if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + region.dstOffset.z != mip_extent.depth)) { 1736 skip |= 1737 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1738 HandleToUint64(dst_state->image), __LINE__, VALIDATION_ERROR_09c0014a, "IMAGE", 1739 "vkCmdCopyImage(): pRegion[%d] dst_copy_extent width (%d) must be a multiple of the compressed texture " 1740 "block depth (%d), or when added to dstOffset.z (%d) must equal the image subresource depth (%d). %s.", 1741 i, dst_copy_extent.depth, block_size.depth, region.dstOffset.z, mip_extent.depth, 1742 validation_error_map[VALIDATION_ERROR_09c0014a]); 1743 } 1744 } // Compressed 1745 } 1746 return skip; 1747 } 1748 1749 bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state, 1750 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageCopy *regions, 1751 VkImageLayout src_image_layout, VkImageLayout dst_image_layout) { 1752 bool skip = false; 1753 const debug_report_data *report_data = core_validation::GetReportData(device_data); 1754 skip = ValidateImageCopyData(device_data, report_data, region_count, regions, src_image_state, dst_image_state); 1755 1756 VkCommandBuffer command_buffer = cb_node->commandBuffer; 1757 1758 for (uint32_t i = 0; i < region_count; i++) { 1759 const VkImageCopy region = regions[i]; 1760 1761 // For comp/uncomp copies, the copy extent for the dest image must be adjusted 1762 VkExtent3D src_copy_extent = region.extent; 1763 VkExtent3D dst_copy_extent = 1764 GetAdjustedDestImageExtent(src_image_state->createInfo.format, dst_image_state->createInfo.format, region.extent); 1765 1766 bool slice_override = false; 1767 uint32_t depth_slices = 0; 1768 1769 // Special case for copying between a 1D/2D array and a 3D image 1770 // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. 
Heads up. 1771 if ((VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType) && 1772 (VK_IMAGE_TYPE_3D != dst_image_state->createInfo.imageType)) { 1773 depth_slices = region.dstSubresource.layerCount; // Slice count from 2D subresource 1774 slice_override = (depth_slices != 1); 1775 } else if ((VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType) && 1776 (VK_IMAGE_TYPE_3D != src_image_state->createInfo.imageType)) { 1777 depth_slices = region.srcSubresource.layerCount; // Slice count from 2D subresource 1778 slice_override = (depth_slices != 1); 1779 } 1780 1781 if (region.srcSubresource.layerCount == 0) { 1782 std::stringstream ss; 1783 ss << "vkCmdCopyImage: number of layers in pRegions[" << i << "] srcSubresource is zero"; 1784 skip |= 1785 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1786 HandleToUint64(command_buffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str()); 1787 } 1788 1789 if (region.dstSubresource.layerCount == 0) { 1790 std::stringstream ss; 1791 ss << "vkCmdCopyImage: number of layers in pRegions[" << i << "] dstSubresource is zero"; 1792 skip |= 1793 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1794 HandleToUint64(command_buffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str()); 1795 } 1796 1797 if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) { 1798 // No chance of mismatch if we're overriding depth slice count 1799 if (!slice_override) { 1800 // The number of depth slices in srcSubresource and dstSubresource must match 1801 // Depth comes from layerCount for 1D,2D resources, from extent.depth for 3D 1802 uint32_t src_slices = 1803 (VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType ? src_copy_extent.depth 1804 : region.srcSubresource.layerCount); 1805 uint32_t dst_slices = 1806 (VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType ? 
dst_copy_extent.depth 1807 : region.dstSubresource.layerCount); 1808 if (src_slices != dst_slices) { 1809 std::stringstream ss; 1810 ss << "vkCmdCopyImage: number of depth slices in source and destination subresources for pRegions[" << i 1811 << "] do not match"; 1812 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1813 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00118, "IMAGE", "%s. %s", 1814 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c00118]); 1815 } 1816 } 1817 } else { 1818 // For each region the layerCount member of srcSubresource and dstSubresource must match 1819 if (region.srcSubresource.layerCount != region.dstSubresource.layerCount) { 1820 std::stringstream ss; 1821 ss << "vkCmdCopyImage: number of layers in source and destination subresources for pRegions[" << i 1822 << "] do not match"; 1823 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1824 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00118, "IMAGE", "%s. %s", 1825 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c00118]); 1826 } 1827 } 1828 1829 // For each region, the aspectMask member of srcSubresource and dstSubresource must match 1830 if (region.srcSubresource.aspectMask != region.dstSubresource.aspectMask) { 1831 char const str[] = "vkCmdCopyImage: Src and dest aspectMasks for each region must match"; 1832 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1833 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00112, "IMAGE", "%s. 
%s", str, 1834 validation_error_map[VALIDATION_ERROR_09c00112]); 1835 } 1836 1837 // For each region, the aspectMask member of srcSubresource must be present in the source image 1838 if (!VerifyAspectsPresent(region.srcSubresource.aspectMask, src_image_state->createInfo.format)) { 1839 std::stringstream ss; 1840 ss << "vkCmdCopyImage: pRegion[" << i 1841 << "] srcSubresource.aspectMask cannot specify aspects not present in source image"; 1842 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1843 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0011c, "IMAGE", "%s. %s", 1844 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c0011c]); 1845 } 1846 1847 // For each region, the aspectMask member of dstSubresource must be present in the destination image 1848 if (!VerifyAspectsPresent(region.dstSubresource.aspectMask, dst_image_state->createInfo.format)) { 1849 std::stringstream ss; 1850 ss << "vkCmdCopyImage: pRegion[" << i << "] dstSubresource.aspectMask cannot specify aspects not present in dest image"; 1851 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1852 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0011e, "IMAGE", "%s. %s", 1853 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c0011e]); 1854 } 1855 1856 // AspectMask must not contain VK_IMAGE_ASPECT_METADATA_BIT 1857 if ((region.srcSubresource.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) || 1858 (region.dstSubresource.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT)) { 1859 std::stringstream ss; 1860 ss << "vkCmdCopyImage: pRegions[" << i << "] may not specify aspectMask containing VK_IMAGE_ASPECT_METADATA_BIT"; 1861 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1862 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600150, "IMAGE", "%s. 
%s", 1863 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600150]); 1864 } 1865 1866 // For each region, if aspectMask contains VK_IMAGE_ASPECT_COLOR_BIT, it must not contain either of 1867 // VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT 1868 if ((region.srcSubresource.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) && 1869 (region.srcSubresource.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) { 1870 char const str[] = "vkCmdCopyImage aspectMask cannot specify both COLOR and DEPTH/STENCIL aspects"; 1871 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1872 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a60014e, "IMAGE", "%s. %s", str, 1873 validation_error_map[VALIDATION_ERROR_0a60014e]); 1874 } 1875 1876 // MipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created 1877 if (region.srcSubresource.mipLevel >= src_image_state->createInfo.mipLevels) { 1878 std::stringstream ss; 1879 ss << "vkCmdCopyImage: pRegions[" << i 1880 << "] specifies a src mipLevel greater than the number specified when the srcImage was created."; 1881 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1882 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600152, "IMAGE", "%s. %s", 1883 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600152]); 1884 } 1885 if (region.dstSubresource.mipLevel >= dst_image_state->createInfo.mipLevels) { 1886 std::stringstream ss; 1887 ss << "vkCmdCopyImage: pRegions[" << i 1888 << "] specifies a dst mipLevel greater than the number specified when the dstImage was created."; 1889 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1890 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600152, "IMAGE", "%s. 
%s", 1891 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600152]); 1892 } 1893 1894 // (baseArrayLayer + layerCount) must be less than or equal to the arrayLayers specified in VkImageCreateInfo when the 1895 // image was created 1896 if ((region.srcSubresource.baseArrayLayer + region.srcSubresource.layerCount) > src_image_state->createInfo.arrayLayers) { 1897 std::stringstream ss; 1898 ss << "vkCmdCopyImage: srcImage arrayLayers was " << src_image_state->createInfo.arrayLayers << " but subRegion[" << i 1899 << "] baseArrayLayer + layerCount is " << (region.srcSubresource.baseArrayLayer + region.srcSubresource.layerCount); 1900 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1901 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600154, "IMAGE", "%s. %s", 1902 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600154]); 1903 } 1904 if ((region.dstSubresource.baseArrayLayer + region.dstSubresource.layerCount) > dst_image_state->createInfo.arrayLayers) { 1905 std::stringstream ss; 1906 ss << "vkCmdCopyImage: dstImage arrayLayers was " << dst_image_state->createInfo.arrayLayers << " but subRegion[" << i 1907 << "] baseArrayLayer + layerCount is " << (region.dstSubresource.baseArrayLayer + region.dstSubresource.layerCount); 1908 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1909 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600154, "IMAGE", "%s. 
%s", 1910 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600154]); 1911 } 1912 1913 // Check region extents for 1D-1D, 2D-2D, and 3D-3D copies 1914 if (src_image_state->createInfo.imageType == dst_image_state->createInfo.imageType) { 1915 // The source region specified by a given element of regions must be a region that is contained within srcImage 1916 VkExtent3D img_extent = GetImageSubresourceExtent(src_image_state, &(region.srcSubresource)); 1917 if (0 != ExceedsBounds(®ion.srcOffset, &src_copy_extent, &img_extent)) { 1918 std::stringstream ss; 1919 ss << "vkCmdCopyImage: Source pRegion[" << i << "] with mipLevel [ " << region.srcSubresource.mipLevel 1920 << " ], offset [ " << region.srcOffset.x << ", " << region.srcOffset.y << ", " << region.srcOffset.z 1921 << " ], extent [ " << src_copy_extent.width << ", " << src_copy_extent.height << ", " << src_copy_extent.depth 1922 << " ] exceeds the source image dimensions"; 1923 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1924 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_190000f4, "IMAGE", "%s. 
%s", 1925 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_190000f4]); 1926 } 1927 1928 // The destination region specified by a given element of regions must be a region that is contained within dst_image 1929 img_extent = GetImageSubresourceExtent(dst_image_state, &(region.dstSubresource)); 1930 if (0 != ExceedsBounds(®ion.dstOffset, &dst_copy_extent, &img_extent)) { 1931 std::stringstream ss; 1932 ss << "vkCmdCopyImage: Dest pRegion[" << i << "] with mipLevel [ " << region.dstSubresource.mipLevel 1933 << " ], offset [ " << region.dstOffset.x << ", " << region.dstOffset.y << ", " << region.dstOffset.z 1934 << " ], extent [ " << dst_copy_extent.width << ", " << dst_copy_extent.height << ", " << dst_copy_extent.depth 1935 << " ] exceeds the destination image dimensions"; 1936 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1937 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_190000f6, "IMAGE", "%s. %s", 1938 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_190000f6]); 1939 } 1940 } 1941 1942 // Each dimension offset + extent limits must fall with image subresource extent 1943 VkExtent3D subresource_extent = GetImageSubresourceExtent(src_image_state, &(region.srcSubresource)); 1944 if (slice_override) src_copy_extent.depth = depth_slices; 1945 uint32_t extent_check = ExceedsBounds(&(region.srcOffset), &src_copy_extent, &subresource_extent); 1946 if (extent_check & x_bit) { 1947 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1948 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00120, "IMAGE", 1949 "vkCmdCopyImage: Source image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource " 1950 "width [%1d]. 
%s", 1951 i, region.srcOffset.x, src_copy_extent.width, subresource_extent.width, 1952 validation_error_map[VALIDATION_ERROR_09c00120]); 1953 } 1954 1955 if (extent_check & y_bit) { 1956 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1957 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00122, "IMAGE", 1958 "vkCmdCopyImage: Source image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource " 1959 "height [%1d]. %s", 1960 i, region.srcOffset.y, src_copy_extent.height, subresource_extent.height, 1961 validation_error_map[VALIDATION_ERROR_09c00122]); 1962 } 1963 if (extent_check & z_bit) { 1964 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1965 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00126, "IMAGE", 1966 "vkCmdCopyImage: Source image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource " 1967 "depth [%1d]. %s", 1968 i, region.srcOffset.z, src_copy_extent.depth, subresource_extent.depth, 1969 validation_error_map[VALIDATION_ERROR_09c00126]); 1970 } 1971 1972 // Adjust dest extent if necessary 1973 subresource_extent = GetImageSubresourceExtent(dst_image_state, &(region.dstSubresource)); 1974 if (slice_override) dst_copy_extent.depth = depth_slices; 1975 1976 extent_check = ExceedsBounds(&(region.dstOffset), &dst_copy_extent, &subresource_extent); 1977 if (extent_check & x_bit) { 1978 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1979 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0012c, "IMAGE", 1980 "vkCmdCopyImage: Dest image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource " 1981 "width [%1d]. 
%s", 1982 i, region.dstOffset.x, dst_copy_extent.width, subresource_extent.width, 1983 validation_error_map[VALIDATION_ERROR_09c0012c]); 1984 } 1985 if (extent_check & y_bit) { 1986 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1987 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0012e, "IMAGE", 1988 "vkCmdCopyImage: Dest image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource " 1989 "height [%1d]. %s", 1990 i, region.dstOffset.y, dst_copy_extent.height, subresource_extent.height, 1991 validation_error_map[VALIDATION_ERROR_09c0012e]); 1992 } 1993 if (extent_check & z_bit) { 1994 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1995 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00132, "IMAGE", 1996 "vkCmdCopyImage: Dest image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource " 1997 "depth [%1d]. %s", 1998 i, region.dstOffset.z, dst_copy_extent.depth, subresource_extent.depth, 1999 validation_error_map[VALIDATION_ERROR_09c00132]); 2000 } 2001 2002 // The union of all source regions, and the union of all destination regions, specified by the elements of regions, 2003 // must not overlap in memory 2004 if (src_image_state->image == dst_image_state->image) { 2005 for (uint32_t j = 0; j < region_count; j++) { 2006 if (RegionIntersects(®ion, ®ions[j], src_image_state->createInfo.imageType)) { 2007 std::stringstream ss; 2008 ss << "vkCmdCopyImage: pRegions[" << i << "] src overlaps with pRegions[" << j << "]."; 2009 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2010 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_190000f8, "IMAGE", "%s. 
%s", 2011 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_190000f8]); 2012 } 2013 } 2014 } 2015 } 2016 2017 // The formats of src_image and dst_image must be compatible. Formats are considered compatible if their texel size in bytes 2018 // is the same between both formats. For example, VK_FORMAT_R8G8B8A8_UNORM is compatible with VK_FORMAT_R32_UINT because 2019 // because both texels are 4 bytes in size. Depth/stencil formats must match exactly. 2020 if (FormatIsDepthOrStencil(src_image_state->createInfo.format) || FormatIsDepthOrStencil(dst_image_state->createInfo.format)) { 2021 if (src_image_state->createInfo.format != dst_image_state->createInfo.format) { 2022 char const str[] = "vkCmdCopyImage called with unmatched source and dest image depth/stencil formats."; 2023 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2024 HandleToUint64(command_buffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str); 2025 } 2026 } else { 2027 size_t srcSize = FormatSize(src_image_state->createInfo.format); 2028 size_t destSize = FormatSize(dst_image_state->createInfo.format); 2029 if (srcSize != destSize) { 2030 char const str[] = "vkCmdCopyImage called with unmatched source and dest image format sizes."; 2031 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2032 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_1900010e, "IMAGE", "%s. 
%s", str, 2033 validation_error_map[VALIDATION_ERROR_1900010e]); 2034 } 2035 } 2036 2037 // Source and dest image sample counts must match 2038 if (src_image_state->createInfo.samples != dst_image_state->createInfo.samples) { 2039 char const str[] = "vkCmdCopyImage() called on image pair with non-identical sample counts."; 2040 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2041 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_19000110, "IMAGE", "%s %s", str, 2042 validation_error_map[VALIDATION_ERROR_19000110]); 2043 } 2044 2045 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImage()", VALIDATION_ERROR_190000fe); 2046 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyImage()", VALIDATION_ERROR_19000108); 2047 // Validate that SRC & DST images have correct usage flags set 2048 skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, VALIDATION_ERROR_190000fc, 2049 "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"); 2050 skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, VALIDATION_ERROR_19000106, 2051 "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT"); 2052 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyImage()", 2053 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, VALIDATION_ERROR_19002415); 2054 skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGE, "vkCmdCopyImage()"); 2055 skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyImage()", VALIDATION_ERROR_19000017); 2056 bool hit_error = false; 2057 for (uint32_t i = 0; i < region_count; ++i) { 2058 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, regions[i].srcSubresource, src_image_layout, 2059 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImage()", VALIDATION_ERROR_19000102, &hit_error); 2060 skip |= 
VerifyImageLayout(device_data, cb_node, dst_image_state, regions[i].dstSubresource, dst_image_layout, 2061 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyImage()", VALIDATION_ERROR_1900010c, &hit_error); 2062 skip |= ValidateCopyImageTransferGranularityRequirements(device_data, cb_node, src_image_state, dst_image_state, 2063 ®ions[i], i, "vkCmdCopyImage()"); 2064 } 2065 2066 return skip; 2067 } 2068 2069 void PreCallRecordCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state, 2070 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageCopy *regions, 2071 VkImageLayout src_image_layout, VkImageLayout dst_image_layout) { 2072 // Make sure that all image slices are updated to correct layout 2073 for (uint32_t i = 0; i < region_count; ++i) { 2074 SetImageLayout(device_data, cb_node, src_image_state, regions[i].srcSubresource, src_image_layout); 2075 SetImageLayout(device_data, cb_node, dst_image_state, regions[i].dstSubresource, dst_image_layout); 2076 } 2077 // Update bindings between images and cmd buffer 2078 AddCommandBufferBindingImage(device_data, cb_node, src_image_state); 2079 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state); 2080 std::function<bool()> function = [=]() { return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdCopyImage()"); }; 2081 cb_node->queue_submit_functions.push_back(function); 2082 function = [=]() { 2083 SetImageMemoryValid(device_data, dst_image_state, true); 2084 return false; 2085 }; 2086 cb_node->queue_submit_functions.push_back(function); 2087 } 2088 2089 // Returns true if sub_rect is entirely contained within rect 2090 static inline bool ContainsRect(VkRect2D rect, VkRect2D sub_rect) { 2091 if ((sub_rect.offset.x < rect.offset.x) || (sub_rect.offset.x + sub_rect.extent.width > rect.offset.x + rect.extent.width) || 2092 (sub_rect.offset.y < rect.offset.y) || (sub_rect.offset.y + sub_rect.extent.height > rect.offset.y + rect.extent.height)) 2093 
return false; 2094 return true; 2095 } 2096 2097 bool PreCallValidateCmdClearAttachments(layer_data *device_data, VkCommandBuffer commandBuffer, uint32_t attachmentCount, 2098 const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects) { 2099 GLOBAL_CB_NODE *cb_node = GetCBNode(device_data, commandBuffer); 2100 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2101 2102 bool skip = false; 2103 if (cb_node) { 2104 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearAttachments()", VK_QUEUE_GRAPHICS_BIT, 2105 VALIDATION_ERROR_18602415); 2106 skip |= ValidateCmd(device_data, cb_node, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()"); 2107 // Warn if this is issued prior to Draw Cmd and clearing the entire attachment 2108 if (!cb_node->hasDrawCmd && (cb_node->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) && 2109 (cb_node->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) { 2110 // There are times where app needs to use ClearAttachments (generally when reusing a buffer inside of a render pass) 2111 // This warning should be made more specific. It'd be best to avoid triggering this test if it's a use that must call 2112 // CmdClearAttachments. 2113 skip |= log_msg( 2114 report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2115 HandleToUint64(commandBuffer), 0, DRAWSTATE_CLEAR_CMD_BEFORE_DRAW, "DS", 2116 "vkCmdClearAttachments() issued on command buffer object 0x%" PRIx64 2117 " prior to any Draw Cmds. 
It is recommended you use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.", 2118 HandleToUint64(commandBuffer)); 2119 } 2120 skip |= outsideRenderPass(device_data, cb_node, "vkCmdClearAttachments()", VALIDATION_ERROR_18600017); 2121 } 2122 2123 // Validate that attachment is in reference list of active subpass 2124 if (cb_node->activeRenderPass) { 2125 const VkRenderPassCreateInfo *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr(); 2126 const VkSubpassDescription *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass]; 2127 auto framebuffer = GetFramebufferState(device_data, cb_node->activeFramebuffer); 2128 2129 for (uint32_t i = 0; i < attachmentCount; i++) { 2130 auto clear_desc = &pAttachments[i]; 2131 VkImageView image_view = VK_NULL_HANDLE; 2132 2133 if (0 == clear_desc->aspectMask) { 2134 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2135 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00c03, "IMAGE", "%s", 2136 validation_error_map[VALIDATION_ERROR_01c00c03]); 2137 } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) { 2138 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2139 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00028, "IMAGE", "%s", 2140 validation_error_map[VALIDATION_ERROR_01c00028]); 2141 } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) { 2142 if (clear_desc->colorAttachment >= subpass_desc->colorAttachmentCount) { 2143 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2144 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_1860001e, "DS", 2145 "vkCmdClearAttachments() color attachment index %d out of range for active subpass %d. 
%s", 2146 clear_desc->colorAttachment, cb_node->activeSubpass, 2147 validation_error_map[VALIDATION_ERROR_1860001e]); 2148 } else if (subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment == VK_ATTACHMENT_UNUSED) { 2149 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2150 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer), __LINE__, 2151 DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS", 2152 "vkCmdClearAttachments() color attachment index %d is VK_ATTACHMENT_UNUSED; ignored.", 2153 clear_desc->colorAttachment); 2154 } else { 2155 image_view = framebuffer->createInfo 2156 .pAttachments[subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment]; 2157 } 2158 if ((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || 2159 (clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT)) { 2160 char const str[] = 2161 "vkCmdClearAttachments aspectMask [%d] must set only VK_IMAGE_ASPECT_COLOR_BIT of a color attachment. %s"; 2162 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2163 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00026, "IMAGE", str, i, 2164 validation_error_map[VALIDATION_ERROR_01c00026]); 2165 } 2166 } else { // Must be depth and/or stencil 2167 if (((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) && 2168 ((clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) { 2169 char const str[] = "vkCmdClearAttachments aspectMask [%d] is not a valid combination of bits. 
%s"; 2170 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2171 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00c01, "IMAGE", str, i, 2172 validation_error_map[VALIDATION_ERROR_01c00c01]); 2173 } 2174 if (!subpass_desc->pDepthStencilAttachment || 2175 (subpass_desc->pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED)) { 2176 skip |= log_msg( 2177 report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2178 HandleToUint64(commandBuffer), __LINE__, DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS", 2179 "vkCmdClearAttachments() depth/stencil clear with no depth/stencil attachment in subpass; ignored"); 2180 } else { 2181 image_view = framebuffer->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment]; 2182 } 2183 } 2184 if (image_view) { 2185 auto image_view_state = GetImageViewState(device_data, image_view); 2186 for (uint32_t j = 0; j < rectCount; j++) { 2187 // The rectangular region specified by a given element of pRects must be contained within the render area of 2188 // the current render pass instance 2189 // TODO: This check should be moved to CmdExecuteCommands or QueueSubmit to cover secondary CB cases 2190 if ((cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) && 2191 (false == ContainsRect(cb_node->activeRenderPassBeginInfo.renderArea, pRects[j].rect))) { 2192 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2193 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_18600020, "DS", 2194 "vkCmdClearAttachments(): The area defined by pRects[%d] is not contained in the area of " 2195 "the current render pass instance. 
%s",
                                j, validation_error_map[VALIDATION_ERROR_18600020]);
                    }
                    // The layers specified by a given element of pRects must be contained within every attachment that
                    // pAttachments refers to
                    auto attachment_layer_count = image_view_state->create_info.subresourceRange.layerCount;
                    if ((pRects[j].baseArrayLayer >= attachment_layer_count) ||
                        (pRects[j].baseArrayLayer + pRects[j].layerCount > attachment_layer_count)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                        HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_18600022, "DS",
                                        "vkCmdClearAttachments(): The layers defined in pRects[%d] are not contained in the layers "
                                        "of pAttachment[%d]. %s",
                                        j, i, validation_error_map[VALIDATION_ERROR_18600022]);
                    }
                }
            }
        }
    }
    return skip;
}

// Validate state and parameters for vkCmdResolveImage() before the command is recorded.
// Checks: bound memory on both images, queue family capability, command-buffer state,
// render-pass restriction, per-region layer counts and aspect masks, and src/dst
// format, image-type, and sample-count compatibility.
// Returns true if any check fails and the call should be skipped.
bool PreCallValidateCmdResolveImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                                    IMAGE_STATE *dst_image_state, uint32_t regionCount, const VkImageResolve *pRegions) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    if (cb_node && src_image_state && dst_image_state) {
        skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdResolveImage()", VALIDATION_ERROR_1c800200);
        skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdResolveImage()", VALIDATION_ERROR_1c800204);
        skip |=
            ValidateCmdQueueFlags(device_data, cb_node, "vkCmdResolveImage()", VK_QUEUE_GRAPHICS_BIT, VALIDATION_ERROR_1c802415);
        skip |= ValidateCmd(device_data, cb_node, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
        skip |= insideRenderPass(device_data, cb_node, "vkCmdResolveImage()", VALIDATION_ERROR_1c800017);

        // For each region, the number of layers in the image subresource should not be zero
        // For each region, src and dest image aspect must be color only
        for (uint32_t i = 0; i < regionCount; i++) {
            if (pRegions[i].srcSubresource.layerCount == 0) {
                char const str[] = "vkCmdResolveImage: number of layers in source subresource is zero";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
            }
            if (pRegions[i].dstSubresource.layerCount == 0) {
                char const str[] = "vkCmdResolveImage: number of layers in destination subresource is zero";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
            }
            if (pRegions[i].srcSubresource.layerCount != pRegions[i].dstSubresource.layerCount) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_0a200216, "IMAGE",
                    "vkCmdResolveImage: layerCount in source and destination subresource of pRegions[%d] does not match. %s", i,
                    validation_error_map[VALIDATION_ERROR_0a200216]);
            }
            if ((pRegions[i].srcSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) ||
                (pRegions[i].dstSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT)) {
                char const str[] =
                    "vkCmdResolveImage: src and dest aspectMasks for each region must specify only VK_IMAGE_ASPECT_COLOR_BIT";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_0a200214, "IMAGE", "%s. %s", str,
                                validation_error_map[VALIDATION_ERROR_0a200214]);
            }
        }

        if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
            char const str[] = "vkCmdResolveImage called with unmatched source and dest formats.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
        }
        if (src_image_state->createInfo.imageType != dst_image_state->createInfo.imageType) {
            char const str[] = "vkCmdResolveImage called with unmatched source and dest image types.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_TYPE, "IMAGE", str);
        }
        // Resolve requires a multisampled source...
        if (src_image_state->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
            char const str[] = "vkCmdResolveImage called with source sample count less than 2.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_1c800202, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_1c800202]);
        }
        // ...and a single-sampled destination.
        if (dst_image_state->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
            char const str[] = "vkCmdResolveImage called with dest sample count greater than 1.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_1c800206, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_1c800206]);
        }
        // TODO: Need to validate image layouts, which will include layout validation for shared presentable images
    } else {
        assert(0);
    }
    return skip;
}

// Record-time bookkeeping for vkCmdResolveImage(): bind both images to the command
// buffer, and queue deferred functions that at submit time (a) verify the source
// image's memory contents are valid and (b) mark the destination's memory as valid.
void PreCallRecordCmdResolveImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                                  IMAGE_STATE *dst_image_state) {
    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
    AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);

    std::function<bool()> function = [=]() {
        return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdResolveImage()");
    };
    cb_node->queue_submit_functions.push_back(function);
    function = [=]() {
        SetImageMemoryValid(device_data, dst_image_state, true);
        return false;
    };
    cb_node->queue_submit_functions.push_back(function);
}

// Validate state and parameters for vkCmdBlitImage() before the command is recorded.
// Covers sample counts, bound memory, usage flags, queue capability, format features,
// filter compatibility, and extensive per-region offset/subresource checks.
bool PreCallValidateCmdBlitImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                                 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageBlit *regions,
                                 VkImageLayout src_image_layout, VkImageLayout dst_image_layout, VkFilter filter) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    bool skip = false;
    if (cb_node) {
        skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
    }
    if (cb_node && src_image_state && dst_image_state) {
        skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): srcImage",
                                         VALIDATION_ERROR_184001d2);
        skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): dstImage",
                                         VALIDATION_ERROR_184001d4);
        skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdBlitImage()",
VALIDATION_ERROR_184001b8); 2319 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdBlitImage()", VALIDATION_ERROR_184001c2); 2320 skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, 2321 VALIDATION_ERROR_184001b6, "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"); 2322 skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, 2323 VALIDATION_ERROR_184001c0, "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT"); 2324 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdBlitImage()", VK_QUEUE_GRAPHICS_BIT, VALIDATION_ERROR_18402415); 2325 skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()"); 2326 skip |= insideRenderPass(device_data, cb_node, "vkCmdBlitImage()", VALIDATION_ERROR_18400017); 2327 // TODO: Need to validate image layouts, which will include layout validation for shared presentable images 2328 2329 VkFormat src_format = src_image_state->createInfo.format; 2330 VkFormat dst_format = dst_image_state->createInfo.format; 2331 VkImageType src_type = src_image_state->createInfo.imageType; 2332 VkImageType dst_type = dst_image_state->createInfo.imageType; 2333 2334 VkFormatProperties props = GetFormatProperties(device_data, src_format); 2335 VkImageTiling tiling = src_image_state->createInfo.tiling; 2336 VkFormatFeatureFlags flags = (tiling == VK_IMAGE_TILING_LINEAR ? props.linearTilingFeatures : props.optimalTilingFeatures); 2337 if (VK_FORMAT_FEATURE_BLIT_SRC_BIT != (flags & VK_FORMAT_FEATURE_BLIT_SRC_BIT)) { 2338 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2339 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001b4, "IMAGE", 2340 "vkCmdBlitImage: source image format %s does not support VK_FORMAT_FEATURE_BLIT_SRC_BIT feature. 
%s", 2341 string_VkFormat(src_format), validation_error_map[VALIDATION_ERROR_184001b4]); 2342 } 2343 2344 if ((VK_FILTER_LINEAR == filter) && 2345 (VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT != (flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT))) { 2346 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2347 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001d6, "IMAGE", 2348 "vkCmdBlitImage: source image format %s does not support linear filtering. %s", 2349 string_VkFormat(src_format), validation_error_map[VALIDATION_ERROR_184001d6]); 2350 } 2351 2352 if ((VK_FILTER_CUBIC_IMG == filter) && (VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG != 2353 (flags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG))) { 2354 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2355 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001d8, "IMAGE", 2356 "vkCmdBlitImage: source image format %s does not support cubic filtering. %s", 2357 string_VkFormat(src_format), validation_error_map[VALIDATION_ERROR_184001d8]); 2358 } 2359 2360 if ((VK_FILTER_CUBIC_IMG == filter) && (VK_IMAGE_TYPE_3D != src_type)) { 2361 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2362 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001da, "IMAGE", 2363 "vkCmdBlitImage: source image type must be VK_IMAGE_TYPE_3D when cubic filtering is specified. %s", 2364 validation_error_map[VALIDATION_ERROR_184001da]); 2365 } 2366 2367 props = GetFormatProperties(device_data, dst_format); 2368 tiling = dst_image_state->createInfo.tiling; 2369 flags = (tiling == VK_IMAGE_TILING_LINEAR ? 
props.linearTilingFeatures : props.optimalTilingFeatures); 2370 if (VK_FORMAT_FEATURE_BLIT_DST_BIT != (flags & VK_FORMAT_FEATURE_BLIT_DST_BIT)) { 2371 skip |= 2372 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2373 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001be, "IMAGE", 2374 "vkCmdBlitImage: destination image format %s does not support VK_FORMAT_FEATURE_BLIT_DST_BIT feature. %s", 2375 string_VkFormat(dst_format), validation_error_map[VALIDATION_ERROR_184001be]); 2376 } 2377 2378 if ((VK_SAMPLE_COUNT_1_BIT != src_image_state->createInfo.samples) || 2379 (VK_SAMPLE_COUNT_1_BIT != dst_image_state->createInfo.samples)) { 2380 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2381 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001c8, "IMAGE", 2382 "vkCmdBlitImage: source or dest image has sample count other than VK_SAMPLE_COUNT_1_BIT. %s", 2383 validation_error_map[VALIDATION_ERROR_184001c8]); 2384 } 2385 2386 // Validate consistency for unsigned formats 2387 if (FormatIsUInt(src_format) != FormatIsUInt(dst_format)) { 2388 std::stringstream ss; 2389 ss << "vkCmdBlitImage: If one of srcImage and dstImage images has unsigned integer format, " 2390 << "the other one must also have unsigned integer format. " 2391 << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format); 2392 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2393 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001cc, "IMAGE", "%s. 
%s", 2394 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001cc]); 2395 } 2396 2397 // Validate consistency for signed formats 2398 if (FormatIsSInt(src_format) != FormatIsSInt(dst_format)) { 2399 std::stringstream ss; 2400 ss << "vkCmdBlitImage: If one of srcImage and dstImage images has signed integer format, " 2401 << "the other one must also have signed integer format. " 2402 << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format); 2403 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2404 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001ca, "IMAGE", "%s. %s", 2405 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001ca]); 2406 } 2407 2408 // Validate filter for Depth/Stencil formats 2409 if (FormatIsDepthOrStencil(src_format) && (filter != VK_FILTER_NEAREST)) { 2410 std::stringstream ss; 2411 ss << "vkCmdBlitImage: If the format of srcImage is a depth, stencil, or depth stencil " 2412 << "then filter must be VK_FILTER_NEAREST."; 2413 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2414 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001d0, "IMAGE", "%s. %s", 2415 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001d0]); 2416 } 2417 2418 // Validate aspect bits and formats for depth/stencil images 2419 if (FormatIsDepthOrStencil(src_format) || FormatIsDepthOrStencil(dst_format)) { 2420 if (src_format != dst_format) { 2421 std::stringstream ss; 2422 ss << "vkCmdBlitImage: If one of srcImage and dstImage images has a format of depth, stencil or depth " 2423 << "stencil, the other one must have exactly the same format. 
" 2424 << "Source format is " << string_VkFormat(src_format) << " Destination format is " 2425 << string_VkFormat(dst_format); 2426 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2427 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001ce, "IMAGE", "%s. %s", 2428 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001ce]); 2429 } 2430 2431 #if 0 // TODO: Cannot find VU statements or spec language for these in CmdBlitImage. Verify or remove. 2432 for (uint32_t i = 0; i < regionCount; i++) { 2433 VkImageAspectFlags srcAspect = pRegions[i].srcSubresource.aspectMask; 2434 2435 if (FormatIsDepthAndStencil(src_format)) { 2436 if ((srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT)) { 2437 std::stringstream ss; 2438 ss << "vkCmdBlitImage: Combination depth/stencil image formats must have only one of VK_IMAGE_ASPECT_DEPTH_BIT " 2439 << "and VK_IMAGE_ASPECT_STENCIL_BIT set in srcImage and dstImage"; 2440 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2441 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", 2442 "%s", ss.str().c_str()); 2443 } 2444 } 2445 else if (FormatIsStencilOnly(src_format)) { 2446 if (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT) { 2447 std::stringstream ss; 2448 ss << "vkCmdBlitImage: Stencil-only image formats must have only the VK_IMAGE_ASPECT_STENCIL_BIT " 2449 << "set in both the srcImage and dstImage"; 2450 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2451 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", 2452 "%s", ss.str().c_str()); 2453 } 2454 } 2455 else if (FormatIsDepthOnly(src_format)) { 2456 if (srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) { 2457 std::stringstream ss; 2458 ss << "vkCmdBlitImage: Depth-only image formats must 
have only the VK_IMAGE_ASPECT_DEPTH " 2459 << "set in both the srcImage and dstImage"; 2460 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2461 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", 2462 "%s", ss.str().c_str()); 2463 } 2464 } 2465 } 2466 #endif 2467 } // Depth or Stencil 2468 2469 // Do per-region checks 2470 for (uint32_t i = 0; i < region_count; i++) { 2471 const VkImageBlit rgn = regions[i]; 2472 bool hit_error = false; 2473 skip |= 2474 VerifyImageLayout(device_data, cb_node, src_image_state, rgn.srcSubresource, src_image_layout, 2475 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdBlitImage()", VALIDATION_ERROR_184001bc, &hit_error); 2476 skip |= 2477 VerifyImageLayout(device_data, cb_node, dst_image_state, rgn.dstSubresource, dst_image_layout, 2478 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdBlitImage()", VALIDATION_ERROR_184001c6, &hit_error); 2479 2480 // Warn for zero-sized regions 2481 if ((rgn.srcOffsets[0].x == rgn.srcOffsets[1].x) || (rgn.srcOffsets[0].y == rgn.srcOffsets[1].y) || 2482 (rgn.srcOffsets[0].z == rgn.srcOffsets[1].z)) { 2483 std::stringstream ss; 2484 ss << "vkCmdBlitImage: pRegions[" << i << "].srcOffsets specify a zero-volume area."; 2485 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2486 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_EXTENTS, "IMAGE", "%s", 2487 ss.str().c_str()); 2488 } 2489 if ((rgn.dstOffsets[0].x == rgn.dstOffsets[1].x) || (rgn.dstOffsets[0].y == rgn.dstOffsets[1].y) || 2490 (rgn.dstOffsets[0].z == rgn.dstOffsets[1].z)) { 2491 std::stringstream ss; 2492 ss << "vkCmdBlitImage: pRegions[" << i << "].dstOffsets specify a zero-volume area."; 2493 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2494 HandleToUint64(cb_node->commandBuffer), __LINE__, 
DRAWSTATE_INVALID_EXTENTS, "IMAGE", "%s", 2495 ss.str().c_str()); 2496 } 2497 if (rgn.srcSubresource.layerCount == 0) { 2498 char const str[] = "vkCmdBlitImage: number of layers in source subresource is zero"; 2499 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2500 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str); 2501 } 2502 if (rgn.dstSubresource.layerCount == 0) { 2503 char const str[] = "vkCmdBlitImage: number of layers in destination subresource is zero"; 2504 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2505 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str); 2506 } 2507 2508 // Check that src/dst layercounts match 2509 if (rgn.srcSubresource.layerCount != rgn.dstSubresource.layerCount) { 2510 skip |= 2511 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2512 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001de, "IMAGE", 2513 "vkCmdBlitImage: layerCount in source and destination subresource of pRegions[%d] does not match. %s", 2514 i, validation_error_map[VALIDATION_ERROR_09a001de]); 2515 } 2516 2517 if (rgn.srcSubresource.aspectMask != rgn.dstSubresource.aspectMask) { 2518 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2519 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001dc, "IMAGE", 2520 "vkCmdBlitImage: aspectMask members for pRegion[%d] do not match. 
%s", i, 2521 validation_error_map[VALIDATION_ERROR_09a001dc]); 2522 } 2523 2524 if (!VerifyAspectsPresent(rgn.srcSubresource.aspectMask, src_format)) { 2525 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2526 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e2, "IMAGE", 2527 "vkCmdBlitImage: region [%d] source aspectMask (0x%x) specifies aspects not present in source " 2528 "image format %s. %s", 2529 i, rgn.srcSubresource.aspectMask, string_VkFormat(src_format), 2530 validation_error_map[VALIDATION_ERROR_09a001e2]); 2531 } 2532 2533 if (!VerifyAspectsPresent(rgn.dstSubresource.aspectMask, dst_format)) { 2534 skip |= log_msg( 2535 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2536 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e4, "IMAGE", 2537 "vkCmdBlitImage: region [%d] dest aspectMask (0x%x) specifies aspects not present in dest image format %s. %s", 2538 i, rgn.dstSubresource.aspectMask, string_VkFormat(dst_format), validation_error_map[VALIDATION_ERROR_09a001e4]); 2539 } 2540 2541 // Validate source image offsets 2542 VkExtent3D src_extent = GetImageSubresourceExtent(src_image_state, &(rgn.srcSubresource)); 2543 if (VK_IMAGE_TYPE_1D == src_type) { 2544 if ((0 != rgn.srcOffsets[0].y) || (1 != rgn.srcOffsets[1].y)) { 2545 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2546 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001ea, "IMAGE", 2547 "vkCmdBlitImage: region [%d], source image of type VK_IMAGE_TYPE_1D with srcOffset[].y values " 2548 "of (%1d, %1d). These must be (0, 1). 
%s", 2549 i, rgn.srcOffsets[0].y, rgn.srcOffsets[1].y, validation_error_map[VALIDATION_ERROR_09a001ea]); 2550 } 2551 } 2552 2553 if ((VK_IMAGE_TYPE_1D == src_type) || (VK_IMAGE_TYPE_2D == src_type)) { 2554 if ((0 != rgn.srcOffsets[0].z) || (1 != rgn.srcOffsets[1].z)) { 2555 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2556 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001ee, "IMAGE", 2557 "vkCmdBlitImage: region [%d], source image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with " 2558 "srcOffset[].z values of (%1d, %1d). These must be (0, 1). %s", 2559 i, rgn.srcOffsets[0].z, rgn.srcOffsets[1].z, validation_error_map[VALIDATION_ERROR_09a001ee]); 2560 } 2561 } 2562 2563 bool oob = false; 2564 if ((rgn.srcOffsets[0].x < 0) || (rgn.srcOffsets[0].x > static_cast<int32_t>(src_extent.width)) || 2565 (rgn.srcOffsets[1].x < 0) || (rgn.srcOffsets[1].x > static_cast<int32_t>(src_extent.width))) { 2566 oob = true; 2567 skip |= log_msg( 2568 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2569 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e6, "IMAGE", 2570 "vkCmdBlitImage: region [%d] srcOffset[].x values (%1d, %1d) exceed srcSubresource width extent (%1d). %s", i, 2571 rgn.srcOffsets[0].x, rgn.srcOffsets[1].x, src_extent.width, validation_error_map[VALIDATION_ERROR_09a001e6]); 2572 } 2573 if ((rgn.srcOffsets[0].y < 0) || (rgn.srcOffsets[0].y > static_cast<int32_t>(src_extent.height)) || 2574 (rgn.srcOffsets[1].y < 0) || (rgn.srcOffsets[1].y > static_cast<int32_t>(src_extent.height))) { 2575 oob = true; 2576 skip |= log_msg( 2577 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2578 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e8, "IMAGE", 2579 "vkCmdBlitImage: region [%d] srcOffset[].y values (%1d, %1d) exceed srcSubresource height extent (%1d). 
%s", i, 2580 rgn.srcOffsets[0].y, rgn.srcOffsets[1].y, src_extent.height, validation_error_map[VALIDATION_ERROR_09a001e8]); 2581 } 2582 if ((rgn.srcOffsets[0].z < 0) || (rgn.srcOffsets[0].z > static_cast<int32_t>(src_extent.depth)) || 2583 (rgn.srcOffsets[1].z < 0) || (rgn.srcOffsets[1].z > static_cast<int32_t>(src_extent.depth))) { 2584 oob = true; 2585 skip |= log_msg( 2586 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2587 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001ec, "IMAGE", 2588 "vkCmdBlitImage: region [%d] srcOffset[].z values (%1d, %1d) exceed srcSubresource depth extent (%1d). %s", i, 2589 rgn.srcOffsets[0].z, rgn.srcOffsets[1].z, src_extent.depth, validation_error_map[VALIDATION_ERROR_09a001ec]); 2590 } 2591 if (rgn.srcSubresource.mipLevel >= src_image_state->createInfo.mipLevels) { 2592 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2593 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001ae, "IMAGE", 2594 "vkCmdBlitImage: region [%d] source image, attempt to access a non-existant mip level %1d. %s", i, 2595 rgn.srcSubresource.mipLevel, validation_error_map[VALIDATION_ERROR_184001ae]); 2596 } else if (oob) { 2597 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2598 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001ae, "IMAGE", 2599 "vkCmdBlitImage: region [%d] source image blit region exceeds image dimensions. 
%s", i, 2600 validation_error_map[VALIDATION_ERROR_184001ae]); 2601 } 2602 2603 // Validate dest image offsets 2604 VkExtent3D dst_extent = GetImageSubresourceExtent(dst_image_state, &(rgn.dstSubresource)); 2605 if (VK_IMAGE_TYPE_1D == dst_type) { 2606 if ((0 != rgn.dstOffsets[0].y) || (1 != rgn.dstOffsets[1].y)) { 2607 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2608 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f4, "IMAGE", 2609 "vkCmdBlitImage: region [%d], dest image of type VK_IMAGE_TYPE_1D with dstOffset[].y values of " 2610 "(%1d, %1d). These must be (0, 1). %s", 2611 i, rgn.dstOffsets[0].y, rgn.dstOffsets[1].y, validation_error_map[VALIDATION_ERROR_09a001f4]); 2612 } 2613 } 2614 2615 if ((VK_IMAGE_TYPE_1D == dst_type) || (VK_IMAGE_TYPE_2D == dst_type)) { 2616 if ((0 != rgn.dstOffsets[0].z) || (1 != rgn.dstOffsets[1].z)) { 2617 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2618 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f8, "IMAGE", 2619 "vkCmdBlitImage: region [%d], dest image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with " 2620 "dstOffset[].z values of (%1d, %1d). These must be (0, 1). %s", 2621 i, rgn.dstOffsets[0].z, rgn.dstOffsets[1].z, validation_error_map[VALIDATION_ERROR_09a001f8]); 2622 } 2623 } 2624 2625 oob = false; 2626 if ((rgn.dstOffsets[0].x < 0) || (rgn.dstOffsets[0].x > static_cast<int32_t>(dst_extent.width)) || 2627 (rgn.dstOffsets[1].x < 0) || (rgn.dstOffsets[1].x > static_cast<int32_t>(dst_extent.width))) { 2628 oob = true; 2629 skip |= log_msg( 2630 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2631 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f0, "IMAGE", 2632 "vkCmdBlitImage: region [%d] dstOffset[].x values (%1d, %1d) exceed dstSubresource width extent (%1d). 
%s", i, 2633 rgn.dstOffsets[0].x, rgn.dstOffsets[1].x, dst_extent.width, validation_error_map[VALIDATION_ERROR_09a001f0]); 2634 } 2635 if ((rgn.dstOffsets[0].y < 0) || (rgn.dstOffsets[0].y > static_cast<int32_t>(dst_extent.height)) || 2636 (rgn.dstOffsets[1].y < 0) || (rgn.dstOffsets[1].y > static_cast<int32_t>(dst_extent.height))) { 2637 oob = true; 2638 skip |= log_msg( 2639 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2640 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f2, "IMAGE", 2641 "vkCmdBlitImage: region [%d] dstOffset[].y values (%1d, %1d) exceed dstSubresource height extent (%1d). %s", i, 2642 rgn.dstOffsets[0].y, rgn.dstOffsets[1].y, dst_extent.height, validation_error_map[VALIDATION_ERROR_09a001f2]); 2643 } 2644 if ((rgn.dstOffsets[0].z < 0) || (rgn.dstOffsets[0].z > static_cast<int32_t>(dst_extent.depth)) || 2645 (rgn.dstOffsets[1].z < 0) || (rgn.dstOffsets[1].z > static_cast<int32_t>(dst_extent.depth))) { 2646 oob = true; 2647 skip |= log_msg( 2648 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2649 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f6, "IMAGE", 2650 "vkCmdBlitImage: region [%d] dstOffset[].z values (%1d, %1d) exceed dstSubresource depth extent (%1d). %s", i, 2651 rgn.dstOffsets[0].z, rgn.dstOffsets[1].z, dst_extent.depth, validation_error_map[VALIDATION_ERROR_09a001f6]); 2652 } 2653 if (rgn.dstSubresource.mipLevel >= dst_image_state->createInfo.mipLevels) { 2654 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2655 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001b0, "IMAGE", 2656 "vkCmdBlitImage: region [%d] destination image, attempt to access a non-existant mip level %1d. 
%s", 2657 i, rgn.dstSubresource.mipLevel, validation_error_map[VALIDATION_ERROR_184001b0]); 2658 } else if (oob) { 2659 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2660 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001b0, "IMAGE", 2661 "vkCmdBlitImage: region [%d] destination image blit region exceeds image dimensions. %s", i, 2662 validation_error_map[VALIDATION_ERROR_184001b0]); 2663 } 2664 2665 if ((VK_IMAGE_TYPE_3D == src_type) || (VK_IMAGE_TYPE_3D == dst_type)) { 2666 if ((0 != rgn.srcSubresource.baseArrayLayer) || (1 != rgn.srcSubresource.layerCount) || 2667 (0 != rgn.dstSubresource.baseArrayLayer) || (1 != rgn.dstSubresource.layerCount)) { 2668 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2669 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e0, "IMAGE", 2670 "vkCmdBlitImage: region [%d] blit to/from a 3D image type with a non-zero baseArrayLayer, or a " 2671 "layerCount other than 1. 
%s",
                                i, validation_error_map[VALIDATION_ERROR_09a001e0]);
                }
            }
        }  // per-region checks
    } else {
        assert(0);
    }
    return skip;
}

// Record-time bookkeeping for vkCmdBlitImage(): record the post-blit layout of every
// touched subresource, bind both images to the command buffer, and queue deferred
// functions that at submit time verify the source image's memory is valid and mark
// the destination image's memory as valid.
void PreCallRecordCmdBlitImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                               IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageBlit *regions,
                               VkImageLayout src_image_layout, VkImageLayout dst_image_layout) {
    // Make sure that all image slices are updated to correct layout
    for (uint32_t i = 0; i < region_count; ++i) {
        SetImageLayout(device_data, cb_node, src_image_state, regions[i].srcSubresource, src_image_layout);
        SetImageLayout(device_data, cb_node, dst_image_state, regions[i].dstSubresource, dst_image_layout);
    }
    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
    AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);

    std::function<bool()> function = [=]() { return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdBlitImage()"); };
    cb_node->queue_submit_functions.push_back(function);
    function = [=]() {
        SetImageMemoryValid(device_data, dst_image_state, true);
        return false;
    };
    cb_node->queue_submit_functions.push_back(function);
}

// This validates that the initial layout specified in the command buffer for the IMAGE
// is the same as the global IMAGE layout at submit time. On a match-or-mismatch check,
// the command buffer's final layout for each subresource is written into overlayLayoutMap
// so that subsequent command buffers in the same submission are validated against it.
bool ValidateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB,
                                std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const &globalImageLayoutMap,
                                std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &overlayLayoutMap) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    for (auto cb_image_data : pCB->imageLayoutMap) {
        VkImageLayout imageLayout;

        // Prefer a layout recorded earlier in this submission (overlay map); fall back to the global map.
        if (FindLayout(overlayLayoutMap, cb_image_data.first, imageLayout) ||
            FindLayout(globalImageLayoutMap, cb_image_data.first, imageLayout)) {
            if (cb_image_data.second.initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
                // TODO: Set memory invalid which is in mem_tracker currently
            } else if (imageLayout != cb_image_data.second.initialLayout) {
                if (cb_image_data.first.hasSubresource) {
                    skip |= log_msg(
                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
                        "Cannot submit cmd buffer using image (0x%" PRIx64
                        ") [sub-resource: aspectMask 0x%X array layer %u, mip level %u], with layout %s when first use is %s.",
                        HandleToUint64(cb_image_data.first.image), cb_image_data.first.subresource.aspectMask,
                        cb_image_data.first.subresource.arrayLayer, cb_image_data.first.subresource.mipLevel,
                        string_VkImageLayout(imageLayout), string_VkImageLayout(cb_image_data.second.initialLayout));
                } else {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
                                    "Cannot submit cmd buffer using image (0x%" PRIx64 ") with layout %s when first use is %s.",
                                    HandleToUint64(cb_image_data.first.image), string_VkImageLayout(imageLayout),
                                    string_VkImageLayout(cb_image_data.second.initialLayout));
                }
            }
            SetLayout(overlayLayoutMap, cb_image_data.first, cb_image_data.second.layout);
        }
    }
    return skip;
}

// Push each subresource layout recorded in the command buffer out to the device-global layout map.
void UpdateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB) {
    for (auto cb_image_data : pCB->imageLayoutMap) {
        VkImageLayout imageLayout;
        // NOTE(review): imageLayout filled by FindGlobalLayout is never read here — confirm the lookup is required.
        FindGlobalLayout(device_data, cb_image_data.first, imageLayout);
        SetGlobalLayout(device_data, cb_image_data.first, cb_image_data.second.layout);
    }
}
2749 2750 // Print readable FlagBits in FlagMask 2751 static std::string string_VkAccessFlags(VkAccessFlags accessMask) { 2752 std::string result; 2753 std::string separator; 2754 2755 if (accessMask == 0) { 2756 result = "[None]"; 2757 } else { 2758 result = "["; 2759 for (auto i = 0; i < 32; i++) { 2760 if (accessMask & (1 << i)) { 2761 result = result + separator + string_VkAccessFlagBits((VkAccessFlagBits)(1 << i)); 2762 separator = " | "; 2763 } 2764 } 2765 result = result + "]"; 2766 } 2767 return result; 2768 } 2769 2770 // AccessFlags MUST have 'required_bit' set, and may have one or more of 'optional_bits' set. If required_bit is zero, accessMask 2771 // must have at least one of 'optional_bits' set 2772 // TODO: Add tracking to ensure that at least one barrier has been set for these layout transitions 2773 static bool ValidateMaskBits(core_validation::layer_data *device_data, VkCommandBuffer cmdBuffer, const VkAccessFlags &accessMask, 2774 const VkImageLayout &layout, VkAccessFlags required_bit, VkAccessFlags optional_bits, 2775 const char *type) { 2776 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2777 bool skip = false; 2778 2779 if ((accessMask & required_bit) || (!required_bit && (accessMask & optional_bits))) { 2780 if (accessMask & ~(required_bit | optional_bits)) { 2781 // TODO: Verify against Valid Use 2782 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2783 HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", 2784 "Additional bits in %s accessMask 0x%X %s are specified when layout is %s.", type, accessMask, 2785 string_VkAccessFlags(accessMask).c_str(), string_VkImageLayout(layout)); 2786 } 2787 } else { 2788 if (!required_bit) { 2789 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2790 HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", 2791 "%s 
AccessMask %d %s must contain at least one of access bits %d %s when layout is %s, unless the app " 2792 "has previously added a barrier for this transition.", 2793 type, accessMask, string_VkAccessFlags(accessMask).c_str(), optional_bits, 2794 string_VkAccessFlags(optional_bits).c_str(), string_VkImageLayout(layout)); 2795 } else { 2796 std::string opt_bits; 2797 if (optional_bits != 0) { 2798 std::stringstream ss; 2799 ss << optional_bits; 2800 opt_bits = "and may have optional bits " + ss.str() + ' ' + string_VkAccessFlags(optional_bits); 2801 } 2802 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2803 HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", 2804 "%s AccessMask %d %s must have required access bit %d %s %s when layout is %s, unless the app has " 2805 "previously added a barrier for this transition.", 2806 type, accessMask, string_VkAccessFlags(accessMask).c_str(), required_bit, 2807 string_VkAccessFlags(required_bit).c_str(), opt_bits.c_str(), string_VkImageLayout(layout)); 2808 } 2809 } 2810 return skip; 2811 } 2812 2813 // ValidateLayoutVsAttachmentDescription is a general function where we can validate various state associated with the 2814 // VkAttachmentDescription structs that are used by the sub-passes of a renderpass. Initial check is to make sure that READ_ONLY 2815 // layout attachments don't have CLEAR as their loadOp. 
2816 bool ValidateLayoutVsAttachmentDescription(const debug_report_data *report_data, const VkImageLayout first_layout, 2817 const uint32_t attachment, const VkAttachmentDescription &attachment_description) { 2818 bool skip = false; 2819 // Verify that initial loadOp on READ_ONLY attachments is not CLEAR 2820 if (attachment_description.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) { 2821 if ((first_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) || 2822 (first_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)) { 2823 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2824 VALIDATION_ERROR_12200688, "DS", "Cannot clear attachment %d with invalid first layout %s. %s", 2825 attachment, string_VkImageLayout(first_layout), validation_error_map[VALIDATION_ERROR_12200688]); 2826 } 2827 } 2828 return skip; 2829 } 2830 2831 bool ValidateLayouts(core_validation::layer_data *device_data, VkDevice device, const VkRenderPassCreateInfo *pCreateInfo) { 2832 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2833 bool skip = false; 2834 2835 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) { 2836 VkFormat format = pCreateInfo->pAttachments[i].format; 2837 if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) { 2838 if ((FormatIsColor(format) || FormatHasDepth(format)) && 2839 pCreateInfo->pAttachments[i].loadOp == VK_ATTACHMENT_LOAD_OP_LOAD) { 2840 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2841 DRAWSTATE_INVALID_RENDERPASS, "DS", 2842 "Render pass has an attachment with loadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout == " 2843 "VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you intended. 
Consider using " 2844 "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truely is undefined at the start of the " 2845 "render pass."); 2846 } 2847 if (FormatHasStencil(format) && pCreateInfo->pAttachments[i].stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD) { 2848 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2849 DRAWSTATE_INVALID_RENDERPASS, "DS", 2850 "Render pass has an attachment with stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout " 2851 "== VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you intended. Consider using " 2852 "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truely is undefined at the start of the " 2853 "render pass."); 2854 } 2855 } 2856 } 2857 2858 // Track when we're observing the first use of an attachment 2859 std::vector<bool> attach_first_use(pCreateInfo->attachmentCount, true); 2860 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) { 2861 const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i]; 2862 2863 // Check input attachments first, so we can detect first-use-as-input for VU #00349 2864 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) { 2865 auto attach_index = subpass.pInputAttachments[j].attachment; 2866 if (attach_index == VK_ATTACHMENT_UNUSED) continue; 2867 2868 switch (subpass.pInputAttachments[j].layout) { 2869 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: 2870 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: 2871 // These are ideal. 2872 break; 2873 2874 case VK_IMAGE_LAYOUT_GENERAL: 2875 // May not be optimal. TODO: reconsider this warning based on other constraints. 
2876 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2877 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2878 "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL."); 2879 break; 2880 2881 default: 2882 // No other layouts are acceptable 2883 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2884 __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2885 "Layout for input attachment is %s but can only be READ_ONLY_OPTIMAL or GENERAL.", 2886 string_VkImageLayout(subpass.pInputAttachments[j].layout)); 2887 } 2888 2889 VkImageLayout layout = subpass.pInputAttachments[j].layout; 2890 bool found_layout_mismatch = subpass.pDepthStencilAttachment && 2891 subpass.pDepthStencilAttachment->attachment == attach_index && 2892 subpass.pDepthStencilAttachment->layout != layout; 2893 for (uint32_t c = 0; !found_layout_mismatch && c < subpass.colorAttachmentCount; ++c) { 2894 found_layout_mismatch = 2895 (subpass.pColorAttachments[c].attachment == attach_index && subpass.pColorAttachments[c].layout != layout); 2896 } 2897 if (found_layout_mismatch) { 2898 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2899 VALIDATION_ERROR_140006ae, "DS", 2900 "CreateRenderPass: Subpass %u pInputAttachments[%u] (%u) has layout %u, but is also used as a " 2901 "depth/color attachment with a different layout. 
%s", 2902 i, j, attach_index, layout, validation_error_map[VALIDATION_ERROR_140006ae]); 2903 } 2904 2905 if (attach_first_use[attach_index]) { 2906 skip |= ValidateLayoutVsAttachmentDescription(report_data, subpass.pInputAttachments[j].layout, attach_index, 2907 pCreateInfo->pAttachments[attach_index]); 2908 2909 bool used_as_depth = 2910 (subpass.pDepthStencilAttachment != NULL && subpass.pDepthStencilAttachment->attachment == attach_index); 2911 bool used_as_color = false; 2912 for (uint32_t k = 0; !used_as_depth && !used_as_color && k < subpass.colorAttachmentCount; ++k) { 2913 used_as_color = (subpass.pColorAttachments[k].attachment == attach_index); 2914 } 2915 if (!used_as_depth && !used_as_color && 2916 pCreateInfo->pAttachments[attach_index].loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) { 2917 skip |= log_msg( 2918 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2919 VALIDATION_ERROR_1400069c, "DS", 2920 "CreateRenderPass: attachment %u is first used as an input attachment in subpass %u with loadOp=CLEAR. %s", 2921 attach_index, attach_index, validation_error_map[VALIDATION_ERROR_1400069c]); 2922 } 2923 } 2924 attach_first_use[attach_index] = false; 2925 } 2926 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) { 2927 auto attach_index = subpass.pColorAttachments[j].attachment; 2928 if (attach_index == VK_ATTACHMENT_UNUSED) continue; 2929 2930 // TODO: Need a way to validate shared presentable images here, currently just allowing 2931 // VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR 2932 // as an acceptable layout, but need to make sure shared presentable images ONLY use that layout 2933 switch (subpass.pColorAttachments[j].layout) { 2934 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: 2935 // This is ideal. 2936 case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR: 2937 // TODO: See note above, just assuming that attachment is shared presentable and allowing this for now. 
2938 break; 2939 2940 case VK_IMAGE_LAYOUT_GENERAL: 2941 // May not be optimal; TODO: reconsider this warning based on other constraints? 2942 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2943 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2944 "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL."); 2945 break; 2946 2947 default: 2948 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2949 __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2950 "Layout for color attachment is %s but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.", 2951 string_VkImageLayout(subpass.pColorAttachments[j].layout)); 2952 } 2953 2954 if (attach_first_use[attach_index]) { 2955 skip |= ValidateLayoutVsAttachmentDescription(report_data, subpass.pColorAttachments[j].layout, attach_index, 2956 pCreateInfo->pAttachments[attach_index]); 2957 } 2958 attach_first_use[attach_index] = false; 2959 } 2960 2961 if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) { 2962 switch (subpass.pDepthStencilAttachment->layout) { 2963 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: 2964 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: 2965 // These are ideal. 2966 break; 2967 2968 case VK_IMAGE_LAYOUT_GENERAL: 2969 // May not be optimal; TODO: reconsider this warning based on other constraints? GENERAL can be better than 2970 // doing a bunch of transitions. 
2971 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2972 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2973 "GENERAL layout for depth attachment may not give optimal performance."); 2974 break; 2975 2976 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR: 2977 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR: 2978 if (GetDeviceExtensions(device_data)->vk_khr_maintenance2) { 2979 break; 2980 } else { 2981 // Intentionally fall through to generic error message 2982 } 2983 2984 default: 2985 // No other layouts are acceptable 2986 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2987 __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2988 "Layout for depth attachment is %s but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL, " 2989 "DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.", 2990 string_VkImageLayout(subpass.pDepthStencilAttachment->layout)); 2991 } 2992 2993 auto attach_index = subpass.pDepthStencilAttachment->attachment; 2994 if (attach_first_use[attach_index]) { 2995 skip |= ValidateLayoutVsAttachmentDescription(report_data, subpass.pDepthStencilAttachment->layout, attach_index, 2996 pCreateInfo->pAttachments[attach_index]); 2997 } 2998 attach_first_use[attach_index] = false; 2999 } 3000 } 3001 return skip; 3002 } 3003 3004 // For any image objects that overlap mapped memory, verify that their layouts are PREINIT or GENERAL 3005 bool ValidateMapImageLayouts(core_validation::layer_data *device_data, VkDevice device, DEVICE_MEM_INFO const *mem_info, 3006 VkDeviceSize offset, VkDeviceSize end_offset) { 3007 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3008 bool skip = false; 3009 // Iterate over all bound image ranges and verify that for any that overlap the map ranges, the layouts are 3010 // VK_IMAGE_LAYOUT_PREINITIALIZED or VK_IMAGE_LAYOUT_GENERAL 3011 // TODO : This 
can be optimized if we store ranges based on starting address and early exit when we pass our range 3012 for (auto image_handle : mem_info->bound_images) { 3013 auto img_it = mem_info->bound_ranges.find(image_handle); 3014 if (img_it != mem_info->bound_ranges.end()) { 3015 if (rangesIntersect(device_data, &img_it->second, offset, end_offset)) { 3016 std::vector<VkImageLayout> layouts; 3017 if (FindLayouts(device_data, VkImage(image_handle), layouts)) { 3018 for (auto layout : layouts) { 3019 if (layout != VK_IMAGE_LAYOUT_PREINITIALIZED && layout != VK_IMAGE_LAYOUT_GENERAL) { 3020 skip |= 3021 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 3022 HandleToUint64(mem_info->mem), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 3023 "Mapping an image with layout %s can result in undefined behavior if this memory is used " 3024 "by the device. Only GENERAL or PREINITIALIZED should be used.", 3025 string_VkImageLayout(layout)); 3026 } 3027 } 3028 } 3029 } 3030 } 3031 } 3032 return skip; 3033 } 3034 3035 // Helper function to validate correct usage bits set for buffers or images. Verify that (actual & desired) flags != 0 or, if strict 3036 // is true, verify that (actual & desired) flags == desired 3037 static bool validate_usage_flags(layer_data *device_data, VkFlags actual, VkFlags desired, VkBool32 strict, uint64_t obj_handle, 3038 VulkanObjectType obj_type, int32_t const msgCode, char const *func_name, char const *usage_str) { 3039 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3040 3041 bool correct_usage = false; 3042 bool skip = false; 3043 const char *type_str = object_string[obj_type]; 3044 if (strict) { 3045 correct_usage = ((actual & desired) == desired); 3046 } else { 3047 correct_usage = ((actual & desired) != 0); 3048 } 3049 if (!correct_usage) { 3050 if (msgCode == -1) { 3051 // TODO: Fix callers with msgCode == -1 to use correct validation checks. 
3052 skip = 3053 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, __LINE__, 3054 MEMTRACK_INVALID_USAGE_FLAG, "MEM", 3055 "Invalid usage flag for %s 0x%" PRIx64 " used by %s. In this case, %s should have %s set during creation.", 3056 type_str, obj_handle, func_name, type_str, usage_str); 3057 } else { 3058 const char *valid_usage = (msgCode == -1) ? "" : validation_error_map[msgCode]; 3059 skip = log_msg( 3060 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, __LINE__, msgCode, "MEM", 3061 "Invalid usage flag for %s 0x%" PRIx64 " used by %s. In this case, %s should have %s set during creation. %s", 3062 type_str, obj_handle, func_name, type_str, usage_str, valid_usage); 3063 } 3064 } 3065 return skip; 3066 } 3067 3068 // Helper function to validate usage flags for buffers. For given buffer_state send actual vs. desired usage off to helper above 3069 // where an error will be flagged if usage is not correct 3070 bool ValidateImageUsageFlags(layer_data *device_data, IMAGE_STATE const *image_state, VkFlags desired, bool strict, 3071 int32_t const msgCode, char const *func_name, char const *usage_string) { 3072 return validate_usage_flags(device_data, image_state->createInfo.usage, desired, strict, HandleToUint64(image_state->image), 3073 kVulkanObjectTypeImage, msgCode, func_name, usage_string); 3074 } 3075 3076 // Helper function to validate usage flags for buffers. For given buffer_state send actual vs. 
desired usage off to helper above 3077 // where an error will be flagged if usage is not correct 3078 bool ValidateBufferUsageFlags(layer_data *device_data, BUFFER_STATE const *buffer_state, VkFlags desired, bool strict, 3079 int32_t const msgCode, char const *func_name, char const *usage_string) { 3080 return validate_usage_flags(device_data, buffer_state->createInfo.usage, desired, strict, HandleToUint64(buffer_state->buffer), 3081 kVulkanObjectTypeBuffer, msgCode, func_name, usage_string); 3082 } 3083 3084 bool PreCallValidateCreateBuffer(layer_data *device_data, const VkBufferCreateInfo *pCreateInfo) { 3085 bool skip = false; 3086 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3087 3088 // TODO: Add check for VALIDATION_ERROR_1ec0071e (sparse address space accounting) 3089 3090 if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && (!GetEnabledFeatures(device_data)->sparseBinding)) { 3091 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3092 VALIDATION_ERROR_01400726, "DS", 3093 "vkCreateBuffer(): the sparseBinding device feature is disabled: Buffers cannot be created with the " 3094 "VK_BUFFER_CREATE_SPARSE_BINDING_BIT set. %s", 3095 validation_error_map[VALIDATION_ERROR_01400726]); 3096 } 3097 3098 if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && (!GetEnabledFeatures(device_data)->sparseResidencyBuffer)) { 3099 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3100 VALIDATION_ERROR_01400728, "DS", 3101 "vkCreateBuffer(): the sparseResidencyBuffer device feature is disabled: Buffers cannot be created with " 3102 "the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT set. 
%s", 3103 validation_error_map[VALIDATION_ERROR_01400728]); 3104 } 3105 3106 if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures(device_data)->sparseResidencyAliased)) { 3107 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3108 VALIDATION_ERROR_0140072a, "DS", 3109 "vkCreateBuffer(): the sparseResidencyAliased device feature is disabled: Buffers cannot be created with " 3110 "the VK_BUFFER_CREATE_SPARSE_ALIASED_BIT set. %s", 3111 validation_error_map[VALIDATION_ERROR_0140072a]); 3112 } 3113 return skip; 3114 } 3115 3116 void PostCallRecordCreateBuffer(layer_data *device_data, const VkBufferCreateInfo *pCreateInfo, VkBuffer *pBuffer) { 3117 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid 3118 GetBufferMap(device_data) 3119 ->insert(std::make_pair(*pBuffer, std::unique_ptr<BUFFER_STATE>(new BUFFER_STATE(*pBuffer, pCreateInfo)))); 3120 } 3121 3122 bool PreCallValidateCreateBufferView(layer_data *device_data, const VkBufferViewCreateInfo *pCreateInfo) { 3123 bool skip = false; 3124 BUFFER_STATE *buffer_state = GetBufferState(device_data, pCreateInfo->buffer); 3125 // If this isn't a sparse buffer, it needs to have memory backing it at CreateBufferView time 3126 if (buffer_state) { 3127 skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCreateBufferView()", VALIDATION_ERROR_01a0074e); 3128 // In order to create a valid buffer view, the buffer must have been created with at least one of the following flags: 3129 // UNIFORM_TEXEL_BUFFER_BIT or STORAGE_TEXEL_BUFFER_BIT 3130 skip |= ValidateBufferUsageFlags( 3131 device_data, buffer_state, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, false, 3132 VALIDATION_ERROR_01a00748, "vkCreateBufferView()", "VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT"); 3133 } 3134 return skip; 3135 } 3136 3137 void 
PostCallRecordCreateBufferView(layer_data *device_data, const VkBufferViewCreateInfo *pCreateInfo, VkBufferView *pView) { 3138 (*GetBufferViewMap(device_data))[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo)); 3139 } 3140 3141 // For the given format verify that the aspect masks make sense 3142 bool ValidateImageAspectMask(layer_data *device_data, VkImage image, VkFormat format, VkImageAspectFlags aspect_mask, 3143 const char *func_name) { 3144 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3145 bool skip = false; 3146 if (FormatIsColor(format)) { 3147 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) { 3148 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3149 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3150 "%s: Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set. %s", func_name, 3151 validation_error_map[VALIDATION_ERROR_0a400c01]); 3152 } else if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != aspect_mask) { 3153 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3154 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3155 "%s: Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set. %s", func_name, 3156 validation_error_map[VALIDATION_ERROR_0a400c01]); 3157 } 3158 } else if (FormatIsDepthAndStencil(format)) { 3159 if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) { 3160 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3161 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3162 "%s: Depth/stencil image formats must have at least one of VK_IMAGE_ASPECT_DEPTH_BIT and " 3163 "VK_IMAGE_ASPECT_STENCIL_BIT set. 
%s", 3164 func_name, validation_error_map[VALIDATION_ERROR_0a400c01]); 3165 } else if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != aspect_mask) { 3166 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3167 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3168 "%s: Combination depth/stencil image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT and " 3169 "VK_IMAGE_ASPECT_STENCIL_BIT set. %s", 3170 func_name, validation_error_map[VALIDATION_ERROR_0a400c01]); 3171 } 3172 } else if (FormatIsDepthOnly(format)) { 3173 if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) { 3174 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3175 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3176 "%s: Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set. %s", func_name, 3177 validation_error_map[VALIDATION_ERROR_0a400c01]); 3178 } else if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != aspect_mask) { 3179 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3180 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3181 "%s: Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set. %s", func_name, 3182 validation_error_map[VALIDATION_ERROR_0a400c01]); 3183 } 3184 } else if (FormatIsStencilOnly(format)) { 3185 if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT) { 3186 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3187 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3188 "%s: Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set. 
%s", func_name, 3189 validation_error_map[VALIDATION_ERROR_0a400c01]); 3190 } else if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != aspect_mask) { 3191 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3192 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3193 "%s: Stencil-only image formats can have only the VK_IMAGE_ASPECT_STENCIL_BIT set. %s", func_name, 3194 validation_error_map[VALIDATION_ERROR_0a400c01]); 3195 } 3196 } 3197 return skip; 3198 } 3199 3200 struct SubresourceRangeErrorCodes { 3201 UNIQUE_VALIDATION_ERROR_CODE base_mip_err, mip_count_err, base_layer_err, layer_count_err; 3202 }; 3203 3204 bool ValidateImageSubresourceRange(const layer_data *device_data, const uint32_t image_mip_count, const uint32_t image_layer_count, 3205 const VkImageSubresourceRange &subresourceRange, const char *cmd_name, const char *param_name, 3206 const char *image_layer_count_var_name, const uint64_t image_handle, 3207 SubresourceRangeErrorCodes errorCodes) { 3208 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3209 bool skip = false; 3210 3211 // Validate mip levels 3212 if (subresourceRange.baseMipLevel >= image_mip_count) { 3213 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle, __LINE__, 3214 errorCodes.base_mip_err, "IMAGE", 3215 "%s: %s.baseMipLevel (= %" PRIu32 3216 ") is greater or equal to the mip level count of the image (i.e. greater or equal to %" PRIu32 "). 
%s", 3217 cmd_name, param_name, subresourceRange.baseMipLevel, image_mip_count, 3218 validation_error_map[errorCodes.base_mip_err]); 3219 } 3220 3221 if (subresourceRange.levelCount != VK_REMAINING_MIP_LEVELS) { 3222 if (subresourceRange.levelCount == 0) { 3223 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle, 3224 __LINE__, errorCodes.mip_count_err, "IMAGE", "%s: %s.levelCount is 0. %s", cmd_name, param_name, 3225 validation_error_map[errorCodes.mip_count_err]); 3226 } else { 3227 const uint64_t necessary_mip_count = uint64_t{subresourceRange.baseMipLevel} + uint64_t{subresourceRange.levelCount}; 3228 3229 if (necessary_mip_count > image_mip_count) { 3230 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle, 3231 __LINE__, errorCodes.mip_count_err, "IMAGE", 3232 "%s: %s.baseMipLevel + .levelCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64 3233 ") is greater than the mip level count of the image (i.e. greater than %" PRIu32 "). %s", 3234 cmd_name, param_name, subresourceRange.baseMipLevel, subresourceRange.levelCount, 3235 necessary_mip_count, image_mip_count, validation_error_map[errorCodes.mip_count_err]); 3236 } 3237 } 3238 } 3239 3240 // Validate array layers 3241 if (subresourceRange.baseArrayLayer >= image_layer_count) { 3242 skip |= 3243 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle, __LINE__, 3244 errorCodes.base_layer_err, "IMAGE", 3245 "%s: %s.baseArrayLayer (= %" PRIu32 3246 ") is greater or equal to the %s of the image when it was created (i.e. greater or equal to %" PRIu32 "). 
%s", 3247 cmd_name, param_name, subresourceRange.baseArrayLayer, image_layer_count_var_name, image_layer_count, 3248 validation_error_map[errorCodes.base_layer_err]); 3249 } 3250 3251 if (subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS) { 3252 if (subresourceRange.layerCount == 0) { 3253 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle, 3254 __LINE__, errorCodes.layer_count_err, "IMAGE", "%s: %s.layerCount is 0. %s", cmd_name, param_name, 3255 validation_error_map[errorCodes.layer_count_err]); 3256 } else { 3257 const uint64_t necessary_layer_count = 3258 uint64_t{subresourceRange.baseArrayLayer} + uint64_t{subresourceRange.layerCount}; 3259 3260 if (necessary_layer_count > image_layer_count) { 3261 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle, 3262 __LINE__, errorCodes.layer_count_err, "IMAGE", 3263 "%s: %s.baseArrayLayer + .layerCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64 3264 ") is greater than the %s of the image when it was created (i.e. greater than %" PRIu32 "). 
%s", 3265 cmd_name, param_name, subresourceRange.baseArrayLayer, subresourceRange.layerCount, 3266 necessary_layer_count, image_layer_count_var_name, image_layer_count, 3267 validation_error_map[errorCodes.layer_count_err]); 3268 } 3269 } 3270 } 3271 3272 return skip; 3273 } 3274 3275 bool ValidateCreateImageViewSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state, 3276 bool is_imageview_2d_type, const VkImageSubresourceRange &subresourceRange) { 3277 bool is_khr_maintenance1 = GetDeviceExtensions(device_data)->vk_khr_maintenance1; 3278 bool is_image_slicable = image_state->createInfo.imageType == VK_IMAGE_TYPE_3D && 3279 (image_state->createInfo.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR); 3280 bool is_3D_to_2D_map = is_khr_maintenance1 && is_image_slicable && is_imageview_2d_type; 3281 3282 const auto image_layer_count = is_3D_to_2D_map ? image_state->createInfo.extent.depth : image_state->createInfo.arrayLayers; 3283 const auto image_layer_count_var_name = is_3D_to_2D_map ? "extent.depth" : "arrayLayers"; 3284 3285 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {}; 3286 subresourceRangeErrorCodes.base_mip_err = VALIDATION_ERROR_0ac00b8c; 3287 subresourceRangeErrorCodes.mip_count_err = VALIDATION_ERROR_0ac00b8e; 3288 subresourceRangeErrorCodes.base_layer_err = 3289 is_khr_maintenance1 ? (is_3D_to_2D_map ? VALIDATION_ERROR_0ac00b98 : VALIDATION_ERROR_0ac00b94) : VALIDATION_ERROR_0ac00b90; 3290 subresourceRangeErrorCodes.layer_count_err = 3291 is_khr_maintenance1 ? (is_3D_to_2D_map ? 
VALIDATION_ERROR_0ac00b9a : VALIDATION_ERROR_0ac00b96) : VALIDATION_ERROR_0ac00b92; 3292 3293 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_layer_count, subresourceRange, 3294 "vkCreateImageView", "pCreateInfo->subresourceRange", image_layer_count_var_name, 3295 HandleToUint64(image_state->image), subresourceRangeErrorCodes); 3296 } 3297 3298 bool ValidateCmdClearColorSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state, 3299 const VkImageSubresourceRange &subresourceRange, const char *param_name) { 3300 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {}; 3301 subresourceRangeErrorCodes.base_mip_err = VALIDATION_ERROR_18800b7c; 3302 subresourceRangeErrorCodes.mip_count_err = VALIDATION_ERROR_18800b7e; 3303 subresourceRangeErrorCodes.base_layer_err = VALIDATION_ERROR_18800b80; 3304 subresourceRangeErrorCodes.layer_count_err = VALIDATION_ERROR_18800b82; 3305 3306 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, 3307 subresourceRange, "vkCmdClearColorImage", param_name, "arrayLayers", 3308 HandleToUint64(image_state->image), subresourceRangeErrorCodes); 3309 } 3310 3311 bool ValidateCmdClearDepthSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state, 3312 const VkImageSubresourceRange &subresourceRange, const char *param_name) { 3313 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {}; 3314 subresourceRangeErrorCodes.base_mip_err = VALIDATION_ERROR_18a00b84; 3315 subresourceRangeErrorCodes.mip_count_err = VALIDATION_ERROR_18a00b86; 3316 subresourceRangeErrorCodes.base_layer_err = VALIDATION_ERROR_18a00b88; 3317 subresourceRangeErrorCodes.layer_count_err = VALIDATION_ERROR_18a00b8a; 3318 3319 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, 3320 subresourceRange, "vkCmdClearDepthStencilImage", param_name, 
                                         "arrayLayers", HandleToUint64(image_state->image), subresourceRangeErrorCodes);
}

// Checks an image barrier's subresourceRange against the owning image's mip and
// array-layer counts, reporting failures with the barrier-specific error codes.
bool ValidateImageBarrierSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
                                          const VkImageSubresourceRange &subresourceRange, const char *cmd_name,
                                          const char *param_name) {
    SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
    subresourceRangeErrorCodes.base_mip_err = VALIDATION_ERROR_0a000b9c;
    subresourceRangeErrorCodes.mip_count_err = VALIDATION_ERROR_0a000b9e;
    subresourceRangeErrorCodes.base_layer_err = VALIDATION_ERROR_0a000ba0;
    subresourceRangeErrorCodes.layer_count_err = VALIDATION_ERROR_0a000ba2;

    return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
                                         subresourceRange, cmd_name, param_name, "arrayLayers", HandleToUint64(image_state->image),
                                         subresourceRangeErrorCodes);
}

// Validates vkCreateImageView() parameters against the source image's recorded state:
// usage flags, bound memory, subresource range, format compatibility (mutable-format
// rules), view-type vs. image-type compatibility, and format-feature support for the
// image's tiling mode. Returns true if any check fails (call should be skipped).
bool PreCallValidateCreateImageView(layer_data *device_data, const VkImageViewCreateInfo *create_info) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    IMAGE_STATE *image_state = GetImageState(device_data, create_info->image);
    if (image_state) {
        skip |= ValidateImageUsageFlags(
            device_data, image_state,
            VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
                VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
            false, -1, "vkCreateImageView()",
            "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT|DEPTH_STENCIL_ATTACHMENT|INPUT_ATTACHMENT]_BIT");
        // If this isn't a sparse image, it needs to have memory backing it at CreateImageView time
        skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCreateImageView()", VALIDATION_ERROR_0ac007f8);
        // Checks imported from image layer
        skip |= ValidateCreateImageViewSubresourceRange(
            device_data, image_state,
            create_info->viewType == VK_IMAGE_VIEW_TYPE_2D || create_info->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY,
            create_info->subresourceRange);

        VkImageCreateFlags image_flags = image_state->createInfo.flags;
        VkFormat image_format = image_state->createInfo.format;
        VkImageUsageFlags image_usage = image_state->createInfo.usage;
        VkImageTiling image_tiling = image_state->createInfo.tiling;
        VkFormat view_format = create_info->format;
        VkImageAspectFlags aspect_mask = create_info->subresourceRange.aspectMask;
        VkImageType image_type = image_state->createInfo.imageType;
        VkImageViewType view_type = create_info->viewType;

        // Validate VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT state
        if (image_flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) {
            // Unless maintenance2's block-texel-view compatibility applies, the view format
            // must be in the same format compatibility class as the image's format.
            if ((!GetDeviceExtensions(device_data)->vk_khr_maintenance2 ||
                 !(image_flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR))) {
                // Format MUST be compatible (in the same format compatibility class) as the format the image was created with
                if (FormatCompatibilityClass(image_format) != FormatCompatibilityClass(view_format)) {
                    std::stringstream ss;
                    ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
                       << " is not in the same format compatibility class as image (" << HandleToUint64(create_info->image)
                       << ") format " << string_VkFormat(image_format)
                       << ". Images created with the VK_IMAGE_CREATE_MUTABLE_FORMAT BIT "
                       << "can support ImageViews with differing formats but they must be in the same compatibility class.";
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, VALIDATION_ERROR_0ac007f4, "IMAGE", "%s %s", ss.str().c_str(),
                                    validation_error_map[VALIDATION_ERROR_0ac007f4]);
                }
            }
        } else {
            // Format MUST be IDENTICAL to the format the image was created with
            if (image_format != view_format) {
                std::stringstream ss;
                ss << "vkCreateImageView() format " << string_VkFormat(view_format) << " differs from image "
                   << HandleToUint64(create_info->image) << " format " << string_VkFormat(image_format)
                   << ". Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                                VALIDATION_ERROR_0ac007f6, "IMAGE", "%s %s", ss.str().c_str(),
                                validation_error_map[VALIDATION_ERROR_0ac007f6]);
            }
        }

        // Validate correct image aspect bits for desired formats and format consistency
        skip |= ValidateImageAspectMask(device_data, image_state->image, image_format, aspect_mask, "vkCreateImageView()");

        // View-type vs. image-type compatibility checks
        switch (image_type) {
            case VK_IMAGE_TYPE_1D:
                if (view_type != VK_IMAGE_VIEW_TYPE_1D && view_type != VK_IMAGE_VIEW_TYPE_1D_ARRAY) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, VALIDATION_ERROR_0ac007fa, "IMAGE",
                                    "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s. %s",
                                    string_VkImageViewType(view_type), string_VkImageType(image_type),
                                    validation_error_map[VALIDATION_ERROR_0ac007fa]);
                }
                break;
            case VK_IMAGE_TYPE_2D:
                if (view_type != VK_IMAGE_VIEW_TYPE_2D && view_type != VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
                    // Cube views of a 2D image require the cube-compatible create flag
                    if ((view_type == VK_IMAGE_VIEW_TYPE_CUBE || view_type == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) &&
                        !(image_flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        __LINE__, VALIDATION_ERROR_0ac007d6, "IMAGE",
                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s. %s",
                                        string_VkImageViewType(view_type), string_VkImageType(image_type),
                                        validation_error_map[VALIDATION_ERROR_0ac007d6]);
                    } else if (view_type != VK_IMAGE_VIEW_TYPE_CUBE && view_type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        __LINE__, VALIDATION_ERROR_0ac007fa, "IMAGE",
                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s. %s",
                                        string_VkImageViewType(view_type), string_VkImageType(image_type),
                                        validation_error_map[VALIDATION_ERROR_0ac007fa]);
                    }
                }
                break;
            case VK_IMAGE_TYPE_3D:
                if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) {
                    if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
                        // With maintenance1, 2D/2D_ARRAY views of a 3D image are allowed when
                        // the image was created 2D-array-compatible (and is not sparse).
                        if ((view_type == VK_IMAGE_VIEW_TYPE_2D || view_type == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
                            if (!(image_flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) {
                                skip |= log_msg(
                                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, VALIDATION_ERROR_0ac007da, "IMAGE",
                                    "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s. %s",
                                    string_VkImageViewType(view_type), string_VkImageType(image_type),
                                    validation_error_map[VALIDATION_ERROR_0ac007da]);
                            } else if ((image_flags & (VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT |
                                                       VK_IMAGE_CREATE_SPARSE_ALIASED_BIT))) {
                                skip |=
                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                            __LINE__, VALIDATION_ERROR_0ac007fa, "IMAGE",
                                            "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s "
                                            "when the VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or "
                                            "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT flags are enabled. %s",
                                            string_VkImageViewType(view_type), string_VkImageType(image_type),
                                            validation_error_map[VALIDATION_ERROR_0ac007fa]);
                            }
                        } else {
                            skip |=
                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        __LINE__, VALIDATION_ERROR_0ac007fa, "IMAGE",
                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s. %s",
                                        string_VkImageViewType(view_type), string_VkImageType(image_type),
                                        validation_error_map[VALIDATION_ERROR_0ac007fa]);
                        }
                    }
                } else {
                    if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        __LINE__, VALIDATION_ERROR_0ac007fa, "IMAGE",
                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s. %s",
                                        string_VkImageViewType(view_type), string_VkImageType(image_type),
                                        validation_error_map[VALIDATION_ERROR_0ac007fa]);
                    }
                }
                break;
            default:
                break;
        }

        // Select tiling-appropriate format features and the matching error-code table
        VkFormatProperties format_properties = GetFormatProperties(device_data, view_format);
        bool check_tiling_features = false;
        VkFormatFeatureFlags tiling_features = 0;
        UNIQUE_VALIDATION_ERROR_CODE linear_error_codes[] = {
            VALIDATION_ERROR_0ac007dc, VALIDATION_ERROR_0ac007e0, VALIDATION_ERROR_0ac007e2,
            VALIDATION_ERROR_0ac007e4, VALIDATION_ERROR_0ac007e6,
        };
        UNIQUE_VALIDATION_ERROR_CODE optimal_error_codes[] = {
            VALIDATION_ERROR_0ac007e8, VALIDATION_ERROR_0ac007ea, VALIDATION_ERROR_0ac007ec,
            VALIDATION_ERROR_0ac007ee, VALIDATION_ERROR_0ac007f0,
        };
        UNIQUE_VALIDATION_ERROR_CODE *error_codes = nullptr;
        if (image_tiling == VK_IMAGE_TILING_LINEAR) {
            tiling_features = format_properties.linearTilingFeatures;
            error_codes = linear_error_codes;
            check_tiling_features = true;
        } else if (image_tiling == VK_IMAGE_TILING_OPTIMAL) {
            tiling_features = format_properties.optimalTilingFeatures;
            error_codes = optimal_error_codes;
            check_tiling_features = true;
        }

        if (check_tiling_features) {
            // The view format must support every usage the image was created with
            if (tiling_features == 0) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            error_codes[0], "IMAGE",
                            "vkCreateImageView() pCreateInfo->format %s cannot be used with an image having the %s flag set. %s",
                            string_VkFormat(view_format), string_VkImageTiling(image_tiling), validation_error_map[error_codes[0]]);
            } else if ((image_usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(tiling_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            error_codes[1], "IMAGE",
                            "vkCreateImageView() pCreateInfo->format %s cannot be used with an image having the %s and "
                            "VK_IMAGE_USAGE_SAMPLED_BIT flags set. %s",
                            string_VkFormat(view_format), string_VkImageTiling(image_tiling), validation_error_map[error_codes[1]]);
            } else if ((image_usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(tiling_features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            error_codes[2], "IMAGE",
                            "vkCreateImageView() pCreateInfo->format %s cannot be used with an image having the %s and "
                            "VK_IMAGE_USAGE_STORAGE_BIT flags set. %s",
                            string_VkFormat(view_format), string_VkImageTiling(image_tiling), validation_error_map[error_codes[2]]);
            } else if ((image_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) &&
                       !(tiling_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            error_codes[3], "IMAGE",
                            "vkCreateImageView() pCreateInfo->format %s cannot be used with an image having the %s and "
                            "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT flags set. %s",
                            string_VkFormat(view_format), string_VkImageTiling(image_tiling), validation_error_map[error_codes[3]]);
            } else if ((image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) &&
                       !(tiling_features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            error_codes[4], "IMAGE",
                            "vkCreateImageView() pCreateInfo->format %s cannot be used with an image having the %s and "
                            "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT flags set. %s",
                            string_VkFormat(view_format), string_VkImageTiling(image_tiling), validation_error_map[error_codes[4]]);
            }
        }
    }
    return skip;
}

// Records the new image view in the view map and resolves any VK_REMAINING_*
// counts in its subresource range against the source image's createInfo.
void PostCallRecordCreateImageView(layer_data *device_data, const VkImageViewCreateInfo *create_info, VkImageView view) {
    auto image_view_map = GetImageViewMap(device_data);
    (*image_view_map)[view] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(view, create_info));

    auto image_state = GetImageState(device_data, create_info->image);
    auto &sub_res_range = (*image_view_map)[view].get()->create_info.subresourceRange;
    sub_res_range.levelCount = ResolveRemainingLevels(&sub_res_range, image_state->createInfo.mipLevels);
    sub_res_range.layerCount = ResolveRemainingLayers(&sub_res_range, image_state->createInfo.arrayLayers);
}

// Validates vkCmdCopyBuffer(): memory bindings, usage flags on both buffers,
// queue capabilities, command-buffer state, and render-pass restrictions.
bool PreCallValidateCmdCopyBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *src_buffer_state,
                                  BUFFER_STATE *dst_buffer_state) {
    bool skip = false;
    skip |= ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBuffer()", VALIDATION_ERROR_18c000ee);
    skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyBuffer()", VALIDATION_ERROR_18c000f2);
    // Validate that SRC & DST buffers have correct usage flags set
    skip |= ValidateBufferUsageFlags(device_data,
src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
                                     VALIDATION_ERROR_18c000ec, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
    skip |= ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
                                     VALIDATION_ERROR_18c000f0, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
    skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyBuffer()",
                                  VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, VALIDATION_ERROR_18c02415);
    skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFER, "vkCmdCopyBuffer()");
    skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyBuffer()", VALIDATION_ERROR_18c00017);
    return skip;
}

// Records state for vkCmdCopyBuffer(): binds both buffers to the command buffer
// and queues deferred (queue-submit-time) memory-validity work.
void PreCallRecordCmdCopyBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *src_buffer_state,
                                BUFFER_STATE *dst_buffer_state) {
    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state);

    // At submit time: source memory must already be valid; destination becomes valid
    std::function<bool()> function = [=]() {
        return ValidateBufferMemoryIsValid(device_data, src_buffer_state, "vkCmdCopyBuffer()");
    };
    cb_node->queue_submit_functions.push_back(function);
    function = [=]() {
        SetBufferMemoryValid(device_data, dst_buffer_state, true);
        return false;
    };
    cb_node->queue_submit_functions.push_back(function);
}

// Returns true (error) if the buffer is untracked (double destroy) or is still
// referenced by an in-flight command buffer.
static bool validateIdleBuffer(layer_data *device_data, VkBuffer buffer) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    auto buffer_state = GetBufferState(device_data, buffer);
    if (!buffer_state) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, HandleToUint64(buffer),
                        __LINE__, DRAWSTATE_DOUBLE_DESTROY, "DS", "Cannot free buffer 0x%" PRIx64 " that has not been allocated.",
                        HandleToUint64(buffer));
    } else {
        if (buffer_state->in_use.load()) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
                            HandleToUint64(buffer), __LINE__, VALIDATION_ERROR_23c00734, "DS",
                            "Cannot free buffer 0x%" PRIx64 " that is in use by a command buffer. %s", HandleToUint64(buffer),
                            validation_error_map[VALIDATION_ERROR_23c00734]);
        }
    }
    return skip;
}

// Looks up the view's state/object for the caller (out-params are always set),
// then validates the view is not in use unless this check is disabled.
bool PreCallValidateDestroyImageView(layer_data *device_data, VkImageView image_view, IMAGE_VIEW_STATE **image_view_state,
                                     VK_OBJECT *obj_struct) {
    *image_view_state = GetImageViewState(device_data, image_view);
    *obj_struct = {HandleToUint64(image_view), kVulkanObjectTypeImageView};
    if (GetDisables(device_data)->destroy_image_view) return false;
    bool skip = false;
    if (*image_view_state) {
        skip |=
            ValidateObjectNotInUse(device_data, *image_view_state, *obj_struct, "vkDestroyImageView", VALIDATION_ERROR_25400804);
    }
    return skip;
}

void PostCallRecordDestroyImageView(layer_data *device_data, VkImageView image_view, IMAGE_VIEW_STATE *image_view_state,
                                    VK_OBJECT obj_struct) {
    // Any bound cmd buffers are now invalid
    invalidateCommandBuffers(device_data, image_view_state->cb_bindings, obj_struct);
    (*GetImageViewMap(device_data)).erase(image_view);
}

// Looks up the buffer's state/object for the caller, then validates the buffer
// is idle (not in use, not already destroyed) unless this check is disabled.
bool PreCallValidateDestroyBuffer(layer_data *device_data, VkBuffer buffer, BUFFER_STATE **buffer_state, VK_OBJECT *obj_struct) {
    *buffer_state = GetBufferState(device_data, buffer);
    *obj_struct = {HandleToUint64(buffer), kVulkanObjectTypeBuffer};
    if (GetDisables(device_data)->destroy_buffer) return false;
    bool skip = false;
    if (*buffer_state) {
        skip |= validateIdleBuffer(device_data, buffer);
    }
    return skip;
}

// Cleans up all tracking for a destroyed buffer: invalidates bound command buffers,
// removes its memory-range records, clears memory bindings, erases its map entry.
void PostCallRecordDestroyBuffer(layer_data *device_data, VkBuffer buffer, BUFFER_STATE *buffer_state, VK_OBJECT obj_struct) {
    invalidateCommandBuffers(device_data, buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetMemObjInfo(device_data, mem_binding);
        if (mem_info) {
            core_validation::RemoveBufferMemoryRange(HandleToUint64(buffer), mem_info);
        }
    }
    ClearMemoryObjectBindings(device_data, HandleToUint64(buffer), kVulkanObjectTypeBuffer);
    GetBufferMap(device_data)->erase(buffer_state->buffer);
}

// Looks up the buffer view's state/object for the caller, then validates the view
// is not in use unless this check is disabled.
bool PreCallValidateDestroyBufferView(layer_data *device_data, VkBufferView buffer_view, BUFFER_VIEW_STATE **buffer_view_state,
                                      VK_OBJECT *obj_struct) {
    *buffer_view_state = GetBufferViewState(device_data, buffer_view);
    *obj_struct = {HandleToUint64(buffer_view), kVulkanObjectTypeBufferView};
    if (GetDisables(device_data)->destroy_buffer_view) return false;
    bool skip = false;
    if (*buffer_view_state) {
        skip |=
            ValidateObjectNotInUse(device_data, *buffer_view_state, *obj_struct, "vkDestroyBufferView", VALIDATION_ERROR_23e00750);
    }
    return skip;
}

void PostCallRecordDestroyBufferView(layer_data *device_data, VkBufferView buffer_view, BUFFER_VIEW_STATE *buffer_view_state,
                                     VK_OBJECT obj_struct) {
    // Any bound cmd buffers are now invalid
    invalidateCommandBuffers(device_data, buffer_view_state->cb_bindings, obj_struct);
    GetBufferViewMap(device_data)->erase(buffer_view);
}

// Validates vkCmdFillBuffer(): memory binding, queue capabilities, command state,
// destination usage flags, and that we are outside a render pass.
bool PreCallValidateCmdFillBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *buffer_state) {
    bool skip = false;
    skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdFillBuffer()", VALIDATION_ERROR_1b40003e);
    skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdFillBuffer()",
                                  VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, VALIDATION_ERROR_1b402415);
    skip |= ValidateCmd(device_data,
cb_node, CMD_FILLBUFFER, "vkCmdFillBuffer()"); 3669 // Validate that DST buffer has correct usage flags set 3670 skip |= ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, VALIDATION_ERROR_1b40003a, 3671 "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 3672 skip |= insideRenderPass(device_data, cb_node, "vkCmdFillBuffer()", VALIDATION_ERROR_1b400017); 3673 return skip; 3674 } 3675 3676 void PreCallRecordCmdFillBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *buffer_state) { 3677 std::function<bool()> function = [=]() { 3678 SetBufferMemoryValid(device_data, buffer_state, true); 3679 return false; 3680 }; 3681 cb_node->queue_submit_functions.push_back(function); 3682 // Update bindings between buffer and cmd buffer 3683 AddCommandBufferBindingBuffer(device_data, cb_node, buffer_state); 3684 } 3685 3686 bool ValidateBufferImageCopyData(const debug_report_data *report_data, uint32_t regionCount, const VkBufferImageCopy *pRegions, 3687 IMAGE_STATE *image_state, const char *function) { 3688 bool skip = false; 3689 3690 for (uint32_t i = 0; i < regionCount; i++) { 3691 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) { 3692 if ((pRegions[i].imageOffset.y != 0) || (pRegions[i].imageExtent.height != 1)) { 3693 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3694 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160018e, "IMAGE", 3695 "%s(): pRegion[%d] imageOffset.y is %d and imageExtent.height is %d. For 1D images these must be 0 " 3696 "and 1, respectively. 
%s", 3697 function, i, pRegions[i].imageOffset.y, pRegions[i].imageExtent.height, 3698 validation_error_map[VALIDATION_ERROR_0160018e]); 3699 } 3700 } 3701 3702 if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (image_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) { 3703 if ((pRegions[i].imageOffset.z != 0) || (pRegions[i].imageExtent.depth != 1)) { 3704 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3705 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600192, "IMAGE", 3706 "%s(): pRegion[%d] imageOffset.z is %d and imageExtent.depth is %d. For 1D and 2D images these " 3707 "must be 0 and 1, respectively. %s", 3708 function, i, pRegions[i].imageOffset.z, pRegions[i].imageExtent.depth, 3709 validation_error_map[VALIDATION_ERROR_01600192]); 3710 } 3711 } 3712 3713 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 3714 if ((0 != pRegions[i].imageSubresource.baseArrayLayer) || (1 != pRegions[i].imageSubresource.layerCount)) { 3715 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3716 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001aa, "IMAGE", 3717 "%s(): pRegion[%d] imageSubresource.baseArrayLayer is %d and imageSubresource.layerCount is %d. " 3718 "For 3D images these must be 0 and 1, respectively. 
%s", 3719 function, i, pRegions[i].imageSubresource.baseArrayLayer, pRegions[i].imageSubresource.layerCount, 3720 validation_error_map[VALIDATION_ERROR_016001aa]); 3721 } 3722 } 3723 3724 // If the the calling command's VkImage parameter's format is not a depth/stencil format, 3725 // then bufferOffset must be a multiple of the calling command's VkImage parameter's texel size 3726 auto texel_size = FormatSize(image_state->createInfo.format); 3727 if (!FormatIsDepthAndStencil(image_state->createInfo.format) && SafeModulo(pRegions[i].bufferOffset, texel_size) != 0) { 3728 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3729 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600182, "IMAGE", 3730 "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64 3731 " must be a multiple of this format's texel size (" PRINTF_SIZE_T_SPECIFIER "). %s", 3732 function, i, pRegions[i].bufferOffset, texel_size, validation_error_map[VALIDATION_ERROR_01600182]); 3733 } 3734 3735 // BufferOffset must be a multiple of 4 3736 if (SafeModulo(pRegions[i].bufferOffset, 4) != 0) { 3737 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3738 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600184, "IMAGE", 3739 "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64 " must be a multiple of 4. 
%s", function, i, 3740 pRegions[i].bufferOffset, validation_error_map[VALIDATION_ERROR_01600184]); 3741 } 3742 3743 // BufferRowLength must be 0, or greater than or equal to the width member of imageExtent 3744 if ((pRegions[i].bufferRowLength != 0) && (pRegions[i].bufferRowLength < pRegions[i].imageExtent.width)) { 3745 skip |= log_msg( 3746 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3747 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600186, "IMAGE", 3748 "%s(): pRegion[%d] bufferRowLength (%d) must be zero or greater-than-or-equal-to imageExtent.width (%d). %s", 3749 function, i, pRegions[i].bufferRowLength, pRegions[i].imageExtent.width, 3750 validation_error_map[VALIDATION_ERROR_01600186]); 3751 } 3752 3753 // BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent 3754 if ((pRegions[i].bufferImageHeight != 0) && (pRegions[i].bufferImageHeight < pRegions[i].imageExtent.height)) { 3755 skip |= log_msg( 3756 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3757 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600188, "IMAGE", 3758 "%s(): pRegion[%d] bufferImageHeight (%d) must be zero or greater-than-or-equal-to imageExtent.height (%d). %s", 3759 function, i, pRegions[i].bufferImageHeight, pRegions[i].imageExtent.height, 3760 validation_error_map[VALIDATION_ERROR_01600188]); 3761 } 3762 3763 // subresource aspectMask must have exactly 1 bit set 3764 const int num_bits = sizeof(VkFlags) * CHAR_BIT; 3765 std::bitset<num_bits> aspect_mask_bits(pRegions[i].imageSubresource.aspectMask); 3766 if (aspect_mask_bits.count() != 1) { 3767 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3768 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a8, "IMAGE", 3769 "%s: aspectMasks for imageSubresource in each region must have only a single bit set. 
%s", function, 3770 validation_error_map[VALIDATION_ERROR_016001a8]); 3771 } 3772 3773 // image subresource aspect bit must match format 3774 if (!VerifyAspectsPresent(pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format)) { 3775 skip |= log_msg( 3776 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3777 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a6, "IMAGE", 3778 "%s(): pRegion[%d] subresource aspectMask 0x%x specifies aspects that are not present in image format 0x%x. %s", 3779 function, i, pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format, 3780 validation_error_map[VALIDATION_ERROR_016001a6]); 3781 } 3782 3783 // Checks that apply only to compressed images 3784 // TODO: there is a comment in ValidateCopyBufferImageTransferGranularityRequirements() in core_validation.cpp that 3785 // reserves a place for these compressed image checks. This block of code could move there once the image 3786 // stuff is moved into core validation. 3787 if (FormatIsCompressed(image_state->createInfo.format)) { 3788 auto block_size = FormatCompressedTexelBlockExtent(image_state->createInfo.format); 3789 3790 // BufferRowLength must be a multiple of block width 3791 if (SafeModulo(pRegions[i].bufferRowLength, block_size.width) != 0) { 3792 skip |= log_msg( 3793 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3794 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600196, "IMAGE", 3795 "%s(): pRegion[%d] bufferRowLength (%d) must be a multiple of the compressed image's texel width (%d). 
%s.", 3796 function, i, pRegions[i].bufferRowLength, block_size.width, validation_error_map[VALIDATION_ERROR_01600196]); 3797 } 3798 3799 // BufferRowHeight must be a multiple of block height 3800 if (SafeModulo(pRegions[i].bufferImageHeight, block_size.height) != 0) { 3801 skip |= log_msg( 3802 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3803 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600198, "IMAGE", 3804 "%s(): pRegion[%d] bufferImageHeight (%d) must be a multiple of the compressed image's texel height (%d). %s.", 3805 function, i, pRegions[i].bufferImageHeight, block_size.height, validation_error_map[VALIDATION_ERROR_01600198]); 3806 } 3807 3808 // image offsets must be multiples of block dimensions 3809 if ((SafeModulo(pRegions[i].imageOffset.x, block_size.width) != 0) || 3810 (SafeModulo(pRegions[i].imageOffset.y, block_size.height) != 0) || 3811 (SafeModulo(pRegions[i].imageOffset.z, block_size.depth) != 0)) { 3812 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3813 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160019a, "IMAGE", 3814 "%s(): pRegion[%d] imageOffset(x,y) (%d, %d) must be multiples of the compressed image's texel " 3815 "width & height (%d, %d). 
%s.", 3816 function, i, pRegions[i].imageOffset.x, pRegions[i].imageOffset.y, block_size.width, 3817 block_size.height, validation_error_map[VALIDATION_ERROR_0160019a]); 3818 } 3819 3820 // bufferOffset must be a multiple of block size (linear bytes) 3821 size_t block_size_in_bytes = FormatSize(image_state->createInfo.format); 3822 if (SafeModulo(pRegions[i].bufferOffset, block_size_in_bytes) != 0) { 3823 skip |= log_msg( 3824 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3825 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160019c, "IMAGE", 3826 "%s(): pRegion[%d] bufferOffset (0x%" PRIxLEAST64 3827 ") must be a multiple of the compressed image's texel block size (" PRINTF_SIZE_T_SPECIFIER "). %s.", 3828 function, i, pRegions[i].bufferOffset, block_size_in_bytes, validation_error_map[VALIDATION_ERROR_0160019c]); 3829 } 3830 3831 // imageExtent width must be a multiple of block width, or extent+offset width must equal subresource width 3832 VkExtent3D mip_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource)); 3833 if ((SafeModulo(pRegions[i].imageExtent.width, block_size.width) != 0) && 3834 (pRegions[i].imageExtent.width + pRegions[i].imageOffset.x != mip_extent.width)) { 3835 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3836 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160019e, "IMAGE", 3837 "%s(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block width " 3838 "(%d), or when added to offset.x (%d) must equal the image subresource width (%d). 
%s.", 3839 function, i, pRegions[i].imageExtent.width, block_size.width, pRegions[i].imageOffset.x, 3840 mip_extent.width, validation_error_map[VALIDATION_ERROR_0160019e]); 3841 } 3842 3843 // imageExtent height must be a multiple of block height, or extent+offset height must equal subresource height 3844 if ((SafeModulo(pRegions[i].imageExtent.height, block_size.height) != 0) && 3845 (pRegions[i].imageExtent.height + pRegions[i].imageOffset.y != mip_extent.height)) { 3846 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3847 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a0, "IMAGE", 3848 "%s(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block height " 3849 "(%d), or when added to offset.y (%d) must equal the image subresource height (%d). %s.", 3850 function, i, pRegions[i].imageExtent.height, block_size.height, pRegions[i].imageOffset.y, 3851 mip_extent.height, validation_error_map[VALIDATION_ERROR_016001a0]); 3852 } 3853 3854 // imageExtent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth 3855 if ((SafeModulo(pRegions[i].imageExtent.depth, block_size.depth) != 0) && 3856 (pRegions[i].imageExtent.depth + pRegions[i].imageOffset.z != mip_extent.depth)) { 3857 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3858 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a2, "IMAGE", 3859 "%s(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block depth " 3860 "(%d), or when added to offset.z (%d) must equal the image subresource depth (%d). 
%s.", 3861 function, i, pRegions[i].imageExtent.depth, block_size.depth, pRegions[i].imageOffset.z, 3862 mip_extent.depth, validation_error_map[VALIDATION_ERROR_016001a2]); 3863 } 3864 } 3865 } 3866 3867 return skip; 3868 } 3869 3870 static bool ValidateImageBounds(const debug_report_data *report_data, const IMAGE_STATE *image_state, const uint32_t regionCount, 3871 const VkBufferImageCopy *pRegions, const char *func_name, UNIQUE_VALIDATION_ERROR_CODE msg_code) { 3872 bool skip = false; 3873 const VkImageCreateInfo *image_info = &(image_state->createInfo); 3874 3875 for (uint32_t i = 0; i < regionCount; i++) { 3876 VkExtent3D extent = pRegions[i].imageExtent; 3877 VkOffset3D offset = pRegions[i].imageOffset; 3878 3879 if (IsExtentSizeZero(&extent)) // Warn on zero area subresource 3880 { 3881 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3882 (uint64_t)0, __LINE__, IMAGE_ZERO_AREA_SUBREGION, "IMAGE", 3883 "%s: pRegion[%d] imageExtent of {%1d, %1d, %1d} has zero area", func_name, i, extent.width, 3884 extent.height, extent.depth); 3885 } 3886 3887 VkExtent3D image_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource)); 3888 3889 // If we're using a compressed format, valid extent is rounded up to multiple of block size (per 18.1) 3890 if (FormatIsCompressed(image_info->format)) { 3891 auto block_extent = FormatCompressedTexelBlockExtent(image_info->format); 3892 if (image_extent.width % block_extent.width) { 3893 image_extent.width += (block_extent.width - (image_extent.width % block_extent.width)); 3894 } 3895 if (image_extent.height % block_extent.height) { 3896 image_extent.height += (block_extent.height - (image_extent.height % block_extent.height)); 3897 } 3898 if (image_extent.depth % block_extent.depth) { 3899 image_extent.depth += (block_extent.depth - (image_extent.depth % block_extent.depth)); 3900 } 3901 } 3902 3903 if (0 != ExceedsBounds(&offset, &extent, 
&image_extent)) { 3904 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0, 3905 __LINE__, msg_code, "IMAGE", "%s: pRegion[%d] exceeds image bounds. %s.", func_name, i, 3906 validation_error_map[msg_code]); 3907 } 3908 } 3909 3910 return skip; 3911 } 3912 3913 static inline bool ValidateBufferBounds(const debug_report_data *report_data, IMAGE_STATE *image_state, BUFFER_STATE *buff_state, 3914 uint32_t regionCount, const VkBufferImageCopy *pRegions, const char *func_name, 3915 UNIQUE_VALIDATION_ERROR_CODE msg_code) { 3916 bool skip = false; 3917 3918 VkDeviceSize buffer_size = buff_state->createInfo.size; 3919 3920 for (uint32_t i = 0; i < regionCount; i++) { 3921 VkExtent3D copy_extent = pRegions[i].imageExtent; 3922 3923 VkDeviceSize buffer_width = (0 == pRegions[i].bufferRowLength ? copy_extent.width : pRegions[i].bufferRowLength); 3924 VkDeviceSize buffer_height = (0 == pRegions[i].bufferImageHeight ? copy_extent.height : pRegions[i].bufferImageHeight); 3925 VkDeviceSize unit_size = FormatSize(image_state->createInfo.format); // size (bytes) of texel or block 3926 3927 // Handle special buffer packing rules for specific depth/stencil formats 3928 if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) { 3929 unit_size = FormatSize(VK_FORMAT_S8_UINT); 3930 } else if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) { 3931 switch (image_state->createInfo.format) { 3932 case VK_FORMAT_D16_UNORM_S8_UINT: 3933 unit_size = FormatSize(VK_FORMAT_D16_UNORM); 3934 break; 3935 case VK_FORMAT_D32_SFLOAT_S8_UINT: 3936 unit_size = FormatSize(VK_FORMAT_D32_SFLOAT); 3937 break; 3938 case VK_FORMAT_X8_D24_UNORM_PACK32: // Fall through 3939 case VK_FORMAT_D24_UNORM_S8_UINT: 3940 unit_size = 4; 3941 break; 3942 default: 3943 break; 3944 } 3945 } 3946 3947 if (FormatIsCompressed(image_state->createInfo.format)) { 3948 // Switch to texel block units, rounding up for any 
partially-used blocks 3949 auto block_dim = FormatCompressedTexelBlockExtent(image_state->createInfo.format); 3950 buffer_width = (buffer_width + block_dim.width - 1) / block_dim.width; 3951 buffer_height = (buffer_height + block_dim.height - 1) / block_dim.height; 3952 3953 copy_extent.width = (copy_extent.width + block_dim.width - 1) / block_dim.width; 3954 copy_extent.height = (copy_extent.height + block_dim.height - 1) / block_dim.height; 3955 copy_extent.depth = (copy_extent.depth + block_dim.depth - 1) / block_dim.depth; 3956 } 3957 3958 // Either depth or layerCount may be greater than 1 (not both). This is the number of 'slices' to copy 3959 uint32_t z_copies = std::max(copy_extent.depth, pRegions[i].imageSubresource.layerCount); 3960 if (IsExtentSizeZero(©_extent) || (0 == z_copies)) { 3961 // TODO: Issue warning here? Already warned in ValidateImageBounds()... 3962 } else { 3963 // Calculate buffer offset of final copied byte, + 1. 3964 VkDeviceSize max_buffer_offset = (z_copies - 1) * buffer_height * buffer_width; // offset to slice 3965 max_buffer_offset += ((copy_extent.height - 1) * buffer_width) + copy_extent.width; // add row,col 3966 max_buffer_offset *= unit_size; // convert to bytes 3967 max_buffer_offset += pRegions[i].bufferOffset; // add initial offset (bytes) 3968 3969 if (buffer_size < max_buffer_offset) { 3970 skip |= 3971 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0, 3972 __LINE__, msg_code, "IMAGE", "%s: pRegion[%d] exceeds buffer size of %" PRIu64 " bytes. 
%s.", func_name, 3973 i, buffer_size, validation_error_map[msg_code]); 3974 } 3975 } 3976 } 3977 3978 return skip; 3979 } 3980 3981 bool PreCallValidateCmdCopyImageToBuffer(layer_data *device_data, VkImageLayout srcImageLayout, GLOBAL_CB_NODE *cb_node, 3982 IMAGE_STATE *src_image_state, BUFFER_STATE *dst_buffer_state, uint32_t regionCount, 3983 const VkBufferImageCopy *pRegions, const char *func_name) { 3984 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3985 bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, src_image_state, "vkCmdCopyImageToBuffer"); 3986 3987 // Validate command buffer state 3988 skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGETOBUFFER, "vkCmdCopyImageToBuffer()"); 3989 3990 // Command pool must support graphics, compute, or transfer operations 3991 auto pPool = GetCommandPoolNode(device_data, cb_node->createInfo.commandPool); 3992 3993 VkQueueFlags queue_flags = GetPhysDevProperties(device_data)->queue_family_properties[pPool->queueFamilyIndex].queueFlags; 3994 if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) { 3995 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3996 HandleToUint64(cb_node->createInfo.commandPool), __LINE__, VALIDATION_ERROR_19202415, "DS", 3997 "Cannot call vkCmdCopyImageToBuffer() on a command buffer allocated from a pool without graphics, compute, " 3998 "or transfer capabilities. 
%s.", 3999 validation_error_map[VALIDATION_ERROR_19202415]); 4000 } 4001 skip |= ValidateImageBounds(report_data, src_image_state, regionCount, pRegions, "vkCmdCopyBufferToImage()", 4002 VALIDATION_ERROR_1920016c); 4003 skip |= ValidateBufferBounds(report_data, src_image_state, dst_buffer_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()", 4004 VALIDATION_ERROR_1920016e); 4005 4006 skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyImageToBuffer(): srcImage", 4007 VALIDATION_ERROR_19200178); 4008 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_19200176); 4009 skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_19200180); 4010 4011 // Validate that SRC image & DST buffer have correct usage flags set 4012 skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, VALIDATION_ERROR_19200174, 4013 "vkCmdCopyImageToBuffer()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"); 4014 skip |= ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, 4015 VALIDATION_ERROR_1920017e, "vkCmdCopyImageToBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 4016 skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_19200017); 4017 bool hit_error = false; 4018 for (uint32_t i = 0; i < regionCount; ++i) { 4019 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout, 4020 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_1920017c, 4021 &hit_error); 4022 skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, src_image_state, &pRegions[i], i, 4023 "vkCmdCopyImageToBuffer()"); 4024 } 4025 return skip; 4026 } 4027 4028 void PreCallRecordCmdCopyImageToBuffer(layer_data *device_data, GLOBAL_CB_NODE 
*cb_node, IMAGE_STATE *src_image_state, 4029 BUFFER_STATE *dst_buffer_state, uint32_t region_count, const VkBufferImageCopy *regions, 4030 VkImageLayout src_image_layout) { 4031 // Make sure that all image slices are updated to correct layout 4032 for (uint32_t i = 0; i < region_count; ++i) { 4033 SetImageLayout(device_data, cb_node, src_image_state, regions[i].imageSubresource, src_image_layout); 4034 } 4035 // Update bindings between buffer/image and cmd buffer 4036 AddCommandBufferBindingImage(device_data, cb_node, src_image_state); 4037 AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state); 4038 4039 std::function<bool()> function = [=]() { 4040 return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdCopyImageToBuffer()"); 4041 }; 4042 cb_node->queue_submit_functions.push_back(function); 4043 function = [=]() { 4044 SetBufferMemoryValid(device_data, dst_buffer_state, true); 4045 return false; 4046 }; 4047 cb_node->queue_submit_functions.push_back(function); 4048 } 4049 4050 bool PreCallValidateCmdCopyBufferToImage(layer_data *device_data, VkImageLayout dstImageLayout, GLOBAL_CB_NODE *cb_node, 4051 BUFFER_STATE *src_buffer_state, IMAGE_STATE *dst_image_state, uint32_t regionCount, 4052 const VkBufferImageCopy *pRegions, const char *func_name) { 4053 const debug_report_data *report_data = core_validation::GetReportData(device_data); 4054 bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, dst_image_state, "vkCmdCopyBufferToImage"); 4055 4056 // Validate command buffer state 4057 skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFERTOIMAGE, "vkCmdCopyBufferToImage()"); 4058 4059 // Command pool must support graphics, compute, or transfer operations 4060 auto pPool = GetCommandPoolNode(device_data, cb_node->createInfo.commandPool); 4061 VkQueueFlags queue_flags = GetPhysDevProperties(device_data)->queue_family_properties[pPool->queueFamilyIndex].queueFlags; 4062 if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT 
| VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) { 4063 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 4064 HandleToUint64(cb_node->createInfo.commandPool), __LINE__, VALIDATION_ERROR_18e02415, "DS", 4065 "Cannot call vkCmdCopyBufferToImage() on a command buffer allocated from a pool without graphics, compute, " 4066 "or transfer capabilities. %s.", 4067 validation_error_map[VALIDATION_ERROR_18e02415]); 4068 } 4069 skip |= ValidateImageBounds(report_data, dst_image_state, regionCount, pRegions, "vkCmdCopyBufferToImage()", 4070 VALIDATION_ERROR_18e00158); 4071 skip |= ValidateBufferBounds(report_data, dst_image_state, src_buffer_state, regionCount, pRegions, "vkCmdCopyBufferToImage()", 4072 VALIDATION_ERROR_18e00156); 4073 skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyBufferToImage(): dstImage", 4074 VALIDATION_ERROR_18e00166); 4075 skip |= ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e00160); 4076 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e00164); 4077 skip |= ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, 4078 VALIDATION_ERROR_18e0015c, "vkCmdCopyBufferToImage()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT"); 4079 skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, VALIDATION_ERROR_18e00162, 4080 "vkCmdCopyBufferToImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT"); 4081 skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e00017); 4082 bool hit_error = false; 4083 for (uint32_t i = 0; i < regionCount; ++i) { 4084 skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout, 4085 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyBufferToImage()", 
VALIDATION_ERROR_18e0016a, 4086 &hit_error); 4087 skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, dst_image_state, &pRegions[i], i, 4088 "vkCmdCopyBufferToImage()"); 4089 } 4090 return skip; 4091 } 4092 4093 void PreCallRecordCmdCopyBufferToImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *src_buffer_state, 4094 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkBufferImageCopy *regions, 4095 VkImageLayout dst_image_layout) { 4096 // Make sure that all image slices are updated to correct layout 4097 for (uint32_t i = 0; i < region_count; ++i) { 4098 SetImageLayout(device_data, cb_node, dst_image_state, regions[i].imageSubresource, dst_image_layout); 4099 } 4100 AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state); 4101 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state); 4102 std::function<bool()> function = [=]() { 4103 SetImageMemoryValid(device_data, dst_image_state, true); 4104 return false; 4105 }; 4106 cb_node->queue_submit_functions.push_back(function); 4107 function = [=]() { return ValidateBufferMemoryIsValid(device_data, src_buffer_state, "vkCmdCopyBufferToImage()"); }; 4108 cb_node->queue_submit_functions.push_back(function); 4109 } 4110 4111 bool PreCallValidateGetImageSubresourceLayout(layer_data *device_data, VkImage image, const VkImageSubresource *pSubresource) { 4112 const auto report_data = core_validation::GetReportData(device_data); 4113 bool skip = false; 4114 const VkImageAspectFlags sub_aspect = pSubresource->aspectMask; 4115 4116 // The aspectMask member of pSubresource must only have a single bit set 4117 const int num_bits = sizeof(sub_aspect) * CHAR_BIT; 4118 std::bitset<num_bits> aspect_mask_bits(sub_aspect); 4119 if (aspect_mask_bits.count() != 1) { 4120 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 4121 __LINE__, VALIDATION_ERROR_2a6007ca, "IMAGE", 4122 
"vkGetImageSubresourceLayout(): VkImageSubresource.aspectMask must have exactly 1 bit set. %s", 4123 validation_error_map[VALIDATION_ERROR_2a6007ca]); 4124 } 4125 4126 IMAGE_STATE *image_entry = GetImageState(device_data, image); 4127 if (!image_entry) { 4128 return skip; 4129 } 4130 4131 // image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR 4132 if (image_entry->createInfo.tiling != VK_IMAGE_TILING_LINEAR) { 4133 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 4134 __LINE__, VALIDATION_ERROR_2a6007c8, "IMAGE", 4135 "vkGetImageSubresourceLayout(): Image must have tiling of VK_IMAGE_TILING_LINEAR. %s", 4136 validation_error_map[VALIDATION_ERROR_2a6007c8]); 4137 } 4138 4139 // mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created 4140 if (pSubresource->mipLevel >= image_entry->createInfo.mipLevels) { 4141 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 4142 __LINE__, VALIDATION_ERROR_0a4007cc, "IMAGE", 4143 "vkGetImageSubresourceLayout(): pSubresource.mipLevel (%d) must be less than %d. %s", 4144 pSubresource->mipLevel, image_entry->createInfo.mipLevels, validation_error_map[VALIDATION_ERROR_0a4007cc]); 4145 } 4146 4147 // arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created 4148 if (pSubresource->arrayLayer >= image_entry->createInfo.arrayLayers) { 4149 skip |= 4150 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 4151 __LINE__, VALIDATION_ERROR_0a4007ce, "IMAGE", 4152 "vkGetImageSubresourceLayout(): pSubresource.arrayLayer (%d) must be less than %d. 
%s", 4153 pSubresource->arrayLayer, image_entry->createInfo.arrayLayers, validation_error_map[VALIDATION_ERROR_0a4007ce]); 4154 } 4155 4156 // subresource's aspect must be compatible with image's format. 4157 const VkFormat img_format = image_entry->createInfo.format; 4158 if (FormatIsMultiplane(img_format)) { 4159 VkImageAspectFlags allowed_flags = (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR); 4160 UNIQUE_VALIDATION_ERROR_CODE vuid = VALIDATION_ERROR_2a600c5a; // 2-plane version 4161 if (FormatPlaneCount(img_format) > 2u) { 4162 allowed_flags |= VK_IMAGE_ASPECT_PLANE_2_BIT_KHR; 4163 vuid = VALIDATION_ERROR_2a600c5c; // 3-plane version 4164 } 4165 if (sub_aspect != (sub_aspect & allowed_flags)) { 4166 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 4167 HandleToUint64(image), __LINE__, vuid, "IMAGE", 4168 "vkGetImageSubresourceLayout(): For multi-planar images, VkImageSubresource.aspectMask (0x%" PRIx32 4169 ") must be a single-plane specifier flag. %s", 4170 sub_aspect, validation_error_map[vuid]); 4171 } 4172 } else if (FormatIsColor(img_format)) { 4173 if (sub_aspect != VK_IMAGE_ASPECT_COLOR_BIT) { 4174 skip |= log_msg( 4175 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), __LINE__, 4176 VALIDATION_ERROR_0a400c01, "IMAGE", 4177 "vkGetImageSubresourceLayout(): For color formats, VkImageSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR. 
%s", 4178 validation_error_map[VALIDATION_ERROR_0a400c01]); 4179 } 4180 } else if (FormatIsDepthOrStencil(img_format)) { 4181 if ((sub_aspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (sub_aspect != VK_IMAGE_ASPECT_STENCIL_BIT)) { 4182 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 4183 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 4184 "vkGetImageSubresourceLayout(): For depth/stencil formats, VkImageSubresource.aspectMask must be " 4185 "either VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT. %s", 4186 validation_error_map[VALIDATION_ERROR_0a400c01]); 4187 } 4188 } 4189 return skip; 4190 } 4191