/*
** Copyright (c) 2015-2016 The Khronos Group Inc.
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*
** This header is generated from the Khronos Vulkan XML API Registry.
**
*/


// Generated "unique objects" layer: non-dispatchable Vulkan handles returned to
// the application are replaced by unique integer ids.  Each intercepted entry
// point below unwraps those ids back to the driver handles via
// dev_data->unique_id_mapping (under global_lock) before calling down the
// dispatch chain; Create*/Allocate* entry points wrap the new handle on the
// way out.
namespace unique_objects {


// Declare only -- implemented elsewhere (hand-written portion of the layer).
VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(
    const VkInstanceCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkInstance* pInstance);

// Declare only
VKAPI_ATTR void VKAPI_CALL DestroyInstance(
    VkInstance instance,
    const VkAllocationCallbacks* pAllocator);

// Declare only
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(
    VkInstance instance,
    const char* pName);

// Declare only
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(
    VkDevice device,
    const char* pName);

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(
    VkPhysicalDevice physicalDevice,
    const VkDeviceCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDevice* pDevice);

// Declare only
VKAPI_ATTR void VKAPI_CALL DestroyDevice(
    VkDevice device,
    const VkAllocationCallbacks* pAllocator);

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(
    const char* pLayerName,
    uint32_t* pPropertyCount,
    VkExtensionProperties* pProperties);

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(
    uint32_t* pPropertyCount,
    VkLayerProperties* pProperties);

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(
    VkPhysicalDevice physicalDevice,
    uint32_t* pPropertyCount,
    VkLayerProperties* pProperties);

// Deep-copies pSubmits into safe_VkSubmitInfo structs so the wrapped
// semaphore handles (and the fence argument) can be rewritten to driver
// handles without mutating the caller's const input, then calls down.
// NOTE(review): unique_id_mapping's operator[] default-inserts a zero entry
// for an unknown id (e.g. VK_NULL_HANDLE) -- presumably intended to map null
// to null; confirm against the hand-written layer code.
VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
    VkQueue queue,
    uint32_t submitCount,
    const VkSubmitInfo* pSubmits,
    VkFence fence)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    safe_VkSubmitInfo *local_pSubmits = NULL;
    {
        // Lock only while reading unique_id_mapping; dropped before dispatch.
        std::lock_guard<std::mutex> lock(global_lock);
        if (pSubmits) {
            local_pSubmits = new safe_VkSubmitInfo[submitCount];
            for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
                local_pSubmits[index0].initialize(&pSubmits[index0]);
                if (local_pSubmits[index0].pWaitSemaphores) {
                    for (uint32_t index1 = 0; index1 < local_pSubmits[index0].waitSemaphoreCount; ++index1) {
                        local_pSubmits[index0].pWaitSemaphores[index1] = (VkSemaphore)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pSubmits[index0].pWaitSemaphores[index1])];
                    }
                }
                if (local_pSubmits[index0].pSignalSemaphores) {
                    for (uint32_t index1 = 0; index1 < local_pSubmits[index0].signalSemaphoreCount; ++index1) {
                        local_pSubmits[index0].pSignalSemaphores[index1] = (VkSemaphore)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pSubmits[index0].pSignalSemaphores[index1])];
                    }
                }
            }
        }
        fence = (VkFence)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(fence)];
    }
    VkResult result = dev_data->device_dispatch_table->QueueSubmit(queue, submitCount, (const VkSubmitInfo*)local_pSubmits, fence);
    if (local_pSubmits)
        delete[] local_pSubmits;
    return result;
}

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
    VkDevice device,
    const VkMemoryAllocateInfo* pAllocateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDeviceMemory* pMemory);

// Unwraps the memory handle and removes its id from the map (handle is being
// destroyed); the lock is released before calling down the chain.
VKAPI_ATTR void VKAPI_CALL FreeMemory(
    VkDevice device,
    VkDeviceMemory memory,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t memory_id = reinterpret_cast<uint64_t &>(memory);
    memory = (VkDeviceMemory)dev_data->unique_id_mapping[memory_id];
    dev_data->unique_id_mapping.erase(memory_id);
    lock.unlock();
    dev_data->device_dispatch_table->FreeMemory(device, memory, pAllocator);

}

// Unwrap memory under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
    VkDevice device,
    VkDeviceMemory memory,
    VkDeviceSize offset,
    VkDeviceSize size,
    VkMemoryMapFlags flags,
    void** ppData)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(memory)];
    }
    VkResult result = dev_data->device_dispatch_table->MapMemory(device, memory, offset, size, flags, ppData);

    return result;
}

// Unwrap memory under the lock, then dispatch.
VKAPI_ATTR void VKAPI_CALL UnmapMemory(
    VkDevice device,
    VkDeviceMemory memory)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(memory)];
    }
    dev_data->device_dispatch_table->UnmapMemory(device, memory);

}

// Deep-copies the range array so each element's memory handle can be
// unwrapped; the temporary array is freed after the downstream call.
VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
    VkDevice device,
    uint32_t memoryRangeCount,
    const VkMappedMemoryRange* pMemoryRanges)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkMappedMemoryRange *local_pMemoryRanges = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pMemoryRanges) {
            local_pMemoryRanges = new safe_VkMappedMemoryRange[memoryRangeCount];
            for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
                local_pMemoryRanges[index0].initialize(&pMemoryRanges[index0]);
                if (pMemoryRanges[index0].memory) {
                    local_pMemoryRanges[index0].memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pMemoryRanges[index0].memory)];
                }
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->FlushMappedMemoryRanges(device, memoryRangeCount, (const VkMappedMemoryRange*)local_pMemoryRanges);
    if (local_pMemoryRanges)
        delete[] local_pMemoryRanges;
    return result;
}

// Same unwrap-copy pattern as FlushMappedMemoryRanges.
VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
    VkDevice device,
    uint32_t memoryRangeCount,
    const VkMappedMemoryRange* pMemoryRanges)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkMappedMemoryRange *local_pMemoryRanges = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pMemoryRanges) {
            local_pMemoryRanges = new safe_VkMappedMemoryRange[memoryRangeCount];
            for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
                local_pMemoryRanges[index0].initialize(&pMemoryRanges[index0]);
                if (pMemoryRanges[index0].memory) {
                    local_pMemoryRanges[index0].memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pMemoryRanges[index0].memory)];
                }
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->InvalidateMappedMemoryRanges(device, memoryRangeCount, (const VkMappedMemoryRange*)local_pMemoryRanges);
    if (local_pMemoryRanges)
        delete[] local_pMemoryRanges;
    return result;
}

// Unwrap memory under the lock, then dispatch.
VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
    VkDevice device,
    VkDeviceMemory memory,
    VkDeviceSize* pCommittedMemoryInBytes)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(memory)];
    }
    dev_data->device_dispatch_table->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);

}

// Unwrap both buffer and memory under one lock acquisition, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
    VkDevice device,
    VkBuffer buffer,
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)];
        memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(memory)];
    }
    VkResult result = dev_data->device_dispatch_table->BindBufferMemory(device, buffer, memory, memoryOffset);

    return result;
}

// Unwrap both image and memory under one lock acquisition, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
    VkDevice device,
    VkImage image,
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(image)];
        memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(memory)];
    }
    VkResult result = dev_data->device_dispatch_table->BindImageMemory(device, image, memory, memoryOffset);

    return result;
}

// Unwrap buffer under the lock, then dispatch.
VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
    VkDevice device,
    VkBuffer buffer,
    VkMemoryRequirements* pMemoryRequirements)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)];
    }
    dev_data->device_dispatch_table->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);

}

// Unwrap image under the lock, then dispatch.
VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
    VkDevice device,
    VkImage image,
    VkMemoryRequirements* pMemoryRequirements)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(image)];
    }
    dev_data->device_dispatch_table->GetImageMemoryRequirements(device, image, pMemoryRequirements);

}

// Unwrap image under the lock, then dispatch.
VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
    VkDevice device,
    VkImage image,
    uint32_t* pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(image)];
    }
    dev_data->device_dispatch_table->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);

}

// Deep-copies the bind-info array and unwraps every nested handle: wait/signal
// semaphores, buffers, images, and the memory handle inside each sparse bind,
// plus the fence argument.  The safe_* copy keeps the caller's input const.
VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
    VkQueue queue,
    uint32_t bindInfoCount,
    const VkBindSparseInfo* pBindInfo,
    VkFence fence)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    safe_VkBindSparseInfo *local_pBindInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pBindInfo) {
            local_pBindInfo = new safe_VkBindSparseInfo[bindInfoCount];
            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
                local_pBindInfo[index0].initialize(&pBindInfo[index0]);
                if (local_pBindInfo[index0].pWaitSemaphores) {
                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].waitSemaphoreCount; ++index1) {
                        local_pBindInfo[index0].pWaitSemaphores[index1] = (VkSemaphore)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pBindInfo[index0].pWaitSemaphores[index1])];
                    }
                }
                if (local_pBindInfo[index0].pBufferBinds) {
                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].bufferBindCount; ++index1) {
                        if (pBindInfo[index0].pBufferBinds[index1].buffer) {
                            local_pBindInfo[index0].pBufferBinds[index1].buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBindInfo[index0].pBufferBinds[index1].buffer)];
                        }
                        if (local_pBindInfo[index0].pBufferBinds[index1].pBinds) {
                            for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
                                if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
                                    local_pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory)];
                                }
                            }
                        }
                    }
                }
                if (local_pBindInfo[index0].pImageOpaqueBinds) {
                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].imageOpaqueBindCount; ++index1) {
                        if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
                            local_pBindInfo[index0].pImageOpaqueBinds[index1].image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBindInfo[index0].pImageOpaqueBinds[index1].image)];
                        }
                        if (local_pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
                            for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
                                if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
                                    local_pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory)];
                                }
                            }
                        }
                    }
                }
                if (local_pBindInfo[index0].pImageBinds) {
                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].imageBindCount; ++index1) {
                        if (pBindInfo[index0].pImageBinds[index1].image) {
                            local_pBindInfo[index0].pImageBinds[index1].image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBindInfo[index0].pImageBinds[index1].image)];
                        }
                        if (local_pBindInfo[index0].pImageBinds[index1].pBinds) {
                            for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
                                if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
                                    local_pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory)];
                                }
                            }
                        }
                    }
                }
                if (local_pBindInfo[index0].pSignalSemaphores) {
                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].signalSemaphoreCount; ++index1) {
                        local_pBindInfo[index0].pSignalSemaphores[index1] = (VkSemaphore)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pBindInfo[index0].pSignalSemaphores[index1])];
                    }
                }
            }
        }
        fence = (VkFence)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(fence)];
    }
    VkResult result = dev_data->device_dispatch_table->QueueBindSparse(queue, bindInfoCount, (const VkBindSparseInfo*)local_pBindInfo, fence);
    if (local_pBindInfo)
        delete[] local_pBindInfo;
    return result;
}

// Create the fence downstream, then on success replace *pFence with a fresh
// unique id and record the id -> driver-handle mapping.
VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
    VkDevice device,
    const VkFenceCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkFence* pFence)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateFence(device, pCreateInfo, pAllocator, pFence);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pFence);
        *pFence = reinterpret_cast<VkFence&>(unique_id);
    }
    return result;
}

// Unwrap the fence id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyFence(
    VkDevice device,
    VkFence fence,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t fence_id = reinterpret_cast<uint64_t &>(fence);
    fence = (VkFence)dev_data->unique_id_mapping[fence_id];
    dev_data->unique_id_mapping.erase(fence_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyFence(device, fence, pAllocator);

}

// Unwrap each fence id into a temporary array, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
    VkDevice device,
    uint32_t fenceCount,
    const VkFence* pFences)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkFence *local_pFences = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pFences) {
            local_pFences = new VkFence[fenceCount];
            for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
                local_pFences[index0] = (VkFence)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pFences[index0])];
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->ResetFences(device, fenceCount, (const VkFence*)local_pFences);
    if (local_pFences)
        delete[] local_pFences;
    return result;
}

// Unwrap fence under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
    VkDevice device,
    VkFence fence)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        fence = (VkFence)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(fence)];
    }
    VkResult result = dev_data->device_dispatch_table->GetFenceStatus(device, fence);

    return result;
}

// Unwrap each fence id into a temporary array, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
    VkDevice device,
    uint32_t fenceCount,
    const VkFence* pFences,
    VkBool32 waitAll,
    uint64_t timeout)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkFence *local_pFences = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pFences) {
            local_pFences = new VkFence[fenceCount];
            for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
                local_pFences[index0] = (VkFence)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pFences[index0])];
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->WaitForFences(device, fenceCount, (const VkFence*)local_pFences, waitAll, timeout);
    if (local_pFences)
        delete[] local_pFences;
    return result;
}

// Create downstream, then wrap the new semaphore handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
    VkDevice device,
    const VkSemaphoreCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSemaphore* pSemaphore)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSemaphore);
        *pSemaphore = reinterpret_cast<VkSemaphore&>(unique_id);
    }
    return result;
}

// Unwrap the semaphore id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
    VkDevice device,
    VkSemaphore semaphore,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t semaphore_id = reinterpret_cast<uint64_t &>(semaphore);
    semaphore = (VkSemaphore)dev_data->unique_id_mapping[semaphore_id];
    dev_data->unique_id_mapping.erase(semaphore_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroySemaphore(device, semaphore, pAllocator);

}

// Create downstream, then wrap the new event handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
    VkDevice device,
    const VkEventCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkEvent* pEvent)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pEvent);
        *pEvent = reinterpret_cast<VkEvent&>(unique_id);
    }
    return result;
}

// Unwrap the event id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyEvent(
    VkDevice device,
    VkEvent event,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t event_id = reinterpret_cast<uint64_t &>(event);
    event = (VkEvent)dev_data->unique_id_mapping[event_id];
    dev_data->unique_id_mapping.erase(event_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyEvent(device, event, pAllocator);

}

// Unwrap event under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
    VkDevice device,
    VkEvent event)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        event = (VkEvent)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(event)];
    }
    VkResult result = dev_data->device_dispatch_table->GetEventStatus(device, event);

    return result;
}

// Unwrap event under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
    VkDevice device,
    VkEvent event)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        event = (VkEvent)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(event)];
    }
    VkResult result = dev_data->device_dispatch_table->SetEvent(device, event);

    return result;
}

// Unwrap event under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
    VkDevice device,
    VkEvent event)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        event = (VkEvent)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(event)];
    }
    VkResult result = dev_data->device_dispatch_table->ResetEvent(device, event);

    return result;
}

// Create downstream, then wrap the new query pool handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
    VkDevice device,
    const VkQueryPoolCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkQueryPool* pQueryPool)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pQueryPool);
        *pQueryPool = reinterpret_cast<VkQueryPool&>(unique_id);
    }
    return result;
}

// Unwrap the query pool id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
    VkDevice device,
    VkQueryPool queryPool,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t queryPool_id = reinterpret_cast<uint64_t &>(queryPool);
    queryPool = (VkQueryPool)dev_data->unique_id_mapping[queryPool_id];
    dev_data->unique_id_mapping.erase(queryPool_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyQueryPool(device, queryPool, pAllocator);

}

// Unwrap queryPool under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
    VkDevice device,
    VkQueryPool queryPool,
    uint32_t firstQuery,
    uint32_t queryCount,
    size_t dataSize,
    void* pData,
    VkDeviceSize stride,
    VkQueryResultFlags flags)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        queryPool = (VkQueryPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(queryPool)];
    }
    VkResult result = dev_data->device_dispatch_table->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);

    return result;
}

// Create downstream, then wrap the new buffer handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
    VkDevice device,
    const VkBufferCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkBuffer* pBuffer)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pBuffer);
        *pBuffer = reinterpret_cast<VkBuffer&>(unique_id);
    }
    return result;
}

// Unwrap the buffer id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
    VkDevice device,
    VkBuffer buffer,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t buffer_id = reinterpret_cast<uint64_t &>(buffer);
    buffer = (VkBuffer)dev_data->unique_id_mapping[buffer_id];
    dev_data->unique_id_mapping.erase(buffer_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyBuffer(device, buffer, pAllocator);

}

// Deep-copies the create info to unwrap its buffer member, creates the view
// downstream, then wraps the resulting view handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
    VkDevice device,
    const VkBufferViewCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkBufferView* pView)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkBufferViewCreateInfo *local_pCreateInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pCreateInfo) {
            local_pCreateInfo = new safe_VkBufferViewCreateInfo(pCreateInfo);
            if (pCreateInfo->buffer) {
                local_pCreateInfo->buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->buffer)];
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->CreateBufferView(device, (const VkBufferViewCreateInfo*)local_pCreateInfo, pAllocator, pView);
    if (local_pCreateInfo)
        delete local_pCreateInfo;
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pView);
        *pView = reinterpret_cast<VkBufferView&>(unique_id);
    }
    return result;
}

// Unwrap the buffer view id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
    VkDevice device,
    VkBufferView bufferView,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t bufferView_id = reinterpret_cast<uint64_t &>(bufferView);
    bufferView = (VkBufferView)dev_data->unique_id_mapping[bufferView_id];
    dev_data->unique_id_mapping.erase(bufferView_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyBufferView(device, bufferView, pAllocator);

}

// Create downstream, then wrap the new image handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
    VkDevice device,
    const VkImageCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkImage* pImage)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateImage(device, pCreateInfo, pAllocator, pImage);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pImage);
        *pImage = reinterpret_cast<VkImage&>(unique_id);
    }
    return result;
}

// Unwrap the image id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyImage(
    VkDevice device,
    VkImage image,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t image_id = reinterpret_cast<uint64_t &>(image);
    image = (VkImage)dev_data->unique_id_mapping[image_id];
    dev_data->unique_id_mapping.erase(image_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyImage(device, image, pAllocator);

}

// Unwrap image under the lock, then dispatch.
VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
    VkDevice device,
    VkImage image,
    const VkImageSubresource* pSubresource,
    VkSubresourceLayout* pLayout)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(image)];
    }
    dev_data->device_dispatch_table->GetImageSubresourceLayout(device, image, pSubresource, pLayout);

}

// Deep-copies the create info to unwrap its image member, creates the view
// downstream, then wraps the resulting view handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
    VkDevice device,
    const VkImageViewCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkImageView* pView)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkImageViewCreateInfo *local_pCreateInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pCreateInfo) {
            local_pCreateInfo = new safe_VkImageViewCreateInfo(pCreateInfo);
            if (pCreateInfo->image) {
                local_pCreateInfo->image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->image)];
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->CreateImageView(device, (const VkImageViewCreateInfo*)local_pCreateInfo, pAllocator, pView);
    if (local_pCreateInfo)
        delete local_pCreateInfo;
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pView);
        *pView = reinterpret_cast<VkImageView&>(unique_id);
    }
    return result;
}

// Unwrap the image view id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyImageView(
    VkDevice device,
    VkImageView imageView,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t imageView_id = reinterpret_cast<uint64_t &>(imageView);
    imageView = (VkImageView)dev_data->unique_id_mapping[imageView_id];
    dev_data->unique_id_mapping.erase(imageView_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyImageView(device, imageView, pAllocator);

}

// Create downstream, then wrap the new shader module handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
    VkDevice device,
    const VkShaderModuleCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkShaderModule* pShaderModule)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pShaderModule);
        *pShaderModule = reinterpret_cast<VkShaderModule&>(unique_id);
    }
    return result;
}

// Unwrap the shader module id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
    VkDevice device,
    VkShaderModule shaderModule,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t shaderModule_id = reinterpret_cast<uint64_t &>(shaderModule);
    shaderModule = (VkShaderModule)dev_data->unique_id_mapping[shaderModule_id];
    dev_data->unique_id_mapping.erase(shaderModule_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyShaderModule(device, shaderModule, pAllocator);

}

// Create downstream, then wrap the new pipeline cache handle with a unique id.
VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
    VkDevice device,
    const VkPipelineCacheCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkPipelineCache* pPipelineCache)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pPipelineCache);
        *pPipelineCache = reinterpret_cast<VkPipelineCache&>(unique_id);
    }
    return result;
}

// Unwrap the pipeline cache id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
    VkDevice device,
    VkPipelineCache pipelineCache,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t pipelineCache_id = reinterpret_cast<uint64_t &>(pipelineCache);
    pipelineCache = (VkPipelineCache)dev_data->unique_id_mapping[pipelineCache_id];
    dev_data->unique_id_mapping.erase(pipelineCache_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyPipelineCache(device, pipelineCache, pAllocator);

}

// Unwrap pipelineCache under the lock, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
    VkDevice device,
    VkPipelineCache pipelineCache,
    size_t* pDataSize,
    void* pData)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }
    VkResult result = dev_data->device_dispatch_table->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);

    return result;
}

// Unwrap the destination cache and each source cache id, then dispatch.
VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
    VkDevice device,
    VkPipelineCache dstCache,
    uint32_t srcCacheCount,
    const VkPipelineCache* pSrcCaches)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkPipelineCache *local_pSrcCaches = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        dstCache = (VkPipelineCache)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstCache)];
        if (pSrcCaches) {
            local_pSrcCaches = new VkPipelineCache[srcCacheCount];
            for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
                local_pSrcCaches[index0] = (VkPipelineCache)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pSrcCaches[index0])];
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->MergePipelineCaches(device, dstCache, srcCacheCount, (const VkPipelineCache*)local_pSrcCaches);
    if (local_pSrcCaches)
        delete[] local_pSrcCaches;
    return result;
}

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
    VkDevice device,
    VkPipelineCache pipelineCache,
    uint32_t createInfoCount,
    const VkGraphicsPipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkPipeline* pPipelines);

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
    VkDevice device,
    VkPipelineCache pipelineCache,
    uint32_t createInfoCount,
    const VkComputePipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkPipeline* pPipelines);

// Unwrap the pipeline id, drop it from the map, then destroy downstream.
VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
    VkDevice device,
    VkPipeline pipeline,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t pipeline_id = reinterpret_cast<uint64_t &>(pipeline);
    pipeline = (VkPipeline)dev_data->unique_id_mapping[pipeline_id];
    dev_data->unique_id_mapping.erase(pipeline_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroyPipeline(device, pipeline, pAllocator);

}

// Deep-copies the create info to unwrap each descriptor set layout handle,
// creates the layout downstream, then wraps the new handle with a unique id.
// (Definition continues past the end of this chunk.)
VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
    VkDevice device,
    const VkPipelineLayoutCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkPipelineLayout* pPipelineLayout)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkPipelineLayoutCreateInfo *local_pCreateInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pCreateInfo) {
            local_pCreateInfo = new safe_VkPipelineLayoutCreateInfo(pCreateInfo);
            if (local_pCreateInfo->pSetLayouts) {
                for (uint32_t index1 = 0; index1 < local_pCreateInfo->setLayoutCount; ++index1) {
                    local_pCreateInfo->pSetLayouts[index1] = (VkDescriptorSetLayout)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pCreateInfo->pSetLayouts[index1])];
                }
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->CreatePipelineLayout(device, (const VkPipelineLayoutCreateInfo*)local_pCreateInfo, pAllocator, pPipelineLayout);
    if (local_pCreateInfo)
        delete local_pCreateInfo;
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t 
&>(*pPipelineLayout); 960 *pPipelineLayout = reinterpret_cast<VkPipelineLayout&>(unique_id); 961 } 962 return result; 963 } 964 965 VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout( 966 VkDevice device, 967 VkPipelineLayout pipelineLayout, 968 const VkAllocationCallbacks* pAllocator) 969 { 970 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 971 std::unique_lock<std::mutex> lock(global_lock); 972 uint64_t pipelineLayout_id = reinterpret_cast<uint64_t &>(pipelineLayout); 973 pipelineLayout = (VkPipelineLayout)dev_data->unique_id_mapping[pipelineLayout_id]; 974 dev_data->unique_id_mapping.erase(pipelineLayout_id); 975 lock.unlock(); 976 dev_data->device_dispatch_table->DestroyPipelineLayout(device, pipelineLayout, pAllocator); 977 978 } 979 980 VKAPI_ATTR VkResult VKAPI_CALL CreateSampler( 981 VkDevice device, 982 const VkSamplerCreateInfo* pCreateInfo, 983 const VkAllocationCallbacks* pAllocator, 984 VkSampler* pSampler) 985 { 986 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 987 VkResult result = dev_data->device_dispatch_table->CreateSampler(device, pCreateInfo, pAllocator, pSampler); 988 if (VK_SUCCESS == result) { 989 std::lock_guard<std::mutex> lock(global_lock); 990 uint64_t unique_id = global_unique_id++; 991 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSampler); 992 *pSampler = reinterpret_cast<VkSampler&>(unique_id); 993 } 994 return result; 995 } 996 997 VKAPI_ATTR void VKAPI_CALL DestroySampler( 998 VkDevice device, 999 VkSampler sampler, 1000 const VkAllocationCallbacks* pAllocator) 1001 { 1002 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1003 std::unique_lock<std::mutex> lock(global_lock); 1004 uint64_t sampler_id = reinterpret_cast<uint64_t &>(sampler); 1005 sampler = (VkSampler)dev_data->unique_id_mapping[sampler_id]; 1006 dev_data->unique_id_mapping.erase(sampler_id); 1007 lock.unlock(); 1008 
dev_data->device_dispatch_table->DestroySampler(device, sampler, pAllocator); 1009 1010 } 1011 1012 VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout( 1013 VkDevice device, 1014 const VkDescriptorSetLayoutCreateInfo* pCreateInfo, 1015 const VkAllocationCallbacks* pAllocator, 1016 VkDescriptorSetLayout* pSetLayout) 1017 { 1018 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1019 safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL; 1020 { 1021 std::lock_guard<std::mutex> lock(global_lock); 1022 if (pCreateInfo) { 1023 local_pCreateInfo = new safe_VkDescriptorSetLayoutCreateInfo(pCreateInfo); 1024 if (local_pCreateInfo->pBindings) { 1025 for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) { 1026 if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) { 1027 for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) { 1028 local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = (VkSampler)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2])]; 1029 } 1030 } 1031 } 1032 } 1033 } 1034 } 1035 VkResult result = dev_data->device_dispatch_table->CreateDescriptorSetLayout(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pAllocator, pSetLayout); 1036 if (local_pCreateInfo) 1037 delete local_pCreateInfo; 1038 if (VK_SUCCESS == result) { 1039 std::lock_guard<std::mutex> lock(global_lock); 1040 uint64_t unique_id = global_unique_id++; 1041 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSetLayout); 1042 *pSetLayout = reinterpret_cast<VkDescriptorSetLayout&>(unique_id); 1043 } 1044 return result; 1045 } 1046 1047 VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout( 1048 VkDevice device, 1049 VkDescriptorSetLayout descriptorSetLayout, 1050 const VkAllocationCallbacks* pAllocator) 1051 { 1052 layer_data *dev_data = 
get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1053 std::unique_lock<std::mutex> lock(global_lock); 1054 uint64_t descriptorSetLayout_id = reinterpret_cast<uint64_t &>(descriptorSetLayout); 1055 descriptorSetLayout = (VkDescriptorSetLayout)dev_data->unique_id_mapping[descriptorSetLayout_id]; 1056 dev_data->unique_id_mapping.erase(descriptorSetLayout_id); 1057 lock.unlock(); 1058 dev_data->device_dispatch_table->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); 1059 1060 } 1061 1062 VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool( 1063 VkDevice device, 1064 const VkDescriptorPoolCreateInfo* pCreateInfo, 1065 const VkAllocationCallbacks* pAllocator, 1066 VkDescriptorPool* pDescriptorPool) 1067 { 1068 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1069 VkResult result = dev_data->device_dispatch_table->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool); 1070 if (VK_SUCCESS == result) { 1071 std::lock_guard<std::mutex> lock(global_lock); 1072 uint64_t unique_id = global_unique_id++; 1073 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pDescriptorPool); 1074 *pDescriptorPool = reinterpret_cast<VkDescriptorPool&>(unique_id); 1075 } 1076 return result; 1077 } 1078 1079 VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool( 1080 VkDevice device, 1081 VkDescriptorPool descriptorPool, 1082 const VkAllocationCallbacks* pAllocator) 1083 { 1084 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1085 std::unique_lock<std::mutex> lock(global_lock); 1086 uint64_t descriptorPool_id = reinterpret_cast<uint64_t &>(descriptorPool); 1087 descriptorPool = (VkDescriptorPool)dev_data->unique_id_mapping[descriptorPool_id]; 1088 dev_data->unique_id_mapping.erase(descriptorPool_id); 1089 lock.unlock(); 1090 dev_data->device_dispatch_table->DestroyDescriptorPool(device, descriptorPool, pAllocator); 1091 1092 } 1093 1094 VKAPI_ATTR VkResult VKAPI_CALL 
ResetDescriptorPool( 1095 VkDevice device, 1096 VkDescriptorPool descriptorPool, 1097 VkDescriptorPoolResetFlags flags) 1098 { 1099 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1100 { 1101 std::lock_guard<std::mutex> lock(global_lock); 1102 descriptorPool = (VkDescriptorPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(descriptorPool)]; 1103 } 1104 VkResult result = dev_data->device_dispatch_table->ResetDescriptorPool(device, descriptorPool, flags); 1105 1106 return result; 1107 } 1108 1109 VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets( 1110 VkDevice device, 1111 const VkDescriptorSetAllocateInfo* pAllocateInfo, 1112 VkDescriptorSet* pDescriptorSets) 1113 { 1114 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1115 safe_VkDescriptorSetAllocateInfo *local_pAllocateInfo = NULL; 1116 { 1117 std::lock_guard<std::mutex> lock(global_lock); 1118 if (pAllocateInfo) { 1119 local_pAllocateInfo = new safe_VkDescriptorSetAllocateInfo(pAllocateInfo); 1120 if (pAllocateInfo->descriptorPool) { 1121 local_pAllocateInfo->descriptorPool = (VkDescriptorPool)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pAllocateInfo->descriptorPool)]; 1122 } 1123 if (local_pAllocateInfo->pSetLayouts) { 1124 for (uint32_t index1 = 0; index1 < local_pAllocateInfo->descriptorSetCount; ++index1) { 1125 local_pAllocateInfo->pSetLayouts[index1] = (VkDescriptorSetLayout)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pAllocateInfo->pSetLayouts[index1])]; 1126 } 1127 } 1128 } 1129 } 1130 VkResult result = dev_data->device_dispatch_table->AllocateDescriptorSets(device, (const VkDescriptorSetAllocateInfo*)local_pAllocateInfo, pDescriptorSets); 1131 if (local_pAllocateInfo) 1132 delete local_pAllocateInfo; 1133 if (VK_SUCCESS == result) { 1134 std::lock_guard<std::mutex> lock(global_lock); 1135 for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) { 1136 
uint64_t unique_id = global_unique_id++; 1137 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pDescriptorSets[index0]); 1138 pDescriptorSets[index0] = reinterpret_cast<VkDescriptorSet&>(unique_id); 1139 } 1140 } 1141 return result; 1142 } 1143 1144 VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets( 1145 VkDevice device, 1146 VkDescriptorPool descriptorPool, 1147 uint32_t descriptorSetCount, 1148 const VkDescriptorSet* pDescriptorSets) 1149 { 1150 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1151 VkDescriptorSet *local_pDescriptorSets = NULL; 1152 { 1153 std::lock_guard<std::mutex> lock(global_lock); 1154 descriptorPool = (VkDescriptorPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(descriptorPool)]; 1155 if (pDescriptorSets) { 1156 local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount]; 1157 for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) { 1158 local_pDescriptorSets[index0] = (VkDescriptorSet)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorSets[index0])]; 1159 } 1160 } 1161 } 1162 VkResult result = dev_data->device_dispatch_table->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, (const VkDescriptorSet*)local_pDescriptorSets); 1163 if (local_pDescriptorSets) 1164 delete[] local_pDescriptorSets; 1165 if ((VK_SUCCESS == result) && (pDescriptorSets)) { 1166 std::unique_lock<std::mutex> lock(global_lock); 1167 for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) { 1168 VkDescriptorSet handle = pDescriptorSets[index0]; 1169 uint64_t unique_id = reinterpret_cast<uint64_t &>(handle); 1170 dev_data->unique_id_mapping.erase(unique_id); 1171 } 1172 } 1173 return result; 1174 } 1175 1176 VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets( 1177 VkDevice device, 1178 uint32_t descriptorWriteCount, 1179 const VkWriteDescriptorSet* pDescriptorWrites, 1180 uint32_t descriptorCopyCount, 1181 const VkCopyDescriptorSet* pDescriptorCopies) 
1182 { 1183 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1184 safe_VkWriteDescriptorSet *local_pDescriptorWrites = NULL; 1185 safe_VkCopyDescriptorSet *local_pDescriptorCopies = NULL; 1186 { 1187 std::lock_guard<std::mutex> lock(global_lock); 1188 if (pDescriptorWrites) { 1189 local_pDescriptorWrites = new safe_VkWriteDescriptorSet[descriptorWriteCount]; 1190 for (uint32_t index0 = 0; index0 < descriptorWriteCount; ++index0) { 1191 local_pDescriptorWrites[index0].initialize(&pDescriptorWrites[index0]); 1192 if (pDescriptorWrites[index0].dstSet) { 1193 local_pDescriptorWrites[index0].dstSet = (VkDescriptorSet)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorWrites[index0].dstSet)]; 1194 } 1195 if (local_pDescriptorWrites[index0].pImageInfo) { 1196 for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) { 1197 if (pDescriptorWrites[index0].pImageInfo[index1].sampler) { 1198 local_pDescriptorWrites[index0].pImageInfo[index1].sampler = (VkSampler)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorWrites[index0].pImageInfo[index1].sampler)]; 1199 } 1200 if (pDescriptorWrites[index0].pImageInfo[index1].imageView) { 1201 local_pDescriptorWrites[index0].pImageInfo[index1].imageView = (VkImageView)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorWrites[index0].pImageInfo[index1].imageView)]; 1202 } 1203 } 1204 } 1205 if (local_pDescriptorWrites[index0].pBufferInfo) { 1206 for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) { 1207 if (pDescriptorWrites[index0].pBufferInfo[index1].buffer) { 1208 local_pDescriptorWrites[index0].pBufferInfo[index1].buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorWrites[index0].pBufferInfo[index1].buffer)]; 1209 } 1210 } 1211 } 1212 if (local_pDescriptorWrites[index0].pTexelBufferView) { 1213 for (uint32_t index1 = 
0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) { 1214 local_pDescriptorWrites[index0].pTexelBufferView[index1] = (VkBufferView)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pDescriptorWrites[index0].pTexelBufferView[index1])]; 1215 } 1216 } 1217 } 1218 } 1219 if (pDescriptorCopies) { 1220 local_pDescriptorCopies = new safe_VkCopyDescriptorSet[descriptorCopyCount]; 1221 for (uint32_t index0 = 0; index0 < descriptorCopyCount; ++index0) { 1222 local_pDescriptorCopies[index0].initialize(&pDescriptorCopies[index0]); 1223 if (pDescriptorCopies[index0].srcSet) { 1224 local_pDescriptorCopies[index0].srcSet = (VkDescriptorSet)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorCopies[index0].srcSet)]; 1225 } 1226 if (pDescriptorCopies[index0].dstSet) { 1227 local_pDescriptorCopies[index0].dstSet = (VkDescriptorSet)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorCopies[index0].dstSet)]; 1228 } 1229 } 1230 } 1231 } 1232 dev_data->device_dispatch_table->UpdateDescriptorSets(device, descriptorWriteCount, (const VkWriteDescriptorSet*)local_pDescriptorWrites, descriptorCopyCount, (const VkCopyDescriptorSet*)local_pDescriptorCopies); 1233 if (local_pDescriptorWrites) 1234 delete[] local_pDescriptorWrites; 1235 if (local_pDescriptorCopies) 1236 delete[] local_pDescriptorCopies; 1237 } 1238 1239 VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer( 1240 VkDevice device, 1241 const VkFramebufferCreateInfo* pCreateInfo, 1242 const VkAllocationCallbacks* pAllocator, 1243 VkFramebuffer* pFramebuffer) 1244 { 1245 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1246 safe_VkFramebufferCreateInfo *local_pCreateInfo = NULL; 1247 { 1248 std::lock_guard<std::mutex> lock(global_lock); 1249 if (pCreateInfo) { 1250 local_pCreateInfo = new safe_VkFramebufferCreateInfo(pCreateInfo); 1251 if (pCreateInfo->renderPass) { 1252 local_pCreateInfo->renderPass = 
(VkRenderPass)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->renderPass)]; 1253 } 1254 if (local_pCreateInfo->pAttachments) { 1255 for (uint32_t index1 = 0; index1 < local_pCreateInfo->attachmentCount; ++index1) { 1256 local_pCreateInfo->pAttachments[index1] = (VkImageView)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pCreateInfo->pAttachments[index1])]; 1257 } 1258 } 1259 } 1260 } 1261 VkResult result = dev_data->device_dispatch_table->CreateFramebuffer(device, (const VkFramebufferCreateInfo*)local_pCreateInfo, pAllocator, pFramebuffer); 1262 if (local_pCreateInfo) 1263 delete local_pCreateInfo; 1264 if (VK_SUCCESS == result) { 1265 std::lock_guard<std::mutex> lock(global_lock); 1266 uint64_t unique_id = global_unique_id++; 1267 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pFramebuffer); 1268 *pFramebuffer = reinterpret_cast<VkFramebuffer&>(unique_id); 1269 } 1270 return result; 1271 } 1272 1273 VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer( 1274 VkDevice device, 1275 VkFramebuffer framebuffer, 1276 const VkAllocationCallbacks* pAllocator) 1277 { 1278 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1279 std::unique_lock<std::mutex> lock(global_lock); 1280 uint64_t framebuffer_id = reinterpret_cast<uint64_t &>(framebuffer); 1281 framebuffer = (VkFramebuffer)dev_data->unique_id_mapping[framebuffer_id]; 1282 dev_data->unique_id_mapping.erase(framebuffer_id); 1283 lock.unlock(); 1284 dev_data->device_dispatch_table->DestroyFramebuffer(device, framebuffer, pAllocator); 1285 1286 } 1287 1288 VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass( 1289 VkDevice device, 1290 const VkRenderPassCreateInfo* pCreateInfo, 1291 const VkAllocationCallbacks* pAllocator, 1292 VkRenderPass* pRenderPass) 1293 { 1294 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1295 VkResult result = dev_data->device_dispatch_table->CreateRenderPass(device, 
pCreateInfo, pAllocator, pRenderPass); 1296 if (VK_SUCCESS == result) { 1297 std::lock_guard<std::mutex> lock(global_lock); 1298 uint64_t unique_id = global_unique_id++; 1299 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pRenderPass); 1300 *pRenderPass = reinterpret_cast<VkRenderPass&>(unique_id); 1301 } 1302 return result; 1303 } 1304 1305 VKAPI_ATTR void VKAPI_CALL DestroyRenderPass( 1306 VkDevice device, 1307 VkRenderPass renderPass, 1308 const VkAllocationCallbacks* pAllocator) 1309 { 1310 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1311 std::unique_lock<std::mutex> lock(global_lock); 1312 uint64_t renderPass_id = reinterpret_cast<uint64_t &>(renderPass); 1313 renderPass = (VkRenderPass)dev_data->unique_id_mapping[renderPass_id]; 1314 dev_data->unique_id_mapping.erase(renderPass_id); 1315 lock.unlock(); 1316 dev_data->device_dispatch_table->DestroyRenderPass(device, renderPass, pAllocator); 1317 1318 } 1319 1320 VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity( 1321 VkDevice device, 1322 VkRenderPass renderPass, 1323 VkExtent2D* pGranularity) 1324 { 1325 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1326 { 1327 std::lock_guard<std::mutex> lock(global_lock); 1328 renderPass = (VkRenderPass)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(renderPass)]; 1329 } 1330 dev_data->device_dispatch_table->GetRenderAreaGranularity(device, renderPass, pGranularity); 1331 1332 } 1333 1334 VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool( 1335 VkDevice device, 1336 const VkCommandPoolCreateInfo* pCreateInfo, 1337 const VkAllocationCallbacks* pAllocator, 1338 VkCommandPool* pCommandPool) 1339 { 1340 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1341 VkResult result = dev_data->device_dispatch_table->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool); 1342 if (VK_SUCCESS == result) { 1343 std::lock_guard<std::mutex> 
lock(global_lock); 1344 uint64_t unique_id = global_unique_id++; 1345 dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pCommandPool); 1346 *pCommandPool = reinterpret_cast<VkCommandPool&>(unique_id); 1347 } 1348 return result; 1349 } 1350 1351 VKAPI_ATTR void VKAPI_CALL DestroyCommandPool( 1352 VkDevice device, 1353 VkCommandPool commandPool, 1354 const VkAllocationCallbacks* pAllocator) 1355 { 1356 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1357 std::unique_lock<std::mutex> lock(global_lock); 1358 uint64_t commandPool_id = reinterpret_cast<uint64_t &>(commandPool); 1359 commandPool = (VkCommandPool)dev_data->unique_id_mapping[commandPool_id]; 1360 dev_data->unique_id_mapping.erase(commandPool_id); 1361 lock.unlock(); 1362 dev_data->device_dispatch_table->DestroyCommandPool(device, commandPool, pAllocator); 1363 1364 } 1365 1366 VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool( 1367 VkDevice device, 1368 VkCommandPool commandPool, 1369 VkCommandPoolResetFlags flags) 1370 { 1371 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1372 { 1373 std::lock_guard<std::mutex> lock(global_lock); 1374 commandPool = (VkCommandPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(commandPool)]; 1375 } 1376 VkResult result = dev_data->device_dispatch_table->ResetCommandPool(device, commandPool, flags); 1377 1378 return result; 1379 } 1380 1381 VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers( 1382 VkDevice device, 1383 const VkCommandBufferAllocateInfo* pAllocateInfo, 1384 VkCommandBuffer* pCommandBuffers) 1385 { 1386 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1387 safe_VkCommandBufferAllocateInfo *local_pAllocateInfo = NULL; 1388 { 1389 std::lock_guard<std::mutex> lock(global_lock); 1390 if (pAllocateInfo) { 1391 local_pAllocateInfo = new safe_VkCommandBufferAllocateInfo(pAllocateInfo); 1392 if (pAllocateInfo->commandPool) { 1393 
local_pAllocateInfo->commandPool = (VkCommandPool)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pAllocateInfo->commandPool)]; 1394 } 1395 } 1396 } 1397 VkResult result = dev_data->device_dispatch_table->AllocateCommandBuffers(device, (const VkCommandBufferAllocateInfo*)local_pAllocateInfo, pCommandBuffers); 1398 if (local_pAllocateInfo) 1399 delete local_pAllocateInfo; 1400 return result; 1401 } 1402 1403 VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers( 1404 VkDevice device, 1405 VkCommandPool commandPool, 1406 uint32_t commandBufferCount, 1407 const VkCommandBuffer* pCommandBuffers) 1408 { 1409 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1410 { 1411 std::lock_guard<std::mutex> lock(global_lock); 1412 commandPool = (VkCommandPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(commandPool)]; 1413 } 1414 dev_data->device_dispatch_table->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers); 1415 1416 } 1417 1418 VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer( 1419 VkCommandBuffer commandBuffer, 1420 const VkCommandBufferBeginInfo* pBeginInfo) 1421 { 1422 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 1423 safe_VkCommandBufferBeginInfo *local_pBeginInfo = NULL; 1424 { 1425 std::lock_guard<std::mutex> lock(global_lock); 1426 if (pBeginInfo) { 1427 local_pBeginInfo = new safe_VkCommandBufferBeginInfo(pBeginInfo); 1428 if (local_pBeginInfo->pInheritanceInfo) { 1429 if (pBeginInfo->pInheritanceInfo->renderPass) { 1430 local_pBeginInfo->pInheritanceInfo->renderPass = (VkRenderPass)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBeginInfo->pInheritanceInfo->renderPass)]; 1431 } 1432 if (pBeginInfo->pInheritanceInfo->framebuffer) { 1433 local_pBeginInfo->pInheritanceInfo->framebuffer = (VkFramebuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBeginInfo->pInheritanceInfo->framebuffer)]; 1434 } 1435 } 1436 } 
1437 } 1438 VkResult result = dev_data->device_dispatch_table->BeginCommandBuffer(commandBuffer, (const VkCommandBufferBeginInfo*)local_pBeginInfo); 1439 if (local_pBeginInfo) 1440 delete local_pBeginInfo; 1441 return result; 1442 } 1443 1444 VKAPI_ATTR void VKAPI_CALL CmdBindPipeline( 1445 VkCommandBuffer commandBuffer, 1446 VkPipelineBindPoint pipelineBindPoint, 1447 VkPipeline pipeline) 1448 { 1449 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 1450 { 1451 std::lock_guard<std::mutex> lock(global_lock); 1452 pipeline = (VkPipeline)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipeline)]; 1453 } 1454 dev_data->device_dispatch_table->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline); 1455 1456 } 1457 1458 VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets( 1459 VkCommandBuffer commandBuffer, 1460 VkPipelineBindPoint pipelineBindPoint, 1461 VkPipelineLayout layout, 1462 uint32_t firstSet, 1463 uint32_t descriptorSetCount, 1464 const VkDescriptorSet* pDescriptorSets, 1465 uint32_t dynamicOffsetCount, 1466 const uint32_t* pDynamicOffsets) 1467 { 1468 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 1469 VkDescriptorSet *local_pDescriptorSets = NULL; 1470 { 1471 std::lock_guard<std::mutex> lock(global_lock); 1472 layout = (VkPipelineLayout)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(layout)]; 1473 if (pDescriptorSets) { 1474 local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount]; 1475 for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) { 1476 local_pDescriptorSets[index0] = (VkDescriptorSet)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pDescriptorSets[index0])]; 1477 } 1478 } 1479 } 1480 dev_data->device_dispatch_table->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, (const VkDescriptorSet*)local_pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); 1481 if 
(local_pDescriptorSets) 1482 delete[] local_pDescriptorSets; 1483 } 1484 1485 VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer( 1486 VkCommandBuffer commandBuffer, 1487 VkBuffer buffer, 1488 VkDeviceSize offset, 1489 VkIndexType indexType) 1490 { 1491 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 1492 { 1493 std::lock_guard<std::mutex> lock(global_lock); 1494 buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)]; 1495 } 1496 dev_data->device_dispatch_table->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); 1497 1498 } 1499 1500 VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers( 1501 VkCommandBuffer commandBuffer, 1502 uint32_t firstBinding, 1503 uint32_t bindingCount, 1504 const VkBuffer* pBuffers, 1505 const VkDeviceSize* pOffsets) 1506 { 1507 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 1508 VkBuffer *local_pBuffers = NULL; 1509 { 1510 std::lock_guard<std::mutex> lock(global_lock); 1511 if (pBuffers) { 1512 local_pBuffers = new VkBuffer[bindingCount]; 1513 for (uint32_t index0 = 0; index0 < bindingCount; ++index0) { 1514 local_pBuffers[index0] = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBuffers[index0])]; 1515 } 1516 } 1517 } 1518 dev_data->device_dispatch_table->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, (const VkBuffer*)local_pBuffers, pOffsets); 1519 if (local_pBuffers) 1520 delete[] local_pBuffers; 1521 } 1522 1523 VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect( 1524 VkCommandBuffer commandBuffer, 1525 VkBuffer buffer, 1526 VkDeviceSize offset, 1527 uint32_t drawCount, 1528 uint32_t stride) 1529 { 1530 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 1531 { 1532 std::lock_guard<std::mutex> lock(global_lock); 1533 buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)]; 1534 } 1535 
    // Tail of CmdDrawIndirect (declaration begins earlier in the file):
    // forward to the next layer with the already-unwrapped buffer handle.
    dev_data->device_dispatch_table->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);

}

// Each entry point below follows the unique_objects pattern: under global_lock,
// replace each wrapped unique-id handle with the original driver handle stored
// in dev_data->unique_id_mapping, then forward the call down the dispatch chain.
// Unwraps: buffer.
VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
    VkCommandBuffer commandBuffer,
    VkBuffer buffer,
    VkDeviceSize offset,
    uint32_t drawCount,
    uint32_t stride)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)];
    }
    dev_data->device_dispatch_table->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);

}

// Unwraps: buffer.
VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
    VkCommandBuffer commandBuffer,
    VkBuffer buffer,
    VkDeviceSize offset)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)];
    }
    dev_data->device_dispatch_table->CmdDispatchIndirect(commandBuffer, buffer, offset);

}

// Unwraps: srcBuffer, dstBuffer.
VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer srcBuffer,
    VkBuffer dstBuffer,
    uint32_t regionCount,
    const VkBufferCopy* pRegions)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        srcBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(srcBuffer)];
        dstBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstBuffer)];
    }
    dev_data->device_dispatch_table->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);

}

// Unwraps: srcImage, dstImage.
VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
    VkCommandBuffer commandBuffer,
    VkImage srcImage,
    VkImageLayout srcImageLayout,
    VkImage dstImage,
    VkImageLayout dstImageLayout,
    uint32_t regionCount,
    const VkImageCopy* pRegions)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        srcImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(srcImage)];
        dstImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstImage)];
    }
    dev_data->device_dispatch_table->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);

}

// Unwraps: srcImage, dstImage.
VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
    VkCommandBuffer commandBuffer,
    VkImage srcImage,
    VkImageLayout srcImageLayout,
    VkImage dstImage,
    VkImageLayout dstImageLayout,
    uint32_t regionCount,
    const VkImageBlit* pRegions,
    VkFilter filter)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        srcImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(srcImage)];
        dstImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstImage)];
    }
    dev_data->device_dispatch_table->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);

}

// Unwraps: srcBuffer, dstImage.
VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
    VkCommandBuffer commandBuffer,
    VkBuffer srcBuffer,
    VkImage dstImage,
    VkImageLayout dstImageLayout,
    uint32_t regionCount,
    const VkBufferImageCopy* pRegions)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        srcBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(srcBuffer)];
        dstImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstImage)];
    }
    dev_data->device_dispatch_table->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);

}

// Unwraps: srcImage, dstBuffer.
VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
    VkCommandBuffer commandBuffer,
    VkImage srcImage,
    VkImageLayout srcImageLayout,
    VkBuffer dstBuffer,
    uint32_t regionCount,
    const VkBufferImageCopy* pRegions)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        srcImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(srcImage)];
        dstBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstBuffer)];
    }
    dev_data->device_dispatch_table->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);

}

// Unwraps: dstBuffer.
VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer dstBuffer,
    VkDeviceSize dstOffset,
    VkDeviceSize dataSize,
    const void* pData)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        dstBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstBuffer)];
    }
    dev_data->device_dispatch_table->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);

}

// Unwraps: dstBuffer.
VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer dstBuffer,
    VkDeviceSize dstOffset,
    VkDeviceSize size,
    uint32_t data)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        dstBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstBuffer)];
    }
    dev_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);

}

// Unwraps: image.
VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
    VkCommandBuffer commandBuffer,
    VkImage image,
    VkImageLayout imageLayout,
    const VkClearColorValue* pColor,
    uint32_t rangeCount,
    const VkImageSubresourceRange* pRanges)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(image)];
    }
    dev_data->device_dispatch_table->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);

}

// Unwraps: image.
VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
    VkCommandBuffer commandBuffer,
    VkImage image,
    VkImageLayout imageLayout,
    const VkClearDepthStencilValue* pDepthStencil,
    uint32_t rangeCount,
    const VkImageSubresourceRange* pRanges)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(image)];
    }
    dev_data->device_dispatch_table->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);

}

// Unwraps: srcImage, dstImage.
VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
    VkCommandBuffer commandBuffer,
    VkImage srcImage,
    VkImageLayout srcImageLayout,
    VkImage dstImage,
    VkImageLayout dstImageLayout,
    uint32_t regionCount,
    const VkImageResolve* pRegions)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        srcImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(srcImage)];
        dstImage = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstImage)];
    }
    dev_data->device_dispatch_table->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);

}

// Unwraps: event.
VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
    VkCommandBuffer commandBuffer,
    VkEvent event,
    VkPipelineStageFlags stageMask)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        event = (VkEvent)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(event)];
    }
    dev_data->device_dispatch_table->CmdSetEvent(commandBuffer, event, stageMask);

}

// Unwraps: event.
VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
    VkCommandBuffer commandBuffer,
    VkEvent event,
    VkPipelineStageFlags stageMask)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        event = (VkEvent)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(event)];
    }
    dev_data->device_dispatch_table->CmdResetEvent(commandBuffer, event, stageMask);

}

// Deep-copies the event array and barrier structs into local shadow arrays so the
// buffer/image handles inside them can be unwrapped without mutating caller memory.
// Locals are heap-allocated for the duration of the downstream call, then freed.
VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
    VkCommandBuffer commandBuffer,
    uint32_t eventCount,
    const VkEvent* pEvents,
    VkPipelineStageFlags srcStageMask,
    VkPipelineStageFlags dstStageMask,
    uint32_t memoryBarrierCount,
    const VkMemoryBarrier* pMemoryBarriers,
    uint32_t bufferMemoryBarrierCount,
    const VkBufferMemoryBarrier* pBufferMemoryBarriers,
    uint32_t imageMemoryBarrierCount,
    const VkImageMemoryBarrier* pImageMemoryBarriers)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    VkEvent *local_pEvents = NULL;
    safe_VkBufferMemoryBarrier *local_pBufferMemoryBarriers = NULL;
    safe_VkImageMemoryBarrier *local_pImageMemoryBarriers = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pEvents) {
            local_pEvents = new VkEvent[eventCount];
            for (uint32_t index0 = 0; index0 < eventCount; ++index0) {
                local_pEvents[index0] = (VkEvent)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pEvents[index0])];
            }
        }
        if (pBufferMemoryBarriers) {
            local_pBufferMemoryBarriers = new safe_VkBufferMemoryBarrier[bufferMemoryBarrierCount];
            for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
                local_pBufferMemoryBarriers[index0].initialize(&pBufferMemoryBarriers[index0]);
                if (pBufferMemoryBarriers[index0].buffer) {
                    local_pBufferMemoryBarriers[index0].buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBufferMemoryBarriers[index0].buffer)];
                }
            }
        }
        if (pImageMemoryBarriers) {
            local_pImageMemoryBarriers = new safe_VkImageMemoryBarrier[imageMemoryBarrierCount];
            for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
                local_pImageMemoryBarriers[index0].initialize(&pImageMemoryBarriers[index0]);
                if (pImageMemoryBarriers[index0].image) {
                    local_pImageMemoryBarriers[index0].image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pImageMemoryBarriers[index0].image)];
                }
            }
        }
    }
    dev_data->device_dispatch_table->CmdWaitEvents(commandBuffer, eventCount, (const VkEvent*)local_pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, (const VkBufferMemoryBarrier*)local_pBufferMemoryBarriers, imageMemoryBarrierCount, (const VkImageMemoryBarrier*)local_pImageMemoryBarriers);
    if (local_pEvents)
        delete[] local_pEvents;
    if (local_pBufferMemoryBarriers)
        delete[] local_pBufferMemoryBarriers;
    if (local_pImageMemoryBarriers)
        delete[] local_pImageMemoryBarriers;
}

// Head of CmdPipelineBarrier; parameter list continues on the next chunk line.
VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
    // Continuation of CmdPipelineBarrier's parameter list (declaration opens on the
    // previous chunk line). Barrier structs are shadow-copied so the buffer/image
    // handles inside them can be unwrapped without mutating caller memory.
    VkCommandBuffer commandBuffer,
    VkPipelineStageFlags srcStageMask,
    VkPipelineStageFlags dstStageMask,
    VkDependencyFlags dependencyFlags,
    uint32_t memoryBarrierCount,
    const VkMemoryBarrier* pMemoryBarriers,
    uint32_t bufferMemoryBarrierCount,
    const VkBufferMemoryBarrier* pBufferMemoryBarriers,
    uint32_t imageMemoryBarrierCount,
    const VkImageMemoryBarrier* pImageMemoryBarriers)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    safe_VkBufferMemoryBarrier *local_pBufferMemoryBarriers = NULL;
    safe_VkImageMemoryBarrier *local_pImageMemoryBarriers = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pBufferMemoryBarriers) {
            local_pBufferMemoryBarriers = new safe_VkBufferMemoryBarrier[bufferMemoryBarrierCount];
            for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
                local_pBufferMemoryBarriers[index0].initialize(&pBufferMemoryBarriers[index0]);
                if (pBufferMemoryBarriers[index0].buffer) {
                    local_pBufferMemoryBarriers[index0].buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pBufferMemoryBarriers[index0].buffer)];
                }
            }
        }
        if (pImageMemoryBarriers) {
            local_pImageMemoryBarriers = new safe_VkImageMemoryBarrier[imageMemoryBarrierCount];
            for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
                local_pImageMemoryBarriers[index0].initialize(&pImageMemoryBarriers[index0]);
                if (pImageMemoryBarriers[index0].image) {
                    local_pImageMemoryBarriers[index0].image = (VkImage)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pImageMemoryBarriers[index0].image)];
                }
            }
        }
    }
    dev_data->device_dispatch_table->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, (const VkBufferMemoryBarrier*)local_pBufferMemoryBarriers, imageMemoryBarrierCount, (const VkImageMemoryBarrier*)local_pImageMemoryBarriers);
    if (local_pBufferMemoryBarriers)
        delete[] local_pBufferMemoryBarriers;
    if (local_pImageMemoryBarriers)
        delete[] local_pImageMemoryBarriers;
}

// Unwraps: queryPool.
VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
    VkCommandBuffer commandBuffer,
    VkQueryPool queryPool,
    uint32_t query,
    VkQueryControlFlags flags)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        queryPool = (VkQueryPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(queryPool)];
    }
    dev_data->device_dispatch_table->CmdBeginQuery(commandBuffer, queryPool, query, flags);

}

// Unwraps: queryPool.
VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
    VkCommandBuffer commandBuffer,
    VkQueryPool queryPool,
    uint32_t query)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        queryPool = (VkQueryPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(queryPool)];
    }
    dev_data->device_dispatch_table->CmdEndQuery(commandBuffer, queryPool, query);

}

// Unwraps: queryPool.
VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
    VkCommandBuffer commandBuffer,
    VkQueryPool queryPool,
    uint32_t firstQuery,
    uint32_t queryCount)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        queryPool = (VkQueryPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(queryPool)];
    }
    dev_data->device_dispatch_table->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);

}

// Unwraps: queryPool.
VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
    VkCommandBuffer commandBuffer,
    VkPipelineStageFlagBits pipelineStage,
    VkQueryPool queryPool,
    uint32_t query)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        queryPool = (VkQueryPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(queryPool)];
    }
    dev_data->device_dispatch_table->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);

}

// Unwraps: queryPool, dstBuffer.
VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
    VkCommandBuffer commandBuffer,
    VkQueryPool queryPool,
    uint32_t firstQuery,
    uint32_t queryCount,
    VkBuffer dstBuffer,
    VkDeviceSize dstOffset,
    VkDeviceSize stride,
    VkQueryResultFlags flags)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        queryPool = (VkQueryPool)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(queryPool)];
        dstBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(dstBuffer)];
    }
    dev_data->device_dispatch_table->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);

}

// Unwraps: layout.
VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
    VkCommandBuffer commandBuffer,
    VkPipelineLayout layout,
    VkShaderStageFlags stageFlags,
    uint32_t offset,
    uint32_t size,
    const void* pValues)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        layout = (VkPipelineLayout)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(layout)];
    }
    dev_data->device_dispatch_table->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);

}

// Head of CmdBeginRenderPass; parameter list continues on the next chunk line.
VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
    VkCommandBuffer commandBuffer,
    const VkRenderPassBeginInfo* pRenderPassBegin,
    // Continuation of CmdBeginRenderPass (declaration opens on the previous chunk
    // line). Shadow-copies the begin-info so renderPass/framebuffer can be unwrapped.
    VkSubpassContents contents)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    safe_VkRenderPassBeginInfo *local_pRenderPassBegin = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pRenderPassBegin) {
            local_pRenderPassBegin = new safe_VkRenderPassBeginInfo(pRenderPassBegin);
            if (pRenderPassBegin->renderPass) {
                local_pRenderPassBegin->renderPass = (VkRenderPass)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pRenderPassBegin->renderPass)];
            }
            if (pRenderPassBegin->framebuffer) {
                local_pRenderPassBegin->framebuffer = (VkFramebuffer)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pRenderPassBegin->framebuffer)];
            }
        }
    }
    dev_data->device_dispatch_table->CmdBeginRenderPass(commandBuffer, (const VkRenderPassBeginInfo*)local_pRenderPassBegin, contents);
    if (local_pRenderPassBegin)
        delete local_pRenderPassBegin;
}


// Unwraps the surface handle AND erases its unique-id entry (handle is being
// destroyed); lock is released before calling down to avoid holding it across
// the driver call.
VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
    VkInstance instance,
    VkSurfaceKHR surface,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t surface_id = reinterpret_cast<uint64_t &>(surface);
    surface = (VkSurfaceKHR)dev_data->unique_id_mapping[surface_id];
    dev_data->unique_id_mapping.erase(surface_id);
    lock.unlock();
    dev_data->instance_dispatch_table->DestroySurfaceKHR(instance, surface, pAllocator);

}

// Unwraps: surface. Instance-level dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
    VkPhysicalDevice physicalDevice,
    uint32_t queueFamilyIndex,
    VkSurfaceKHR surface,
    VkBool32* pSupported)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        surface = (VkSurfaceKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(surface)];
    }
    VkResult result = dev_data->instance_dispatch_table->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);

    return result;
}

// Unwraps: surface. Instance-level dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
    VkPhysicalDevice physicalDevice,
    VkSurfaceKHR surface,
    VkSurfaceCapabilitiesKHR* pSurfaceCapabilities)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        surface = (VkSurfaceKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(surface)];
    }
    VkResult result = dev_data->instance_dispatch_table->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);

    return result;
}

// Unwraps: surface. Instance-level dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
    VkPhysicalDevice physicalDevice,
    VkSurfaceKHR surface,
    uint32_t* pSurfaceFormatCount,
    VkSurfaceFormatKHR* pSurfaceFormats)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        surface = (VkSurfaceKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(surface)];
    }
    VkResult result = dev_data->instance_dispatch_table->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);

    return result;
}

// Unwraps: surface. Instance-level dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
    VkPhysicalDevice physicalDevice,
    VkSurfaceKHR surface,
    uint32_t* pPresentModeCount,
    VkPresentModeKHR* pPresentModes)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        surface = (VkSurfaceKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(surface)];
    }
    VkResult result = dev_data->instance_dispatch_table->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);

    return result;
}


// Declare only
VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
    VkDevice device,
    const VkSwapchainCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSwapchainKHR* pSwapchain);

// Unwraps the swapchain handle AND erases its unique-id entry before forwarding
// the destroy down the chain (lock dropped first, as in DestroySurfaceKHR).
VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
    VkDevice device,
    VkSwapchainKHR swapchain,
    const VkAllocationCallbacks* pAllocator)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t swapchain_id = reinterpret_cast<uint64_t &>(swapchain);
    swapchain = (VkSwapchainKHR)dev_data->unique_id_mapping[swapchain_id];
    dev_data->unique_id_mapping.erase(swapchain_id);
    lock.unlock();
    dev_data->device_dispatch_table->DestroySwapchainKHR(device, swapchain, pAllocator);

}

// Declare only
VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
    VkDevice device,
    VkSwapchainKHR swapchain,
    uint32_t* pSwapchainImageCount,
    VkImage* pSwapchainImages);

// Unwraps: swapchain, semaphore, fence.
// NOTE(review): if unique_id_mapping is a std::(unordered_)map, operator[] on an
// unmapped id (e.g. VK_NULL_HANDLE semaphore/fence) default-inserts and yields 0,
// which maps null back to null — confirm against the map's declaration.
VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
    VkDevice device,
    VkSwapchainKHR swapchain,
    uint64_t timeout,
    VkSemaphore semaphore,
    VkFence fence,
    uint32_t* pImageIndex)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        swapchain = (VkSwapchainKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(swapchain)];
        semaphore = (VkSemaphore)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(semaphore)];
        fence = (VkFence)dev_data->unique_id_mapping[reinterpret_cast<uint64_t
        // Continuation of AcquireNextImageKHR: completes the fence unwrap begun on
        // the previous chunk line, then forwards the call.
        &>(fence)];
    }
    VkResult result = dev_data->device_dispatch_table->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);

    return result;
}

// Shadow-copies the present info and unwraps the wait semaphores and swapchains
// inside the local copy before forwarding.
VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
    VkQueue queue,
    const VkPresentInfoKHR* pPresentInfo)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    safe_VkPresentInfoKHR *local_pPresentInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pPresentInfo) {
            local_pPresentInfo = new safe_VkPresentInfoKHR(pPresentInfo);
            if (local_pPresentInfo->pWaitSemaphores) {
                for (uint32_t index1 = 0; index1 < local_pPresentInfo->waitSemaphoreCount; ++index1) {
                    local_pPresentInfo->pWaitSemaphores[index1] = (VkSemaphore)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pPresentInfo->pWaitSemaphores[index1])];
                }
            }
            if (local_pPresentInfo->pSwapchains) {
                for (uint32_t index1 = 0; index1 < local_pPresentInfo->swapchainCount; ++index1) {
                    local_pPresentInfo->pSwapchains[index1] = (VkSwapchainKHR)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(local_pPresentInfo->pSwapchains[index1])];
                }
            }
        }
    }
    VkResult result = dev_data->device_dispatch_table->QueuePresentKHR(queue, (const VkPresentInfoKHR*)local_pPresentInfo);
    if (local_pPresentInfo)
        delete local_pPresentInfo;
    return result;
}


// On success, wraps the new display mode: records unique_id -> driver handle and
// hands the caller the unique id disguised as the handle.
VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
    VkPhysicalDevice physicalDevice,
    VkDisplayKHR display,
    const VkDisplayModeCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDisplayModeKHR* pMode)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pMode);
        *pMode = reinterpret_cast<VkDisplayModeKHR&>(unique_id);
    }
    return result;
}

// Unwraps: mode. Instance-level dispatch.
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
    VkPhysicalDevice physicalDevice,
    VkDisplayModeKHR mode,
    uint32_t planeIndex,
    VkDisplayPlaneCapabilitiesKHR* pCapabilities)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        mode = (VkDisplayModeKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(mode)];
    }
    VkResult result = dev_data->instance_dispatch_table->GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);

    return result;
}

// Unwraps displayMode inside a shadow copy of the create info; on success wraps
// the new surface handle.
VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
    VkInstance instance,
    const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    safe_VkDisplaySurfaceCreateInfoKHR *local_pCreateInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pCreateInfo) {
            local_pCreateInfo = new safe_VkDisplaySurfaceCreateInfoKHR(pCreateInfo);
            if (pCreateInfo->displayMode) {
                local_pCreateInfo->displayMode = (VkDisplayModeKHR)dev_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->displayMode)];
            }
        }
    }
    VkResult result = dev_data->instance_dispatch_table->CreateDisplayPlaneSurfaceKHR(instance, (const VkDisplaySurfaceCreateInfoKHR*)local_pCreateInfo, pAllocator, pSurface);
    if (local_pCreateInfo)
        delete local_pCreateInfo;
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}


// On success, wraps each returned swapchain handle.
// NOTE(review): unlike other entry points here, the handles inside pCreateInfos
// (surface, oldSwapchain) are forwarded still wrapped — verify this matches the
// generator's intent / the manually-coded CreateSwapchainKHR.
VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
    VkDevice device,
    uint32_t swapchainCount,
    const VkSwapchainCreateInfoKHR* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkSwapchainKHR* pSwapchains)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->device_dispatch_table->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t index0 = 0; index0 < swapchainCount; index0++) {
            uint64_t unique_id = global_unique_id++;
            dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pSwapchains[index0]);
            pSwapchains[index0] = reinterpret_cast<VkSwapchainKHR&>(unique_id);
        }
    }
    return result;
}

#ifdef VK_USE_PLATFORM_XLIB_KHR

// Platform surface creation: forward, then wrap the new surface handle on success.
VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
    VkInstance instance,
    const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}
#endif // VK_USE_PLATFORM_XLIB_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR

// Platform surface creation: forward, then wrap the new surface handle on success.
VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
    VkInstance instance,
    const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}
#endif // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_WAYLAND_KHR

// Platform surface creation: forward, then wrap the new surface handle on success.
VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
    VkInstance instance,
    const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}
#endif // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_MIR_KHR

// Platform surface creation: forward, then wrap the new surface handle on success.
VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(
    VkInstance instance,
    const VkMirSurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}
#endif // VK_USE_PLATFORM_MIR_KHR

#ifdef VK_USE_PLATFORM_ANDROID_KHR

// Platform surface creation: forward, then wrap the new surface handle on success.
VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
    VkInstance instance,
    const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}
#endif // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR

// Platform surface creation: forward, then wrap the new surface handle on success.
VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
    VkInstance instance,
    const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSurfaceKHR* pSurface)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result = dev_data->instance_dispatch_table->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSurface);
        *pSurface = reinterpret_cast<VkSurfaceKHR&>(unique_id);
    }
    return result;
}
#endif // VK_USE_PLATFORM_WIN32_KHR


// Unwraps: buffer, countBuffer.
VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
    VkCommandBuffer commandBuffer,
    VkBuffer buffer,
    VkDeviceSize offset,
    VkBuffer countBuffer,
    VkDeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)];
        countBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(countBuffer)];
    }
    dev_data->device_dispatch_table->CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);

}

// Unwraps: buffer, countBuffer.
VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
    VkCommandBuffer commandBuffer,
    VkBuffer buffer,
    VkDeviceSize offset,
    VkBuffer countBuffer,
    VkDeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        buffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(buffer)];
        countBuffer = (VkBuffer)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(countBuffer)];
    }
    dev_data->device_dispatch_table->CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);

}

#ifdef VK_USE_PLATFORM_WIN32_KHR

// Head of GetMemoryWin32HandleNV; the function name and body continue on the
// next chunk line.
VKAPI_ATTR VkResult VKAPI_CALL
// Continuation of GetMemoryWin32HandleNV (return type on the previous chunk line).
// Unwraps: memory, then forwards to retrieve the external-memory Win32 handle.
GetMemoryWin32HandleNV(
    VkDevice device,
    VkDeviceMemory memory,
    VkExternalMemoryHandleTypeFlagsNV handleType,
    HANDLE* pHandle)
{
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        memory = (VkDeviceMemory)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(memory)];
    }
    VkResult result = dev_data->device_dispatch_table->GetMemoryWin32HandleNV(device, memory, handleType, pHandle);

    return result;
}
#endif // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR

#endif // VK_USE_PLATFORM_WIN32_KHR


// intercepts
// Name -> function-pointer table of every entry point this layer intercepts;
// the table continues beyond this chunk and is closed later in the file.
struct { const char* name; PFN_vkVoidFunction pFunc;} procmap[] = {
    {"vkCreateInstance", reinterpret_cast<PFN_vkVoidFunction>(CreateInstance)},
    {"vkDestroyInstance", reinterpret_cast<PFN_vkVoidFunction>(DestroyInstance)},
    {"vkGetInstanceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetInstanceProcAddr)},
    {"vkGetDeviceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr)},
    {"vkCreateDevice", reinterpret_cast<PFN_vkVoidFunction>(CreateDevice)},
    {"vkDestroyDevice", reinterpret_cast<PFN_vkVoidFunction>(DestroyDevice)},
    {"vkEnumerateInstanceExtensionProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateInstanceExtensionProperties)},
    {"vkEnumerateInstanceLayerProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateInstanceLayerProperties)},
    {"vkEnumerateDeviceLayerProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateDeviceLayerProperties)},
    {"vkQueueSubmit", reinterpret_cast<PFN_vkVoidFunction>(QueueSubmit)},
    {"vkAllocateMemory", reinterpret_cast<PFN_vkVoidFunction>(AllocateMemory)},
    {"vkFreeMemory", reinterpret_cast<PFN_vkVoidFunction>(FreeMemory)},
    {"vkMapMemory", reinterpret_cast<PFN_vkVoidFunction>(MapMemory)},
    {"vkUnmapMemory", reinterpret_cast<PFN_vkVoidFunction>(UnmapMemory)},
    {"vkFlushMappedMemoryRanges", reinterpret_cast<PFN_vkVoidFunction>(FlushMappedMemoryRanges)},
    {"vkInvalidateMappedMemoryRanges", reinterpret_cast<PFN_vkVoidFunction>(InvalidateMappedMemoryRanges)},
    {"vkGetDeviceMemoryCommitment", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceMemoryCommitment)},
    {"vkBindBufferMemory", reinterpret_cast<PFN_vkVoidFunction>(BindBufferMemory)},
    {"vkBindImageMemory", reinterpret_cast<PFN_vkVoidFunction>(BindImageMemory)},
    {"vkGetBufferMemoryRequirements", reinterpret_cast<PFN_vkVoidFunction>(GetBufferMemoryRequirements)},
    {"vkGetImageMemoryRequirements", reinterpret_cast<PFN_vkVoidFunction>(GetImageMemoryRequirements)},
    {"vkGetImageSparseMemoryRequirements", reinterpret_cast<PFN_vkVoidFunction>(GetImageSparseMemoryRequirements)},
    {"vkQueueBindSparse", reinterpret_cast<PFN_vkVoidFunction>(QueueBindSparse)},
    {"vkCreateFence", reinterpret_cast<PFN_vkVoidFunction>(CreateFence)},
    {"vkDestroyFence", reinterpret_cast<PFN_vkVoidFunction>(DestroyFence)},
    {"vkResetFences", reinterpret_cast<PFN_vkVoidFunction>(ResetFences)},
    {"vkGetFenceStatus", reinterpret_cast<PFN_vkVoidFunction>(GetFenceStatus)},
    {"vkWaitForFences", reinterpret_cast<PFN_vkVoidFunction>(WaitForFences)},
    {"vkCreateSemaphore", reinterpret_cast<PFN_vkVoidFunction>(CreateSemaphore)},
    {"vkDestroySemaphore", reinterpret_cast<PFN_vkVoidFunction>(DestroySemaphore)},
    {"vkCreateEvent", reinterpret_cast<PFN_vkVoidFunction>(CreateEvent)},
    {"vkDestroyEvent", reinterpret_cast<PFN_vkVoidFunction>(DestroyEvent)},
    {"vkGetEventStatus", reinterpret_cast<PFN_vkVoidFunction>(GetEventStatus)},
    {"vkSetEvent", reinterpret_cast<PFN_vkVoidFunction>(SetEvent)},
    {"vkResetEvent", reinterpret_cast<PFN_vkVoidFunction>(ResetEvent)},
    {"vkCreateQueryPool", reinterpret_cast<PFN_vkVoidFunction>(CreateQueryPool)},
    {"vkDestroyQueryPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyQueryPool)},
    {"vkGetQueryPoolResults", reinterpret_cast<PFN_vkVoidFunction>(GetQueryPoolResults)},
    {"vkCreateBuffer", reinterpret_cast<PFN_vkVoidFunction>(CreateBuffer)},
    {"vkDestroyBuffer", reinterpret_cast<PFN_vkVoidFunction>(DestroyBuffer)},
    {"vkCreateBufferView", reinterpret_cast<PFN_vkVoidFunction>(CreateBufferView)},
    {"vkDestroyBufferView", reinterpret_cast<PFN_vkVoidFunction>(DestroyBufferView)},
    {"vkCreateImage", reinterpret_cast<PFN_vkVoidFunction>(CreateImage)},
    {"vkDestroyImage", reinterpret_cast<PFN_vkVoidFunction>(DestroyImage)},
    {"vkGetImageSubresourceLayout", reinterpret_cast<PFN_vkVoidFunction>(GetImageSubresourceLayout)},
    {"vkCreateImageView", reinterpret_cast<PFN_vkVoidFunction>(CreateImageView)},
    {"vkDestroyImageView", reinterpret_cast<PFN_vkVoidFunction>(DestroyImageView)},
    {"vkCreateShaderModule", reinterpret_cast<PFN_vkVoidFunction>(CreateShaderModule)},
    {"vkDestroyShaderModule", reinterpret_cast<PFN_vkVoidFunction>(DestroyShaderModule)},
    {"vkCreatePipelineCache", reinterpret_cast<PFN_vkVoidFunction>(CreatePipelineCache)},
    {"vkDestroyPipelineCache", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipelineCache)},
    {"vkGetPipelineCacheData", reinterpret_cast<PFN_vkVoidFunction>(GetPipelineCacheData)},
    {"vkMergePipelineCaches", reinterpret_cast<PFN_vkVoidFunction>(MergePipelineCaches)},
    {"vkCreateGraphicsPipelines", reinterpret_cast<PFN_vkVoidFunction>(CreateGraphicsPipelines)},
    {"vkCreateComputePipelines", reinterpret_cast<PFN_vkVoidFunction>(CreateComputePipelines)},
    {"vkDestroyPipeline", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipeline)},
    {"vkCreatePipelineLayout", reinterpret_cast<PFN_vkVoidFunction>(CreatePipelineLayout)},
    {"vkDestroyPipelineLayout", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipelineLayout)},
    {"vkCreateSampler", reinterpret_cast<PFN_vkVoidFunction>(CreateSampler)},
{"vkDestroySampler", reinterpret_cast<PFN_vkVoidFunction>(DestroySampler)}, 2510 {"vkCreateDescriptorSetLayout", reinterpret_cast<PFN_vkVoidFunction>(CreateDescriptorSetLayout)}, 2511 {"vkDestroyDescriptorSetLayout", reinterpret_cast<PFN_vkVoidFunction>(DestroyDescriptorSetLayout)}, 2512 {"vkCreateDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(CreateDescriptorPool)}, 2513 {"vkDestroyDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyDescriptorPool)}, 2514 {"vkResetDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(ResetDescriptorPool)}, 2515 {"vkAllocateDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(AllocateDescriptorSets)}, 2516 {"vkFreeDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(FreeDescriptorSets)}, 2517 {"vkUpdateDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(UpdateDescriptorSets)}, 2518 {"vkCreateFramebuffer", reinterpret_cast<PFN_vkVoidFunction>(CreateFramebuffer)}, 2519 {"vkDestroyFramebuffer", reinterpret_cast<PFN_vkVoidFunction>(DestroyFramebuffer)}, 2520 {"vkCreateRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CreateRenderPass)}, 2521 {"vkDestroyRenderPass", reinterpret_cast<PFN_vkVoidFunction>(DestroyRenderPass)}, 2522 {"vkGetRenderAreaGranularity", reinterpret_cast<PFN_vkVoidFunction>(GetRenderAreaGranularity)}, 2523 {"vkCreateCommandPool", reinterpret_cast<PFN_vkVoidFunction>(CreateCommandPool)}, 2524 {"vkDestroyCommandPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyCommandPool)}, 2525 {"vkResetCommandPool", reinterpret_cast<PFN_vkVoidFunction>(ResetCommandPool)}, 2526 {"vkAllocateCommandBuffers", reinterpret_cast<PFN_vkVoidFunction>(AllocateCommandBuffers)}, 2527 {"vkFreeCommandBuffers", reinterpret_cast<PFN_vkVoidFunction>(FreeCommandBuffers)}, 2528 {"vkBeginCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(BeginCommandBuffer)}, 2529 {"vkCmdBindPipeline", reinterpret_cast<PFN_vkVoidFunction>(CmdBindPipeline)}, 2530 {"vkCmdBindDescriptorSets", 
reinterpret_cast<PFN_vkVoidFunction>(CmdBindDescriptorSets)}, 2531 {"vkCmdBindIndexBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdBindIndexBuffer)}, 2532 {"vkCmdBindVertexBuffers", reinterpret_cast<PFN_vkVoidFunction>(CmdBindVertexBuffers)}, 2533 {"vkCmdDrawIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndirect)}, 2534 {"vkCmdDrawIndexedIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndexedIndirect)}, 2535 {"vkCmdDispatchIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDispatchIndirect)}, 2536 {"vkCmdCopyBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyBuffer)}, 2537 {"vkCmdCopyImage", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyImage)}, 2538 {"vkCmdBlitImage", reinterpret_cast<PFN_vkVoidFunction>(CmdBlitImage)}, 2539 {"vkCmdCopyBufferToImage", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyBufferToImage)}, 2540 {"vkCmdCopyImageToBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyImageToBuffer)}, 2541 {"vkCmdUpdateBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdUpdateBuffer)}, 2542 {"vkCmdFillBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdFillBuffer)}, 2543 {"vkCmdClearColorImage", reinterpret_cast<PFN_vkVoidFunction>(CmdClearColorImage)}, 2544 {"vkCmdClearDepthStencilImage", reinterpret_cast<PFN_vkVoidFunction>(CmdClearDepthStencilImage)}, 2545 {"vkCmdResolveImage", reinterpret_cast<PFN_vkVoidFunction>(CmdResolveImage)}, 2546 {"vkCmdSetEvent", reinterpret_cast<PFN_vkVoidFunction>(CmdSetEvent)}, 2547 {"vkCmdResetEvent", reinterpret_cast<PFN_vkVoidFunction>(CmdResetEvent)}, 2548 {"vkCmdWaitEvents", reinterpret_cast<PFN_vkVoidFunction>(CmdWaitEvents)}, 2549 {"vkCmdPipelineBarrier", reinterpret_cast<PFN_vkVoidFunction>(CmdPipelineBarrier)}, 2550 {"vkCmdBeginQuery", reinterpret_cast<PFN_vkVoidFunction>(CmdBeginQuery)}, 2551 {"vkCmdEndQuery", reinterpret_cast<PFN_vkVoidFunction>(CmdEndQuery)}, 2552 {"vkCmdResetQueryPool", reinterpret_cast<PFN_vkVoidFunction>(CmdResetQueryPool)}, 2553 {"vkCmdWriteTimestamp", 
reinterpret_cast<PFN_vkVoidFunction>(CmdWriteTimestamp)}, 2554 {"vkCmdCopyQueryPoolResults", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyQueryPoolResults)}, 2555 {"vkCmdPushConstants", reinterpret_cast<PFN_vkVoidFunction>(CmdPushConstants)}, 2556 {"vkCmdBeginRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CmdBeginRenderPass)}, 2557 {"vkDestroySurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR)}, 2558 {"vkGetPhysicalDeviceSurfaceSupportKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR)}, 2559 {"vkGetPhysicalDeviceSurfaceCapabilitiesKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR)}, 2560 {"vkGetPhysicalDeviceSurfaceFormatsKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR)}, 2561 {"vkGetPhysicalDeviceSurfacePresentModesKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR)}, 2562 {"vkCreateSwapchainKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR)}, 2563 {"vkDestroySwapchainKHR", reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR)}, 2564 {"vkGetSwapchainImagesKHR", reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR)}, 2565 {"vkAcquireNextImageKHR", reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR)}, 2566 {"vkQueuePresentKHR", reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR)}, 2567 {"vkCreateDisplayModeKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateDisplayModeKHR)}, 2568 {"vkGetDisplayPlaneCapabilitiesKHR", reinterpret_cast<PFN_vkVoidFunction>(GetDisplayPlaneCapabilitiesKHR)}, 2569 {"vkCreateDisplayPlaneSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateDisplayPlaneSurfaceKHR)}, 2570 {"vkCreateSharedSwapchainsKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR)}, 2571 #ifdef VK_USE_PLATFORM_XLIB_KHR 2572 {"vkCreateXlibSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR)}, 2573 #endif 2574 #ifdef VK_USE_PLATFORM_XCB_KHR 2575 {"vkCreateXcbSurfaceKHR", 
reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR)}, 2576 #endif 2577 #ifdef VK_USE_PLATFORM_WAYLAND_KHR 2578 {"vkCreateWaylandSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR)}, 2579 #endif 2580 #ifdef VK_USE_PLATFORM_MIR_KHR 2581 {"vkCreateMirSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR)}, 2582 #endif 2583 #ifdef VK_USE_PLATFORM_ANDROID_KHR 2584 {"vkCreateAndroidSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR)}, 2585 #endif 2586 #ifdef VK_USE_PLATFORM_WIN32_KHR 2587 {"vkCreateWin32SurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR)}, 2588 #endif 2589 {"vkCmdDrawIndirectCountAMD", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndirectCountAMD)}, 2590 {"vkCmdDrawIndexedIndirectCountAMD", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndexedIndirectCountAMD)}, 2591 #ifdef VK_USE_PLATFORM_WIN32_KHR 2592 {"vkGetMemoryWin32HandleNV", reinterpret_cast<PFN_vkVoidFunction>(GetMemoryWin32HandleNV)}, 2593 #endif 2594 }; 2595 2596 2597 } // namespace unique_objects 2598