#!/usr/bin/python3 -i
#
# Copyright (c) 2015-2017 The Khronos Group Inc.
# Copyright (c) 2015-2017 Valve Corporation
# Copyright (c) 2015-2017 LunarG, Inc.
# Copyright (c) 2015-2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Tobin Ehlis <tobine@google.com>
#
# This script generates a Mock ICD that intercepts almost all Vulkan
# functions. The generated ICD is not a real driver: it returns canned
# values so that loader and validation tests can run without a physical
# GPU. It is also intended as a starting point that can be copied and
# customized to assist in creation of a new ICD.
import os
import re
import sys

from generator import *

# C++ emitted into the generated mock header: synchronization primitives and
# helpers for creating/destroying loader-compatible dispatchable handles.
HEADER_C_CODE = '''
using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;

static mutex_t global_lock;
static uint64_t global_unique_handle = 1;
static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
static uint32_t loader_interface_version = 0;
static bool negotiate_loader_icd_interface_called = false;
static void* CreateDispObjHandle() {
    auto handle = new VK_LOADER_DATA;
    set_loader_magic_value(handle);
    return handle;
}
static void DestroyDispObjHandle(void* handle) {
    delete reinterpret_cast<VK_LOADER_DATA*>(handle);
}
'''

# Manual code at the top of the cpp source file
SOURCE_CPP_PREFIX = '''
using std::unordered_map;

// Map device memory handle to any mapped allocations that we'll need to free on unmap
static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;

static VkPhysicalDevice physical_device = nullptr;
static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;

// TODO: Would like to codegen this but limits aren't in XML
static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
    limits->maxImageDimension1D = 4096;
    limits->maxImageDimension2D = 4096;
    limits->maxImageDimension3D = 256;
    limits->maxImageDimensionCube = 4096;
    limits->maxImageArrayLayers = 256;
    limits->maxTexelBufferElements = 65536;
    limits->maxUniformBufferRange = 16384;
    limits->maxStorageBufferRange = 134217728;
    limits->maxPushConstantsSize = 128;
    limits->maxMemoryAllocationCount = 4096;
    limits->maxSamplerAllocationCount = 4000;
    limits->bufferImageGranularity = 1;
    limits->sparseAddressSpaceSize = 2147483648;
    limits->maxBoundDescriptorSets = 4;
    limits->maxPerStageDescriptorSamplers = 16;
    limits->maxPerStageDescriptorUniformBuffers = 12;
    limits->maxPerStageDescriptorStorageBuffers = 4;
    limits->maxPerStageDescriptorSampledImages = 16;
    limits->maxPerStageDescriptorStorageImages = 4;
    limits->maxPerStageDescriptorInputAttachments = 4;
    // NOTE: In C++ '^' is bitwise XOR, not exponentiation. The previous
    // '128^2', '96^8', etc. evaluated to tiny XOR results (130, 104, ...);
    // use '*' so these limits come out as the intended products.
    limits->maxPerStageResources = 128*2;
    limits->maxDescriptorSetSamplers = 96*8;
    limits->maxDescriptorSetUniformBuffers = 72*8;
    limits->maxDescriptorSetUniformBuffersDynamic = 8;
    limits->maxDescriptorSetStorageBuffers = 24*8;
    limits->maxDescriptorSetStorageBuffersDynamic = 4;
    limits->maxDescriptorSetSampledImages = 96*8;
    limits->maxDescriptorSetStorageImages = 24*8;
    limits->maxDescriptorSetInputAttachments = 4;
    limits->maxVertexInputAttributes = 16;
    limits->maxVertexInputBindings = 16;
    limits->maxVertexInputAttributeOffset = 2047;
    limits->maxVertexInputBindingStride = 2048;
    limits->maxVertexOutputComponents = 64;
    limits->maxTessellationGenerationLevel = 64;
    limits->maxTessellationPatchSize = 32;
    limits->maxTessellationControlPerVertexInputComponents = 64;
    limits->maxTessellationControlPerVertexOutputComponents = 64;
    limits->maxTessellationControlPerPatchOutputComponents = 120;
    limits->maxTessellationControlTotalOutputComponents = 2048;
    limits->maxTessellationEvaluationInputComponents = 64;
    limits->maxTessellationEvaluationOutputComponents = 64;
    limits->maxGeometryShaderInvocations = 32;
    limits->maxGeometryInputComponents = 64;
    limits->maxGeometryOutputComponents = 64;
    limits->maxGeometryOutputVertices = 256;
    limits->maxGeometryTotalOutputComponents = 1024;
    limits->maxFragmentInputComponents = 64;
    limits->maxFragmentOutputAttachments = 4;
    limits->maxFragmentDualSrcAttachments = 1;
    limits->maxFragmentCombinedOutputResources = 4;
    limits->maxComputeSharedMemorySize = 16384;
    limits->maxComputeWorkGroupCount[0] = 65535;
    limits->maxComputeWorkGroupCount[1] = 65535;
    limits->maxComputeWorkGroupCount[2] = 65535;
    limits->maxComputeWorkGroupInvocations = 128;
    limits->maxComputeWorkGroupSize[0] = 128;
    limits->maxComputeWorkGroupSize[1] = 128;
    limits->maxComputeWorkGroupSize[2] = 64;
    limits->subPixelPrecisionBits = 4;
    limits->subTexelPrecisionBits = 4;
    limits->mipmapPrecisionBits = 4;
    // NOTE: Was '(2^32) - 1' and '(2^16) - 1', which XOR to 33 and 17;
    // use the real maximum values the spec intends here.
    limits->maxDrawIndexedIndexValue = UINT32_MAX;
    limits->maxDrawIndirectCount = UINT16_MAX;
    limits->maxSamplerLodBias = 2.0f;
    limits->maxSamplerAnisotropy = 16;
    limits->maxViewports = 16;
    limits->maxViewportDimensions[0] = 4096;
    limits->maxViewportDimensions[1] = 4096;
    limits->viewportBoundsRange[0] = -8192;
    limits->viewportBoundsRange[1] = 8191;
    limits->viewportSubPixelBits = 0;
    limits->minMemoryMapAlignment = 64;
    limits->minTexelBufferOffsetAlignment = 16;
    limits->minUniformBufferOffsetAlignment = 16;
    limits->minStorageBufferOffsetAlignment = 16;
    limits->minTexelOffset = -8;
    limits->maxTexelOffset = 7;
    limits->minTexelGatherOffset = -8;
    limits->maxTexelGatherOffset = 7;
    limits->minInterpolationOffset = 0.0f;
    limits->maxInterpolationOffset = 0.5f;
    limits->subPixelInterpolationOffsetBits = 4;
    limits->maxFramebufferWidth = 4096;
    limits->maxFramebufferHeight = 4096;
    limits->maxFramebufferLayers = 256;
    limits->framebufferColorSampleCounts = 0x7F;
    limits->framebufferDepthSampleCounts = 0x7F;
    limits->framebufferStencilSampleCounts = 0x7F;
    limits->framebufferNoAttachmentsSampleCounts = 0x7F;
    limits->maxColorAttachments = 4;
    limits->sampledImageColorSampleCounts = 0x7F;
    limits->sampledImageIntegerSampleCounts = 0x7F;
    limits->sampledImageDepthSampleCounts = 0x7F;
    limits->sampledImageStencilSampleCounts = 0x7F;
    limits->storageImageSampleCounts = 0x7F;
    limits->maxSampleMaskWords = 1;
    limits->timestampComputeAndGraphics = VK_TRUE;
    limits->timestampPeriod = 1;
    limits->maxClipDistances = 8;
    limits->maxCullDistances = 8;
    limits->maxCombinedClipAndCullDistances = 8;
    limits->discreteQueuePriorities = 2;
    limits->pointSizeRange[0] = 1.0f;
    limits->pointSizeRange[1] = 64.0f;
    limits->lineWidthRange[0] = 1.0f;
    limits->lineWidthRange[1] = 8.0f;
    limits->pointSizeGranularity = 1.0f;
    limits->lineWidthGranularity = 1.0f;
    limits->strictLines = VK_TRUE;
    limits->standardSampleLocations = VK_TRUE;
    limits->optimalBufferCopyOffsetAlignment = 1;
    limits->optimalBufferCopyRowPitchAlignment = 1;
    limits->nonCoherentAtomSize = 256;

    return *limits;
}
'''

# Manual code at the end of the cpp source file
SOURCE_CPP_POSTFIX = '''

static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
    // TODO: This function should only care about physical device functions and return nullptr for other functions
    const auto &item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }
    // Mock should intercept all functions so if we get here just return null
    return nullptr;
}

} // namespace vkmock

#if defined(__GNUC__) && __GNUC__ >= 4
#define EXPORT __attribute__((visibility("default")))
#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
#define EXPORT __attribute__((visibility("default")))
#else
#define EXPORT
#endif

extern "C" {

EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
    if (!vkmock::negotiate_loader_icd_interface_called) {
        vkmock::loader_interface_version = 1;
    }
    return vkmock::GetInstanceProcAddr(instance, pName);
}

EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
    return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
    vkmock::negotiate_loader_icd_interface_called = true;
    vkmock::loader_interface_version = *pSupportedVersion;
    if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
        *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
    }
    return VK_SUCCESS;
}


EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
    VkInstance                                  instance,
    VkSurfaceKHR                                surface,
    const VkAllocationCallbacks*                pAllocator)
{
    vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    queueFamilyIndex,
    VkSurfaceKHR                                surface,
    VkBool32*                                   pSupported)
{
    return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
{
    return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    uint32_t*                                   pSurfaceFormatCount,
    VkSurfaceFormatKHR*                         pSurfaceFormats)
{
    return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    uint32_t*                                   pPresentModeCount,
    VkPresentModeKHR*                           pPresentModes)
{
    return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
    VkInstance                                  instance,
    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}

#ifdef VK_USE_PLATFORM_XLIB_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
    VkInstance                                  instance,
    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_XLIB_KHR */

#ifdef VK_USE_PLATFORM_XCB_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
    VkInstance                                  instance,
    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_XCB_KHR */

#ifdef VK_USE_PLATFORM_WAYLAND_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
    VkInstance                                  instance,
    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_WAYLAND_KHR */

#ifdef VK_USE_PLATFORM_MIR_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR(
    VkInstance                                  instance,
    const VkMirSurfaceCreateInfoKHR*            pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_MIR_KHR */

#ifdef VK_USE_PLATFORM_ANDROID_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
    VkInstance                                  instance,
    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_ANDROID_KHR */

#ifdef VK_USE_PLATFORM_WIN32_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
    VkInstance                                  instance,
    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_WIN32_KHR */

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHX(
    VkDevice                                    device,
    VkSurfaceKHR                                surface,
    VkDeviceGroupPresentModeFlagsKHX*           pModes)
{
    return vkmock::GetDeviceGroupSurfacePresentModesKHX(device, surface, pModes);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHX(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    uint32_t*                                   pRectCount,
    VkRect2D*                                   pRects)
{
    return vkmock::GetPhysicalDevicePresentRectanglesKHX(physicalDevice, surface, pRectCount, pRects);
}

#ifdef VK_USE_PLATFORM_VI_NN

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
    VkInstance                                  instance,
    const VkViSurfaceCreateInfoNN*              pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_VI_NN */

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
{
    return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
}

#ifdef VK_USE_PLATFORM_IOS_MVK

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
    VkInstance                                  instance,
    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_IOS_MVK */

#ifdef VK_USE_PLATFORM_MACOS_MVK

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
    VkInstance                                  instance,
    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_MACOS_MVK */

} // end extern "C"

'''

# Hand-written C bodies, keyed by Vulkan command name, that replace the
# default codegen'd stub for that command in the generated mock ICD.
CUSTOM_C_INTERCEPTS = {
'vkCreateInstance': '''
    // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
    //  apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
    //  ICD should behave as normal.
    if (loader_interface_version <= 4) {
        return VK_ERROR_INCOMPATIBLE_DRIVER;
    }
    *pInstance = (VkInstance)CreateDispObjHandle();
    // TODO: If emulating specific device caps, will need to add intelligence here
    return VK_SUCCESS;
''',
'vkDestroyInstance': '''
    // Destroy physical device
    DestroyDispObjHandle((void*)physical_device);

    DestroyDispObjHandle((void*)instance);
''',
'vkEnumeratePhysicalDevices': '''
    if (pPhysicalDevices) {
        if (!physical_device) {
            physical_device = (VkPhysicalDevice)CreateDispObjHandle();
        }
        *pPhysicalDevices = physical_device;
    } else {
        *pPhysicalDeviceCount = 1;
    }
    return VK_SUCCESS;
''',
'vkCreateDevice': '''
    *pDevice = (VkDevice)CreateDispObjHandle();
    // TODO: If emulating specific device caps, will need to add intelligence here
    return VK_SUCCESS;
''',
'vkDestroyDevice': '''
    unique_lock_t lock(global_lock);
    // First destroy sub-device objects
    // Destroy Queues
    for (auto dev_queue_map_pair : queue_map) {
        for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
            for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
                DestroyDispObjHandle((void*)index_queue_pair.second);
            }
        }
    }
    queue_map.clear();
    // Now destroy device
    DestroyDispObjHandle((void*)device);
    // TODO: If emulating specific device caps, will need to add intelligence here
''',
'vkGetDeviceQueue': '''
    unique_lock_t lock(global_lock);
    auto queue = queue_map[device][queueFamilyIndex][queueIndex];
    if (queue) {
        *pQueue = queue;
    } else {
        *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
    }
    // TODO: If emulating specific device caps, will need to add intelligence here
    return;
''',
'vkEnumerateInstanceLayerProperties': '''
    return VK_SUCCESS;
''',
'vkEnumerateDeviceLayerProperties': '''
    return VK_SUCCESS;
''',
'vkEnumerateInstanceExtensionProperties': '''
    // If requesting number of extensions, return that
    if (!pLayerName) {
        if (!pProperties) {
            *pPropertyCount = (uint32_t)instance_extension_map.size();
        } else {
            uint32_t i = 0;
            for (const auto &name_ver_pair : instance_extension_map) {
                if (i == *pPropertyCount) {
                    break;
                }
                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
                pProperties[i].specVersion = name_ver_pair.second;
                ++i;
            }
            if (i != instance_extension_map.size()) {
                return VK_INCOMPLETE;
            }
        }
    }
    // If requesting extension properties, fill in data struct for number of extensions
    return VK_SUCCESS;
''',
'vkEnumerateDeviceExtensionProperties': '''
    // If requesting number of extensions, return that
    if (!pLayerName) {
        if (!pProperties) {
            *pPropertyCount = (uint32_t)device_extension_map.size();
        } else {
            uint32_t i = 0;
            for (const auto &name_ver_pair : device_extension_map) {
                if (i == *pPropertyCount) {
                    break;
                }
                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
                pProperties[i].specVersion = name_ver_pair.second;
                ++i;
            }
            if (i != device_extension_map.size()) {
                return VK_INCOMPLETE;
            }
        }
    }
    // If requesting extension properties, fill in data struct for number of extensions
    return VK_SUCCESS;
''',
'vkGetInstanceProcAddr': '''
    if (!negotiate_loader_icd_interface_called) {
        loader_interface_version = 0;
    }
    const auto &item = name_to_funcptr_map.find(pName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }
    // Mock should intercept all functions so if we get here just return null
    return nullptr;
''',
'vkGetDeviceProcAddr': '''
    return GetInstanceProcAddr(nullptr, pName);
''',
'vkGetPhysicalDeviceMemoryProperties': '''
    pMemoryProperties->memoryTypeCount = 2;
    pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    pMemoryProperties->memoryTypes[0].heapIndex = 0;
    pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    pMemoryProperties->memoryTypes[1].heapIndex = 1;
    pMemoryProperties->memoryHeapCount = 2;
    pMemoryProperties->memoryHeaps[0].flags = 0;
    pMemoryProperties->memoryHeaps[0].size = 8000000000;
    pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
    pMemoryProperties->memoryHeaps[1].size = 8000000000;
''',
'vkGetPhysicalDeviceMemoryProperties2KHR': '''
    GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
''',
'vkGetPhysicalDeviceQueueFamilyProperties': '''
    if (!pQueueFamilyProperties) {
        *pQueueFamilyPropertyCount = 1;
    } else {
        if (*pQueueFamilyPropertyCount) {
            pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
            pQueueFamilyProperties[0].queueCount = 1;
            pQueueFamilyProperties[0].timestampValidBits = 0;
            pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
        }
    }
''',
'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
    if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
        GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
    } else {
        GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
    }
''',
'vkGetPhysicalDeviceFeatures': '''
    uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
    VkBool32 *pBool = &pFeatures->robustBufferAccess;
    for (uint32_t i = 0; i < num_bools; ++i) {
        pBool[i] = VK_TRUE;
    }
''',
'vkGetPhysicalDeviceFeatures2KHR': '''
    GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
''',
'vkGetPhysicalDeviceFormatProperties': '''
    if (VK_FORMAT_UNDEFINED == format) {
        *pFormatProperties = { 0x0, 0x0, 0x0 };
    } else {
        // TODO: Just returning full support for everything initially
        *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
    }
''',
'vkGetPhysicalDeviceFormatProperties2KHR': '''
    GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
''',
'vkGetPhysicalDeviceImageFormatProperties': '''
    // TODO: Just hard-coding some values for now
    // TODO: If tiling is linear, limit the mips, levels, & sample count
    if (VK_IMAGE_TILING_LINEAR == tiling) {
        *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
    } else {
        *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F, 4294967296 };
    }
    return VK_SUCCESS;
''',
'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
    GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
    return VK_SUCCESS;
''',
'vkGetPhysicalDeviceProperties': '''
    // TODO: Just hard-coding some values for now
    pProperties->apiVersion = VK_API_VERSION_1_0;
    pProperties->driverVersion = 1;
    pProperties->vendorID = 0xba5eba11;
    pProperties->deviceID = 0xf005ba11;
    pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
    //std::string devName = "Vulkan Mock Device";
    strcpy(pProperties->deviceName, "Vulkan Mock Device");
    pProperties->pipelineCacheUUID[0] = 18;
    pProperties->limits = SetLimits(&pProperties->limits);
    pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
''',
'vkGetPhysicalDeviceProperties2KHR': '''
    GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
''',
'vkGetBufferMemoryRequirements': '''
    // TODO: Just hard-coding reqs for now
    pMemoryRequirements->size = 4096;
    pMemoryRequirements->alignment = 1;
    pMemoryRequirements->memoryTypeBits = 0xFFFF;
''',
'vkGetBufferMemoryRequirements2KHR': '''
    GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
''',
'vkGetImageMemoryRequirements': '''
    // TODO: Just hard-coding reqs for now
    pMemoryRequirements->size = 4096;
    pMemoryRequirements->alignment = 1;
    pMemoryRequirements->memoryTypeBits = 0xFFFF;
''',
'vkGetImageMemoryRequirements2KHR': '''
    GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
''',
'vkMapMemory': '''
    unique_lock_t lock(global_lock);
    // TODO: Just hard-coding 64k whole size for now
    if (VK_WHOLE_SIZE == size)
        size = 0x10000;
    void* map_addr = malloc((size_t)size);
    mapped_memory_map[memory].push_back(map_addr);
    *ppData = map_addr;
    return VK_SUCCESS;
''',
'vkUnmapMemory': '''
    unique_lock_t lock(global_lock);
    for (auto map_addr : mapped_memory_map[memory]) {
        free(map_addr);
    }
    mapped_memory_map.erase(memory);
''',
'vkGetImageSubresourceLayout': '''
    // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
    *pLayout = VkSubresourceLayout(); // Default constructor zero values.
''',
}

# MockICDGeneratorOptions - subclass of GeneratorOptions.
#
# Adds options used by MockICDOutputGenerator objects during Mock
# ICD generation.
#
# Additional members
#   prefixText - list of strings to prefix generated header with
#     (usually a copyright statement + calling convention macros).
#   protectFile - True if multiple inclusion protection should be
#     generated (based on the filename) around the entire header.
#   protectFeature - True if #ifndef..#endif protection should be
#     generated around a feature interface in the header file.
#   genFuncPointers - True if function pointer typedefs should be
#     generated
#   protectProto - If conditional protection should be generated
#     around prototype declarations, set to either '#ifdef'
#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
#     to require opt-out (#ifndef protectProtoStr). Otherwise
#     set to None.
#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
#     declarations, if protectProto is set
#   apicall - string to use for the function declaration prefix,
#     such as APICALL on Windows.
#   apientry - string to use for the calling convention macro,
#     in typedefs, such as APIENTRY.
#   apientryp - string to use for the calling convention macro
#     in function pointer typedefs, such as APIENTRYP.
699 # indentFuncProto - True if prototype declarations should put each 700 # parameter on a separate line 701 # indentFuncPointer - True if typedefed function pointers should put each 702 # parameter on a separate line 703 # alignFuncParam - if nonzero and parameters are being put on a 704 # separate line, align parameter names at the specified column 705 class MockICDGeneratorOptions(GeneratorOptions): 706 def __init__(self, 707 filename = None, 708 directory = '.', 709 apiname = None, 710 profile = None, 711 versions = '.*', 712 emitversions = '.*', 713 defaultExtensions = None, 714 addExtensions = None, 715 removeExtensions = None, 716 sortProcedure = regSortFeatures, 717 prefixText = "", 718 genFuncPointers = True, 719 protectFile = True, 720 protectFeature = True, 721 protectProto = None, 722 protectProtoStr = None, 723 apicall = '', 724 apientry = '', 725 apientryp = '', 726 indentFuncProto = True, 727 indentFuncPointer = False, 728 alignFuncParam = 0, 729 helper_file_type = ''): 730 GeneratorOptions.__init__(self, filename, directory, apiname, profile, 731 versions, emitversions, defaultExtensions, 732 addExtensions, removeExtensions, sortProcedure) 733 self.prefixText = prefixText 734 self.genFuncPointers = genFuncPointers 735 self.protectFile = protectFile 736 self.protectFeature = protectFeature 737 self.protectProto = protectProto 738 self.protectProtoStr = protectProtoStr 739 self.apicall = apicall 740 self.apientry = apientry 741 self.apientryp = apientryp 742 self.indentFuncProto = indentFuncProto 743 self.indentFuncPointer = indentFuncPointer 744 self.alignFuncParam = alignFuncParam 745 746 # MockICDOutputGenerator - subclass of OutputGenerator. 747 # Generates a mock vulkan ICD. 748 # This is intended to be a minimal replacement for a vulkan device in order 749 # to enable Vulkan Validation testing. 750 # 751 # ---- methods ---- 752 # MockOutputGenerator(errFile, warnFile, diagFile) - args as for 753 # OutputGenerator. 
# Defines additional internal state.
# ---- methods overriding base class ----
# beginFile(genOpts)
# endFile()
# beginFeature(interface, emit)
# endFeature()
# genType(typeinfo,name)
# genStruct(typeinfo,name)
# genGroup(groupinfo,name)
# genEnum(enuminfo, name)
# genCmd(cmdinfo)
class MockICDOutputGenerator(OutputGenerator):
    """Generate specified API interfaces in a specific style, such as a C header"""
    # This is an ordered list of sections in the header file.
    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
                     'group', 'bitmask', 'funcpointer', 'struct']
    ALL_SECTIONS = TYPE_SECTIONS + ['command']
    def __init__(self,
                 errFile = sys.stderr,
                 warnFile = sys.stderr,
                 diagFile = sys.stdout):
        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
        # Internal state - accumulators for different inner block text
        self.sections = {section: [] for section in self.ALL_SECTIONS}
        self.intercepts = []

    # Check if the parameter passed in is a pointer to an array
    def paramIsArray(self, param):
        return param.attrib.get('len') is not None

    # Check if the parameter passed in is a pointer
    def paramIsPointer(self, param):
        # A param is a pointer if any tail text outside the <type> element
        # contains a '*'.
        # NOTE: original used "elem.tag is not 'type'" — identity comparison
        # with a string literal only worked by CPython interning accident and
        # raises SyntaxWarning on modern Python; use '!=' for equality.
        ispointer = False
        for elem in param:
            if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
                ispointer = True
        return ispointer

    # Check if an object is a non-dispatchable handle
    def isHandleTypeNonDispatchable(self, handletype):
        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
        if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
            return True
        else:
            return False

    # Check if an object is a dispatchable handle
    def isHandleTypeDispatchable(self, handletype):
        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
        if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
            return True
        else:
            return False

    def beginFile(self, genOpts):
        """Start a new output file: emit inclusion guard, prefix text, includes,
        the vkmock namespace, and the fixed header/source preamble code."""
        OutputGenerator.beginFile(self, genOpts)
        # C-specific
        #
        # Multiple inclusion protection & C++ namespace.
        self.header = False
        if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
            self.header = True
            # Use a raw string for the regex so '\.' is not treated as an
            # (invalid) Python string escape.
            headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
            write('#ifndef', headerSym, file=self.outFile)
            write('#define', headerSym, '1', file=self.outFile)
            self.newline()
        #
        # User-supplied prefix text, if any (list of strings)
        if (genOpts.prefixText):
            for s in genOpts.prefixText:
                write(s, file=self.outFile)
        if self.header:
            write('#include <unordered_map>', file=self.outFile)
            write('#include <mutex>', file=self.outFile)
            write('#include <string>', file=self.outFile)
            write('#include <cstring>', file=self.outFile)
            write('#include "vulkan/vk_icd.h"', file=self.outFile)
        else:
            write('#include "mock_icd.h"', file=self.outFile)
            write('#include <stdlib.h>', file=self.outFile)
            write('#include <vector>', file=self.outFile)

        write('namespace vkmock {', file=self.outFile)
        if self.header:
            self.newline()
            write(HEADER_C_CODE, file=self.outFile)
            # Include all of the extensions in ICD except specific ignored ones
            device_exts = []
            instance_exts = []
            # Ignore extensions that ICDs should not implement or are not safe to report
            ignore_exts = ['VK_EXT_validation_cache', 'VK_KHR_push_descriptor']
            for ext in self.registry.tree.findall("extensions/extension"):
                if '0' != ext[0][0].attrib['value']: # Only include implemented extensions
                    if (ext.attrib['name'] in ignore_exts):
                        pass
                    elif (ext.attrib.get('type') and 'instance' == ext.attrib['type']):
                        instance_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
                    else:
                        device_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
            write('// Map of instance extension name to version', file=self.outFile)
            write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
            write('\n'.join(instance_exts), file=self.outFile)
            write('};', file=self.outFile)
            write('// Map of device extension name to version', file=self.outFile)
            write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
            write('\n'.join(device_exts), file=self.outFile)
            write('};', file=self.outFile)

        else:
            self.newline()
            write(SOURCE_CPP_PREFIX, file=self.outFile)

    def endFile(self):
        """Finish the output file: emit the intercept map (header) or the
        loader-interface postfix (source), then close namespace/guard."""
        # C-specific
        # Finish C++ namespace and multiple inclusion protection
        self.newline()
        if self.header:
            # record intercepted procedures
            write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
            write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
            write('\n'.join(self.intercepts), file=self.outFile)
            write('};\n', file=self.outFile)
            self.newline()
            write('} // namespace vkmock', file=self.outFile)
            self.newline()
            write('#endif', file=self.outFile)
        else: # Loader-layer-interface, need to implement global interface functions
            write(SOURCE_CPP_POSTFIX, file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFile(self)

    def beginFeature(self, interface, emit):
        """Reset the per-feature section accumulators before a feature."""
        #write('// starting beginFeature', file=self.outFile)
        # Start processing in superclass
        OutputGenerator.beginFeature(self, interface, emit)
        # C-specific
        # Accumulate includes, defines, types, enums, function pointer typedefs,
        # and function prototypes separately for this feature. They're only
        # printed in endFeature().
        self.sections = {section: [] for section in self.ALL_SECTIONS}
        #write('// ending beginFeature', file=self.outFile)

    def endFeature(self):
        """Write the accumulated sections for the feature, wrapped in the
        feature/platform protection #ifdefs when enabled."""
        # C-specific
        # Actually write the interface to the output file.
        #write('// starting endFeature', file=self.outFile)
        if (self.emit):
            self.newline()
            if (self.genOpts.protectFeature):
                write('#ifndef', self.featureName, file=self.outFile)
            # If type declarations are needed by other features based on
            # this one, it may be necessary to suppress the ExtraProtect,
            # or move it below the 'for section...' loop.
            #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
            if (self.featureExtraProtect is not None):
                write('#ifdef', self.featureExtraProtect, file=self.outFile)
            #write('#define', self.featureName, '1', file=self.outFile)
            for section in self.TYPE_SECTIONS:
                #write('// endFeature writing section'+section, file=self.outFile)
                contents = self.sections[section]
                if contents:
                    write('\n'.join(contents), file=self.outFile)
                    self.newline()
            #write('// endFeature looking at self.sections[command]', file=self.outFile)
            if (self.sections['command']):
                write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
                self.newline()
            if (self.featureExtraProtect is not None):
                write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
            if (self.genOpts.protectFeature):
                write('#endif /*', self.featureName, '*/', file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFeature(self)
        #write('// ending endFeature', file=self.outFile)
    #
    # Append a definition to the specified section
    def appendSection(self, section, text):
        # self.sections[section].append('SECTION: ' + section + '\n')
        self.sections[section].append(text)
    #
    # Type generation
    def genType(self, typeinfo, name):
        pass
    #
    # Struct (e.g. C "struct" type) generation.
936 # This is a special case of the <type> tag where the contents are 937 # interpreted as a set of <member> tags instead of freeform C 938 # C type declarations. The <member> tags are just like <param> 939 # tags - they are a declaration of a struct or union member. 940 # Only simple member declarations are supported (no nested 941 # structs etc.) 942 def genStruct(self, typeinfo, typeName): 943 OutputGenerator.genStruct(self, typeinfo, typeName) 944 body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n' 945 # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam) 946 for member in typeinfo.elem.findall('.//member'): 947 body += self.makeCParamDecl(member, self.genOpts.alignFuncParam) 948 body += ';\n' 949 body += '} ' + typeName + ';\n' 950 self.appendSection('struct', body) 951 # 952 # Group (e.g. C "enum" type) generation. 953 # These are concatenated together with other types. 954 def genGroup(self, groupinfo, groupName): 955 pass 956 # Enumerant generation 957 # <enum> tags may specify their values in several ways, but are usually 958 # just integers. 
959 def genEnum(self, enuminfo, name): 960 pass 961 # 962 # Command generation 963 def genCmd(self, cmdinfo, name): 964 decls = self.makeCDecls(cmdinfo.elem) 965 if self.header: # In the header declare all intercepts 966 self.appendSection('command', '') 967 self.appendSection('command', 'static %s' % (decls[0])) 968 if (self.featureExtraProtect != None): 969 self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ] 970 self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ] 971 if (self.featureExtraProtect != None): 972 self.intercepts += [ '#endif' ] 973 return 974 975 manual_functions = [ 976 # Include functions here to be interecpted w/ manually implemented function bodies 977 'vkGetDeviceProcAddr', 978 'vkGetInstanceProcAddr', 979 'vkCreateDevice', 980 'vkDestroyDevice', 981 'vkCreateInstance', 982 'vkDestroyInstance', 983 #'vkCreateDebugReportCallbackEXT', 984 #'vkDestroyDebugReportCallbackEXT', 985 'vkEnumerateInstanceLayerProperties', 986 'vkEnumerateInstanceExtensionProperties', 987 'vkEnumerateDeviceLayerProperties', 988 'vkEnumerateDeviceExtensionProperties', 989 ] 990 if name in manual_functions: 991 self.appendSection('command', '') 992 if name not in CUSTOM_C_INTERCEPTS: 993 self.appendSection('command', '// declare only') 994 self.appendSection('command', 'static %s' % (decls[0])) 995 self.appendSection('command', '// TODO: Implement custom intercept body') 996 else: 997 self.appendSection('command', 'static %s' % (decls[0][:-1])) 998 self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name])) 999 self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ] 1000 return 1001 # record that the function will be intercepted 1002 if (self.featureExtraProtect != None): 1003 self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ] 1004 self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ] 1005 if (self.featureExtraProtect != None): 1006 self.intercepts += [ '#endif' ] 1007 1008 OutputGenerator.genCmd(self, cmdinfo, name) 
1009 # 1010 self.appendSection('command', '') 1011 self.appendSection('command', 'static %s' % (decls[0][:-1])) 1012 if name in CUSTOM_C_INTERCEPTS: 1013 self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name])) 1014 return 1015 self.appendSection('command', '{') 1016 1017 api_function_name = cmdinfo.elem.attrib.get('name') 1018 # GET THE TYPE OF FUNCTION 1019 if True in [ftxt in api_function_name for ftxt in ['Create', 'Allocate']]: 1020 # Get last param 1021 last_param = cmdinfo.elem.findall('param')[-1] 1022 lp_txt = last_param.find('name').text 1023 lp_len = None 1024 if ('len' in last_param.attrib): 1025 lp_len = last_param.attrib['len'] 1026 lp_len = lp_len.replace('::', '->') 1027 lp_type = last_param.find('type').text 1028 handle_type = 'dispatchable' 1029 allocator_txt = 'CreateDispObjHandle()'; 1030 if (self.isHandleTypeNonDispatchable(lp_type)): 1031 handle_type = 'non-' + handle_type 1032 allocator_txt = 'global_unique_handle++'; 1033 # Need to lock in both cases 1034 self.appendSection('command', ' unique_lock_t lock(global_lock);') 1035 if (lp_len != None): 1036 #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len)) 1037 self.appendSection('command', ' for (uint32_t i = 0; i < %s; ++i) {' % (lp_len)) 1038 self.appendSection('command', ' %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt)) 1039 self.appendSection('command', ' }') 1040 else: 1041 #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type)) 1042 self.appendSection('command', ' *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt)) 1043 elif True in [ftxt in api_function_name for ftxt in ['Destroy', 'Free']]: 1044 self.appendSection('command', '//Destroy object') 1045 else: 1046 self.appendSection('command', '//Not a CREATE or DESTROY function') 1047 # Declare result variable, if any. 
1048 resulttype = cmdinfo.elem.find('proto/type') 1049 if (resulttype != None and resulttype.text == 'void'): 1050 resulttype = None 1051 1052 # Return result variable, if any. 1053 if (resulttype != None): 1054 self.appendSection('command', ' return VK_SUCCESS;') 1055 self.appendSection('command', '}') 1056 # 1057 # override makeProtoName to drop the "vk" prefix 1058 def makeProtoName(self, name, tail): 1059 return self.genOpts.apientry + name[2:] + tail 1060