/*-------------------------------------------------------------------------
 * Vulkan CTS Framework
 * --------------------
 *
 * Copyright (c) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Null (dummy) Vulkan implementation.
 *//*--------------------------------------------------------------------*/

#include "vkNullDriver.hpp"
#include "vkPlatform.hpp"
#include "vkImageUtil.hpp"
#include "vkQueryUtil.hpp"
#include "tcuFunctionLibrary.hpp"
#include "deMemory.h"

#if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && (DE_ANDROID_API >= __ANDROID_API_O__)
#	define USE_ANDROID_O_HARDWARE_BUFFER
#endif
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
#	include <android/hardware_buffer.h>
#endif

#include <stdexcept>
#include <algorithm>

namespace vk
{

namespace
{

using std::vector;

// Memory management

template<typename T>
void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
{
	void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
	if (!ptr)
		throw std::bad_alloc();
	return ptr;
}

void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
{
	pAllocator->pfnFree(pAllocator->pUserData, mem);
}

template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = DE_NULL;

	if (pAllocator)
	{
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		try
		{
			obj = new (mem) Object(parent, pCreateInfo);
			DE_ASSERT(obj == mem);
		}
		catch (...)
		{
			pAllocator->pfnFree(pAllocator->pUserData, mem);
			throw;
		}
	}
	else
		obj = new Object(parent, pCreateInfo);

	return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle, typename CreateInfo>
Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = DE_NULL;

	if (pAllocator)
	{
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
		try
		{
			obj = new (mem) Object(pCreateInfo);
			DE_ASSERT(obj == mem);
		}
		catch (...)
		{
			pAllocator->pfnFree(pAllocator->pUserData, mem);
			throw;
		}
	}
	else
		obj = new Object(pCreateInfo);

	return reinterpret_cast<Handle>(obj);
}

template<typename Object, typename Handle>
void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
{
	Object* obj = reinterpret_cast<Object*>(handle);

	if (pAllocator)
	{
		obj->~Object();
		freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
	}
	else
		delete obj;
}

template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	Object* const obj = allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
	return Handle((deUint64)(deUintptr)static_cast<BaseObject*>(obj));
}

template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
{
	return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
}

template<typename Object, typename Handle>
void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
{
	freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
}

// Object definitions

#define VK_NULL_RETURN(STMT)					\
	do {										\
		try {									\
			STMT;								\
			return VK_SUCCESS;					\
		} catch (const std::bad_alloc&) {		\
			return VK_ERROR_OUT_OF_HOST_MEMORY;	\
		} catch (VkResult res) {				\
			return res;							\
		}										\
	} while (deGetFalse())

// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
#define VK_NULL_FUNC_ENTRY(NAME, FUNC)	{ #NAME, (deFunctionPtr)FUNC }	// NOLINT(FUNC)

#define VK_NULL_DEFINE_DEVICE_OBJ(NAME)					\
struct NAME												\
{														\
	NAME (VkDevice, const Vk##NAME##CreateInfo*) {}		\
}

VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(RenderPass);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
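
// The per-object create/destroy entry points themselves are generated into vkNullDriverImpl.inl
// (included near the end of this file) and are expected to be built on the helpers above.
// Illustrative sketch only -- the actual generated code may differ in detail:
//
//	VKAPI_ATTR VkResult VKAPI_CALL createFence (VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence)
//	{
//		VK_NULL_RETURN((*pFence = allocateNonDispHandle<Fence, VkFence>(device, pCreateInfo, pAllocator)));
//	}
//
//	VKAPI_ATTR void VKAPI_CALL destroyFence (VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator)
//	{
//		DE_UNREF(device);
//		freeNonDispHandle<Fence, VkFence>(fence, pAllocator);
//	}
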
class Instance
{
public:
	Instance (const VkInstanceCreateInfo* instanceInfo);
	~Instance (void) {}

	PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
	const tcu::StaticFunctionLibrary m_functions;
};

class SurfaceKHR
{
public:
	SurfaceKHR (VkInstance, const VkXlibSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkXcbSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkWaylandSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkMirSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkAndroidSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkWin32SurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkDisplaySurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkViSurfaceCreateInfoNN*) {}
	SurfaceKHR (VkInstance, const VkIOSSurfaceCreateInfoMVK*) {}
	SurfaceKHR (VkInstance, const VkMacOSSurfaceCreateInfoMVK*) {}
	~SurfaceKHR (void) {}
};

class DisplayModeKHR
{
public:
	DisplayModeKHR (VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
	~DisplayModeKHR (void) {}
};

class DebugReportCallbackEXT
{
public:
	DebugReportCallbackEXT (VkInstance, const VkDebugReportCallbackCreateInfoEXT*) {}
	~DebugReportCallbackEXT (void) {}
};

class Device
{
public:
	Device (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
	~Device (void) {}

	PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }

private:
	const tcu::StaticFunctionLibrary m_functions;
};

class Pipeline
{
public:
	Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
	Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
};

class SwapchainKHR
{
public:
	SwapchainKHR (VkDevice, const VkSwapchainCreateInfoKHR*) {}
	~SwapchainKHR (void) {}
};

class SamplerYcbcrConversion
{
public:
	SamplerYcbcrConversion (VkDevice, const VkSamplerYcbcrConversionCreateInfo*) {}
};

class Buffer
{
public:
	Buffer (VkDevice, const VkBufferCreateInfo* pCreateInfo)
		: m_size (pCreateInfo->size)
	{
	}

	VkDeviceSize getSize (void) const { return m_size; }

private:
	const VkDeviceSize m_size;
};

VkExternalMemoryHandleTypeFlags getExternalTypesHandle (const VkImageCreateInfo* pCreateInfo)
{
	const VkExternalMemoryImageCreateInfo* const externalInfo = findStructure<VkExternalMemoryImageCreateInfo>(pCreateInfo->pNext);

	return externalInfo ? externalInfo->handleTypes : 0u;
}

class Image
{
public:
	Image (VkDevice, const VkImageCreateInfo* pCreateInfo)
		: m_imageType			(pCreateInfo->imageType)
		, m_format				(pCreateInfo->format)
		, m_extent				(pCreateInfo->extent)
		, m_arrayLayers			(pCreateInfo->arrayLayers)
		, m_samples				(pCreateInfo->samples)
		, m_usage				(pCreateInfo->usage)
		, m_flags				(pCreateInfo->flags)
		, m_externalHandleTypes	(getExternalTypesHandle(pCreateInfo))
	{
	}

	VkImageType getImageType (void) const { return m_imageType; }
	VkFormat getFormat (void) const { return m_format; }
	VkExtent3D getExtent (void) const { return m_extent; }
	deUint32 getArrayLayers (void) const { return m_arrayLayers; }
	VkSampleCountFlagBits getSamples (void) const { return m_samples; }
	VkImageUsageFlags getUsage (void) const { return m_usage; }
	VkImageCreateFlags getFlags (void) const { return m_flags; }
	VkExternalMemoryHandleTypeFlags getExternalHandleTypes (void) const { return m_externalHandleTypes; }

private:
	const VkImageType m_imageType;
	const VkFormat m_format;
	const VkExtent3D m_extent;
	const deUint32 m_arrayLayers;
	const VkSampleCountFlagBits m_samples;
	const VkImageUsageFlags m_usage;
	const VkImageCreateFlags m_flags;
	const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
};

void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
{
	// \todo [2015-12-03 pyry] Alignment requirements?
	// \todo [2015-12-03 pyry] Empty allocations okay?
	if (pAllocInfo->allocationSize > 0)
	{
		void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
		if (!heapPtr)
			throw std::bad_alloc();
		return heapPtr;
	}
	else
		return DE_NULL;
}

void freeHeap (void* ptr)
{
	deFree(ptr);
}
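
// Device memory is modelled either as a plain host heap allocation (PrivateDeviceMemory)
// or, on Android O and newer, as an AHardwareBuffer-backed allocation
// (ExternalDeviceMemoryAndroid). Both implement the map()/unmap() interface below,
// which backs vkMapMemory()/vkUnmapMemory().
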
class DeviceMemory
{
public:
	virtual ~DeviceMemory (void) {}
	virtual void* map (void) = 0;
	virtual void unmap (void) = 0;
};

class PrivateDeviceMemory : public DeviceMemory
{
public:
	PrivateDeviceMemory (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
		: m_memory(allocateHeap(pAllocInfo))
	{
		// \todo [2016-08-03 pyry] In some cases leaving data uninitialized would help valgrind analysis,
		//                         but currently it mostly hinders it.
		if (m_memory)
			deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
	}
	virtual ~PrivateDeviceMemory (void)
	{
		freeHeap(m_memory);
	}

	virtual void* map (void) /*override*/ { return m_memory; }
	virtual void unmap (void) /*override*/ {}

private:
	void* const m_memory;
};

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
AHardwareBuffer* findOrCreateHwBuffer (const VkMemoryAllocateInfo* pAllocInfo)
{
	const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
	const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
	const VkMemoryDedicatedAllocateInfo* const dedicatedInfo = findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
	const Image* const image = dedicatedInfo && !!dedicatedInfo->image ? reinterpret_cast<const Image*>(dedicatedInfo->image.getInternal()) : DE_NULL;
	AHardwareBuffer* hwbuffer = DE_NULL;

	// Import and export aren't mutually exclusive; we can have both simultaneously.
	DE_ASSERT((importInfo && importInfo->buffer.internal) ||
			  (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));

	if (importInfo && importInfo->buffer.internal)
	{
		hwbuffer = (AHardwareBuffer*)importInfo->buffer.internal;
		AHardwareBuffer_acquire(hwbuffer);
	}
	else if (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
	{
		AHardwareBuffer_Desc hwbufferDesc;
		deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));

		if (image)
		{
			hwbufferDesc.width = image->getExtent().width;
			hwbufferDesc.height = image->getExtent().height;
			hwbufferDesc.layers = image->getArrayLayers();
			switch (image->getFormat())
			{
				case VK_FORMAT_R8G8B8A8_UNORM:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
					break;
				case VK_FORMAT_R8G8B8_UNORM:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
					break;
				case VK_FORMAT_R5G6B5_UNORM_PACK16:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
					break;
				case VK_FORMAT_R16G16B16A16_SFLOAT:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
					break;
				case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
					hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
					break;
				default:
					DE_FATAL("Unsupported image format for Android hardware buffer export");
					break;
			}
			if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
				hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
			if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
				hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
			// if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
			//	hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

			// Make sure we have at least one AHB GPU usage, even if the image doesn't have any
			// Vulkan usages corresponding to AHB GPU usages.
			if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
				hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
		}
		else
		{
			hwbufferDesc.width = static_cast<deUint32>(pAllocInfo->allocationSize);
			hwbufferDesc.height = 1;
			hwbufferDesc.layers = 1;
			hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB;
			hwbufferDesc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
		}

		AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
	}

	return hwbuffer;
}

class ExternalDeviceMemoryAndroid : public DeviceMemory
{
public:
	ExternalDeviceMemoryAndroid (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
		: m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
	{}
	virtual ~ExternalDeviceMemoryAndroid (void)
	{
		if (m_hwbuffer)
			AHardwareBuffer_release(m_hwbuffer);
	}

	virtual void* map (void) /*override*/
	{
		void* p;
		AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, NULL, &p);
		return p;
	}

	virtual void unmap (void) /*override*/ { AHardwareBuffer_unlock(m_hwbuffer, NULL); }

	AHardwareBuffer* getHwBuffer (void) { return m_hwbuffer; }

private:
	AHardwareBuffer* const m_hwbuffer;
};
#endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)

class IndirectCommandsLayoutNVX
{
public:
	IndirectCommandsLayoutNVX (VkDevice, const VkIndirectCommandsLayoutCreateInfoNVX*)
	{}
};

class ObjectTableNVX
{
public:
	ObjectTableNVX (VkDevice, const VkObjectTableCreateInfoNVX*)
	{}
};

class ValidationCacheEXT
{
public:
	ValidationCacheEXT (VkDevice, const VkValidationCacheCreateInfoEXT*)
	{}
};

class CommandBuffer
{
public:
	CommandBuffer (VkDevice, VkCommandPool, VkCommandBufferLevel)
	{}
};

class DescriptorUpdateTemplate
{
public:
	DescriptorUpdateTemplate (VkDevice, const VkDescriptorUpdateTemplateCreateInfo*)
	{}
};

class CommandPool
{
public:
	CommandPool (VkDevice device, const VkCommandPoolCreateInfo*)
		: m_device(device)
	{}
	~CommandPool (void);

	VkCommandBuffer allocate (VkCommandBufferLevel level);
	void free (VkCommandBuffer buffer);

private:
	const VkDevice m_device;

	vector<CommandBuffer*> m_buffers;
};

CommandPool::~CommandPool (void)
{
	for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
		delete m_buffers[ndx];
}

VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
{
	CommandBuffer* const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);

	try
	{
		m_buffers.push_back(impl);
	}
	catch (...)
	{
		delete impl;
		throw;
	}

	return reinterpret_cast<VkCommandBuffer>(impl);
}

void CommandPool::free (VkCommandBuffer buffer)
{
	CommandBuffer* const impl = reinterpret_cast<CommandBuffer*>(buffer);

	for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
	{
		if (m_buffers[ndx] == impl)
		{
			std::swap(m_buffers[ndx], m_buffers.back());
			m_buffers.pop_back();
			delete impl;
			return;
		}
	}

	DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
}

class DescriptorSet
{
public:
	DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
};

class DescriptorPool
{
public:
	DescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
		: m_device	(device)
		, m_flags	(pCreateInfo->flags)
	{}
	~DescriptorPool (void)
	{
		reset();
	}

	VkDescriptorSet allocate (VkDescriptorSetLayout setLayout);
	void free (VkDescriptorSet set);

	void reset (void);

private:
	const VkDevice m_device;
	const VkDescriptorPoolCreateFlags m_flags;

	vector<DescriptorSet*> m_managedSets;
};

VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
{
	DescriptorSet* const impl = new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);

	try
	{
		m_managedSets.push_back(impl);
	}
	catch (...)
	{
		delete impl;
		throw;
	}

	return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
}

void DescriptorPool::free (VkDescriptorSet set)
{
	DescriptorSet* const impl = reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());

	DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
	DE_UNREF(m_flags);

	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
	{
		if (m_managedSets[ndx] == impl)
		{
			std::swap(m_managedSets[ndx], m_managedSets.back());
			m_managedSets.pop_back();
			delete impl;
			return;
		}
	}

	DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
}

void DescriptorPool::reset (void)
{
	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
		delete m_managedSets[ndx];
	m_managedSets.clear();
}

// API implementation

extern "C"
{

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
{
	return reinterpret_cast<Device*>(device)->getProcAddr(pName);
}

VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
	deUint32 allocNdx;
	try
	{
		for (allocNdx = 0; allocNdx < count; allocNdx++)
			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

		return VK_SUCCESS;
	}
	catch (const std::bad_alloc&)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}
	catch (VkResult err)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return err;
	}
}

VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
	deUint32 allocNdx;
	try
	{
		for (allocNdx = 0; allocNdx < count; allocNdx++)
			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);

		return VK_SUCCESS;
	}
	catch (const std::bad_alloc&)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return VK_ERROR_OUT_OF_HOST_MEMORY;
	}
	catch (VkResult err)
	{
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

		return err;
	}
}

VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
{
	if (pDevices && *pPhysicalDeviceCount >= 1u)
		*pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);

	*pPhysicalDeviceCount = 1;

	return VK_SUCCESS;
}

VkResult enumerateExtensions (deUint32 numExtensions, const VkExtensionProperties* extensions, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
	const deUint32 dstSize = pPropertyCount ? *pPropertyCount : 0;

	if (pPropertyCount)
		*pPropertyCount = numExtensions;

	if (pProperties)
	{
		for (deUint32 ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
			pProperties[ndx] = extensions[ndx];

		if (dstSize < numExtensions)
			return VK_INCOMPLETE;
	}

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
	static const VkExtensionProperties s_extensions[] =
	{
		{ "VK_KHR_get_physical_device_properties2", 1u },
		{ "VK_KHR_external_memory_capabilities", 1u },
	};

	if (!pLayerName)
		return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
	else
		return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
{
	DE_UNREF(physicalDevice);

	static const VkExtensionProperties s_extensions[] =
	{
		{ "VK_KHR_bind_memory2", 1u },
		{ "VK_KHR_external_memory", 1u },
		{ "VK_KHR_get_memory_requirements2", 1u },
		{ "VK_KHR_maintenance1", 1u },
		{ "VK_KHR_sampler_ycbcr_conversion", 1u },
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
		{ "VK_ANDROID_external_memory_android_hardware_buffer", 1u },
#endif
	};

	if (!pLayerName)
		return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
	else
		return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
{
	DE_UNREF(physicalDevice);

	// Enable all features to allow as many tests as possible to run.
	pFeatures->robustBufferAccess = VK_TRUE;
	pFeatures->fullDrawIndexUint32 = VK_TRUE;
	pFeatures->imageCubeArray = VK_TRUE;
	pFeatures->independentBlend = VK_TRUE;
	pFeatures->geometryShader = VK_TRUE;
	pFeatures->tessellationShader = VK_TRUE;
	pFeatures->sampleRateShading = VK_TRUE;
	pFeatures->dualSrcBlend = VK_TRUE;
	pFeatures->logicOp = VK_TRUE;
	pFeatures->multiDrawIndirect = VK_TRUE;
	pFeatures->drawIndirectFirstInstance = VK_TRUE;
	pFeatures->depthClamp = VK_TRUE;
	pFeatures->depthBiasClamp = VK_TRUE;
	pFeatures->fillModeNonSolid = VK_TRUE;
	pFeatures->depthBounds = VK_TRUE;
	pFeatures->wideLines = VK_TRUE;
	pFeatures->largePoints = VK_TRUE;
	pFeatures->alphaToOne = VK_TRUE;
	pFeatures->multiViewport = VK_TRUE;
	pFeatures->samplerAnisotropy = VK_TRUE;
	pFeatures->textureCompressionETC2 = VK_TRUE;
	pFeatures->textureCompressionASTC_LDR = VK_TRUE;
	pFeatures->textureCompressionBC = VK_TRUE;
	pFeatures->occlusionQueryPrecise = VK_TRUE;
	pFeatures->pipelineStatisticsQuery = VK_TRUE;
	pFeatures->vertexPipelineStoresAndAtomics = VK_TRUE;
	pFeatures->fragmentStoresAndAtomics = VK_TRUE;
	pFeatures->shaderTessellationAndGeometryPointSize = VK_TRUE;
	pFeatures->shaderImageGatherExtended = VK_TRUE;
	pFeatures->shaderStorageImageExtendedFormats = VK_TRUE;
	pFeatures->shaderStorageImageMultisample = VK_TRUE;
	pFeatures->shaderStorageImageReadWithoutFormat = VK_TRUE;
	pFeatures->shaderStorageImageWriteWithoutFormat = VK_TRUE;
	pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderSampledImageArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderStorageImageArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderClipDistance = VK_TRUE;
	pFeatures->shaderCullDistance = VK_TRUE;
	pFeatures->shaderFloat64 = VK_TRUE;
	pFeatures->shaderInt64 = VK_TRUE;
	pFeatures->shaderInt16 = VK_TRUE;
	pFeatures->shaderResourceResidency = VK_TRUE;
	pFeatures->shaderResourceMinLod = VK_TRUE;
	pFeatures->sparseBinding = VK_TRUE;
	pFeatures->sparseResidencyBuffer = VK_TRUE;
	pFeatures->sparseResidencyImage2D = VK_TRUE;
	pFeatures->sparseResidencyImage3D = VK_TRUE;
	pFeatures->sparseResidency2Samples = VK_TRUE;
	pFeatures->sparseResidency4Samples = VK_TRUE;
	pFeatures->sparseResidency8Samples = VK_TRUE;
	pFeatures->sparseResidency16Samples = VK_TRUE;
	pFeatures->sparseResidencyAliased = VK_TRUE;
	pFeatures->variableMultisampleRate = VK_TRUE;
	pFeatures->inheritedQueries = VK_TRUE;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
{
	deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));

	props->apiVersion = VK_API_VERSION_1_1;
	props->driverVersion = 1u;
	props->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;

	deMemcpy(props->deviceName, "null", 5);

	// Spec minmax
	props->limits.maxImageDimension1D = 4096;
	props->limits.maxImageDimension2D = 4096;
	props->limits.maxImageDimension3D = 256;
	props->limits.maxImageDimensionCube = 4096;
	props->limits.maxImageArrayLayers = 256;
	props->limits.maxTexelBufferElements = 65536;
	props->limits.maxUniformBufferRange = 16384;
	props->limits.maxStorageBufferRange = 1u<<27;
	props->limits.maxPushConstantsSize = 128;
	props->limits.maxMemoryAllocationCount = 4096;
	props->limits.maxSamplerAllocationCount = 4000;
	props->limits.bufferImageGranularity = 131072;
	props->limits.sparseAddressSpaceSize = 1u<<31;
	props->limits.maxBoundDescriptorSets = 4;
	props->limits.maxPerStageDescriptorSamplers = 16;
	props->limits.maxPerStageDescriptorUniformBuffers = 12;
	props->limits.maxPerStageDescriptorStorageBuffers = 4;
	props->limits.maxPerStageDescriptorSampledImages = 16;
	props->limits.maxPerStageDescriptorStorageImages = 4;
	props->limits.maxPerStageDescriptorInputAttachments = 4;
	props->limits.maxPerStageResources = 128;
	props->limits.maxDescriptorSetSamplers = 96;
	props->limits.maxDescriptorSetUniformBuffers = 72;
	props->limits.maxDescriptorSetUniformBuffersDynamic = 8;
	props->limits.maxDescriptorSetStorageBuffers = 24;
	props->limits.maxDescriptorSetStorageBuffersDynamic = 4;
	props->limits.maxDescriptorSetSampledImages = 96;
	props->limits.maxDescriptorSetStorageImages = 24;
	props->limits.maxDescriptorSetInputAttachments = 4;
	props->limits.maxVertexInputAttributes = 16;
	props->limits.maxVertexInputBindings = 16;
	props->limits.maxVertexInputAttributeOffset = 2047;
	props->limits.maxVertexInputBindingStride = 2048;
	props->limits.maxVertexOutputComponents = 64;
	props->limits.maxTessellationGenerationLevel = 64;
	props->limits.maxTessellationPatchSize = 32;
	props->limits.maxTessellationControlPerVertexInputComponents = 64;
	props->limits.maxTessellationControlPerVertexOutputComponents = 64;
	props->limits.maxTessellationControlPerPatchOutputComponents = 120;
	props->limits.maxTessellationControlTotalOutputComponents = 2048;
	props->limits.maxTessellationEvaluationInputComponents = 64;
	props->limits.maxTessellationEvaluationOutputComponents = 64;
	props->limits.maxGeometryShaderInvocations = 32;
	props->limits.maxGeometryInputComponents = 64;
	props->limits.maxGeometryOutputComponents = 64;
	props->limits.maxGeometryOutputVertices = 256;
	props->limits.maxGeometryTotalOutputComponents = 1024;
	props->limits.maxFragmentInputComponents = 64;
	props->limits.maxFragmentOutputAttachments = 4;
	props->limits.maxFragmentDualSrcAttachments = 1;
	props->limits.maxFragmentCombinedOutputResources = 4;
	props->limits.maxComputeSharedMemorySize = 16384;
	props->limits.maxComputeWorkGroupCount[0] = 65535;
	props->limits.maxComputeWorkGroupCount[1] = 65535;
	props->limits.maxComputeWorkGroupCount[2] = 65535;
	props->limits.maxComputeWorkGroupInvocations = 128;
	props->limits.maxComputeWorkGroupSize[0] = 128;
	props->limits.maxComputeWorkGroupSize[1] = 128;
	props->limits.maxComputeWorkGroupSize[2] = 128;
	props->limits.subPixelPrecisionBits = 4;
	props->limits.subTexelPrecisionBits = 4;
	props->limits.mipmapPrecisionBits = 4;
	props->limits.maxDrawIndexedIndexValue = 0xffffffffu;
	props->limits.maxDrawIndirectCount = (1u<<16) - 1u;
	props->limits.maxSamplerLodBias = 2.0f;
	props->limits.maxSamplerAnisotropy = 16.0f;
	props->limits.maxViewports = 16;
	props->limits.maxViewportDimensions[0] = 4096;
	props->limits.maxViewportDimensions[1] = 4096;
	props->limits.viewportBoundsRange[0] = -8192.f;
	props->limits.viewportBoundsRange[1] = 8191.f;
	props->limits.viewportSubPixelBits = 0;
	props->limits.minMemoryMapAlignment = 64;
	props->limits.minTexelBufferOffsetAlignment = 256;
	props->limits.minUniformBufferOffsetAlignment = 256;
	props->limits.minStorageBufferOffsetAlignment = 256;
	props->limits.minTexelOffset = -8;
	props->limits.maxTexelOffset = 7;
	props->limits.minTexelGatherOffset = -8;
	props->limits.maxTexelGatherOffset = 7;
	props->limits.minInterpolationOffset = -0.5f;
	props->limits.maxInterpolationOffset = 0.5f; // -1ulp
	props->limits.subPixelInterpolationOffsetBits = 4;
	props->limits.maxFramebufferWidth = 4096;
	props->limits.maxFramebufferHeight = 4096;
	props->limits.maxFramebufferLayers = 256;
	props->limits.framebufferColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferNoAttachmentsSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxColorAttachments = 4;
	props->limits.sampledImageColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT;
	props->limits.sampledImageDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxSampleMaskWords = 1;
	props->limits.timestampComputeAndGraphics = VK_TRUE;
	props->limits.timestampPeriod = 1.0f;
	props->limits.maxClipDistances = 8;
	props->limits.maxCullDistances = 8;
	props->limits.maxCombinedClipAndCullDistances = 8;
	props->limits.discreteQueuePriorities = 2;
	props->limits.pointSizeRange[0] = 1.0f;
	props->limits.pointSizeRange[1] = 64.0f; // -1ulp
	props->limits.lineWidthRange[0] = 1.0f;
	props->limits.lineWidthRange[1] = 8.0f; // -1ulp
	props->limits.pointSizeGranularity = 1.0f;
	props->limits.lineWidthGranularity = 1.0f;
	props->limits.strictLines = 0;
	props->limits.standardSampleLocations = VK_TRUE;
	props->limits.optimalBufferCopyOffsetAlignment = 256;
	props->limits.optimalBufferCopyRowPitchAlignment = 256;
	props->limits.nonCoherentAtomSize = 128;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
{
	if (props && *count >= 1u)
	{
		deMemset(props, 0, sizeof(VkQueueFamilyProperties));

		props->queueCount = 4u;
		props->queueFlags = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
		props->timestampValidBits = 64;
	}

	*count = 1u;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
{
	deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));

	props->memoryTypeCount = 1u;
	props->memoryTypes[0].heapIndex = 0u;
	props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
		| VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
		| VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

	props->memoryHeapCount = 1u;
	props->memoryHeaps[0].size = 1ull << 31;
	props->memoryHeaps[0].flags = 0u;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
{
	const VkFormatFeatureFlags allFeatures = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
		| VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
		| VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
		| VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
		| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
		| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
		| VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
		| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
		| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
		| VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
		| VK_FORMAT_FEATURE_BLIT_SRC_BIT
		| VK_FORMAT_FEATURE_BLIT_DST_BIT
		| VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
		| VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
		| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
		| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
		| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
		| VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
		| VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;

	pFormatProperties->linearTilingFeatures = allFeatures;
	pFormatProperties->optimalTilingFeatures = allFeatures;
	pFormatProperties->bufferFeatures = allFeatures;

	if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
		pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
}

VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
{
	DE_UNREF(physicalDevice);
	DE_UNREF(format);
	DE_UNREF(type);
	DE_UNREF(tiling);
	DE_UNREF(usage);
	DE_UNREF(flags);

	pImageFormatProperties->maxArrayLayers = 8;
	pImageFormatProperties->maxExtent.width = 4096;
	pImageFormatProperties->maxExtent.height = 4096;
	pImageFormatProperties->maxExtent.depth = 4096;
	pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
	pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
	pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
{
	DE_UNREF(device);
	DE_UNREF(queueFamilyIndex);

	if (pQueue)
		*pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);
}

VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
{
	const Buffer* buffer = reinterpret_cast<const Buffer*>(bufferHandle.getInternal());

	requirements->memoryTypeBits = 1u;
	requirements->size = buffer->getSize();
	requirements->alignment = (VkDeviceSize)1u;
}

VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
{
	return (VkDeviceSize)getPixelSize(mapVkFormat(format))
		 * (VkDeviceSize)extent.width
		 * (VkDeviceSize)extent.height
		 * (VkDeviceSize)extent.depth
		 * (VkDeviceSize)samples;
}

VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
{
	try
	{
		const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
		const size_t blockSize = tcu::getBlockSize(tcuFormat);
		const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
		const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
		const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
		const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());

		return blockSize*numBlocksX*numBlocksY*numBlocksZ;
	}
	catch (...)
	{
		return 0; // Unsupported compressed format
	}
}

VkDeviceSize getYCbCrImageDataSize (VkFormat format, VkExtent3D extent)
{
	const PlanarFormatDescription desc = getPlanarFormatDescription(format);
	VkDeviceSize totalSize = 0;

	DE_ASSERT(extent.depth == 1);

	for (deUint32 planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
	{
		const deUint32 planeW = extent.width / desc.planes[planeNdx].widthDivisor;
		const deUint32 planeH = extent.height / desc.planes[planeNdx].heightDivisor;
		const deUint32 elementSize = desc.planes[planeNdx].elementSizeBytes;

		totalSize = (VkDeviceSize)deAlign64((deInt64)totalSize, elementSize);
		totalSize += planeW * planeH * elementSize;
	}

	return totalSize;
}

VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
{
	const Image* image = reinterpret_cast<const Image*>(imageHandle.getInternal());

	requirements->memoryTypeBits = 1u;
	requirements->alignment = 16u;

	if (isCompressedFormat(image->getFormat()))
		requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
	else if (isYCbCrFormat(image->getFormat()))
		requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
	else
		requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
}

VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
{
	const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
	const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);

	if ((exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
		|| (importInfo && importInfo->buffer.internal))
	{
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
		VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
#else
		return VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR;
#endif
	}
	else
	{
		VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
	}
}

VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
{
	DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());

	DE_UNREF(size);
	DE_UNREF(flags);

	*ppData = (deUint8*)memory->map() + offset;

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memHandle)
{
	DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());

	DE_UNREF(device);

	memory->unmap();
}

VKAPI_ATTR VkResult VKAPI_CALL getMemoryAndroidHardwareBufferANDROID (VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, pt::AndroidHardwareBufferPtr* pBuffer)
{
	DE_UNREF(device);

#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
	DeviceMemory* const memory = reinterpret_cast<ExternalDeviceMemoryAndroid*>(pInfo->memory.getInternal());
	ExternalDeviceMemoryAndroid* const androidMemory = static_cast<ExternalDeviceMemoryAndroid*>(memory);

	AHardwareBuffer* hwbuffer = androidMemory->getHwBuffer();
	AHardwareBuffer_acquire(hwbuffer);
	pBuffer->internal = hwbuffer;
#else
	DE_UNREF(pInfo);
	DE_UNREF(pBuffer);
#endif

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
{
	DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());

	for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
	{
		try
		{
			pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
		}
		catch (const std::bad_alloc&)
		{
			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

			return VK_ERROR_OUT_OF_HOST_MEMORY;
		}
		catch (VkResult res)
		{
			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

			return res;
		}
	}

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
{
	DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());

	for (deUint32 ndx = 0; ndx < count; ++ndx)
		poolImpl->free(pDescriptorSets[ndx]);
}

VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
{
	DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());

	poolImpl->reset();

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
{
	DE_UNREF(device);

	if (pAllocateInfo && pCommandBuffers)
	{
		CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());

		for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
			pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
	}

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
{
	CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());

	DE_UNREF(device);

	for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
		poolImpl->free(pCommandBuffers[ndx]);
}

VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
{
	DE_UNREF(pAllocator);
	VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
}

VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
{
	for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
	{
		pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
	}

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties)
{
	DE_UNREF(physicalDevice);
	DE_UNREF(pExternalBufferInfo);

	pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
	pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
	pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0;

	if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
	{
		pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
		pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
	}
}

VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties)
{
	const VkPhysicalDeviceExternalImageFormatInfo* const externalInfo = findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
	VkExternalImageFormatProperties* const externalProperties = findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
	VkResult result;

	result = getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
	if (result != VK_SUCCESS)
		return result;

	if (externalInfo && externalInfo->handleType != 0)
	{
		if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM
			  || pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM
			  || pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16
			  || pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT
			  || pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
			return VK_ERROR_FORMAT_NOT_SUPPORTED;

		if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT
										 | VK_IMAGE_USAGE_TRANSFER_DST_BIT
										 | VK_IMAGE_USAGE_SAMPLED_BIT
										 | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
										 /*| VK_IMAGE_CREATE_PROTECTED_BIT_KHR*/
										 /*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR*/))
			!= 0)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if (externalProperties)
		{
			externalProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR
				| VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR
				| VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
			externalProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
			externalProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
		}
	}

	return VK_SUCCESS;
}

// \note getInstanceProcAddr is a little bit special:
// vkNullDriverImpl.inl needs it to define s_platformFunctions but
// getInstanceProcAddr() implementation needs other entry points from
// vkNullDriverImpl.inl.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName);

#include "vkNullDriverImpl.inl"

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
{
	if (instance)
	{
		return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
	}
	else
	{
		const std::string name = pName;

		if (name == "vkCreateInstance")
			return (PFN_vkVoidFunction)createInstance;
		else if (name == "vkEnumerateInstanceExtensionProperties")
			return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
		else if (name == "vkEnumerateInstanceLayerProperties")
			return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
		else
			return (PFN_vkVoidFunction)DE_NULL;
	}
}

} // extern "C"

Instance::Instance (const VkInstanceCreateInfo*)
	: m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
{
}

Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
	: m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
{
}

class NullDriverLibrary : public Library
{
public:
	NullDriverLibrary (void)
		: m_library	(s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
		, m_driver	(m_library)
	{}

	const PlatformInterface& getPlatformInterface (void) const { return m_driver; }
	const tcu::FunctionLibrary& getFunctionLibrary (void) const { return m_library; }

private:
	const tcu::StaticFunctionLibrary m_library;
	const PlatformDriver m_driver;
};

} // anonymous

Library* createNullDriver (void)
{
	return new NullDriverLibrary();
}

} // vk
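
// Minimal usage sketch (illustrative, not part of the driver): the null driver is handed to
// the framework through createNullDriver(), and the caller owns the returned Library. The
// de::UniquePtr wrapper below is an assumption about how a caller might manage that ownership.
//
//	de::UniquePtr<vk::Library> library (vk::createNullDriver());
//	const vk::PlatformInterface& vkPlatform = library->getPlatformInterface();
//	// Instance-level entry points can also be resolved by name, e.g.
//	// library->getFunctionLibrary().getFunction("vkCreateInstance").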