/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <inttypes.h>
#include <malloc.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <algorithm>
#include <array>

#include <log/log.h>
#include <utils/Errors.h>

#include "null_driver_gen.h"

using namespace null_driver;

struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkPhysicalDevice_T physical_device;
    uint64_t next_callback_handle;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

const VkDeviceSize kMaxDeviceMemory = 0x10000000;  // 256 MiB, arbitrary

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop

// -----------------------------------------------------------------------------

namespace {

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}

// The VkPhysicalDevice handle we expose is a pointer to the physical_device
// member embedded in VkInstance_T, so the owning instance can be recovered by
// subtracting that member's offset.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(*next_handle == kHandleMask,
             "non-dispatchable handles of type=%" PRIu64
             " are about to overflow",
             type);
    return (UINT64_C(1) << 63) | ((type & 0x7F) << 56) |
           ((*next_handle)++ & kHandleMask);
}
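
// For illustration: with the layout above, the first VkFence allocated from a
// device (HandleType::kFence == 6) is encoded as 0x8600000000000000, the next
// as 0x8600000000000001, and so on. Only bit 63 matters for telling these
// apart from pointer-backed handles.
static_assert(HandleType::kNumTypes <= 0x80,
              "handle type enum no longer fits in the 7-bit type field");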

template <class Handle>
Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &instance->next_callback_handle));
}

template <class Handle>
Handle AllocHandle(VkDevice device, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &device->next_handle[type]));
}

VKAPI_ATTR void* DefaultAllocate(void*,
                                 size_t size,
                                 size_t alignment,
                                 VkSystemAllocationScope) {
    void* ptr = nullptr;
    // Vulkan requires 'alignment' to be a power of two, but posix_memalign
    // additionally requires that it be at least sizeof(void*).
    int ret = posix_memalign(&ptr, std::max(alignment, sizeof(void*)), size);
    return ret == 0 ? ptr : nullptr;
}

VKAPI_ATTR void* DefaultReallocate(void*,
                                   void* ptr,
                                   size_t size,
                                   size_t alignment,
                                   VkSystemAllocationScope) {
    if (size == 0) {
        free(ptr);
        return nullptr;
    }

    // TODO(jessehall): Right now we never shrink allocations; if the new
    // request is smaller than the existing chunk, we just continue using it.
    // The null driver never reallocs, so this doesn't matter. If that changes,
    // or if this code is copied into some other project, this should probably
    // have a heuristic to allocate-copy-free when doing so will save "enough"
    // space.
    size_t old_size = ptr ? malloc_usable_size(ptr) : 0;
    if (size <= old_size)
        return ptr;

    void* new_ptr = nullptr;
    if (posix_memalign(&new_ptr, std::max(alignment, sizeof(void*)), size) != 0)
        return nullptr;
    if (ptr) {
        memcpy(new_ptr, ptr, std::min(old_size, size));
        free(ptr);
    }
    return new_ptr;
}

VKAPI_ATTR void DefaultFree(void*, void* ptr) {
    free(ptr);
}

const VkAllocationCallbacks kDefaultAllocCallbacks = {
    .pUserData = nullptr,
    .pfnAllocation = DefaultAllocate,
    .pfnReallocation = DefaultReallocate,
    .pfnFree = DefaultFree,
};
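
// These defaults back every allocation in the driver whenever the application
// passes a null VkAllocationCallbacks pointer: CreateInstance substitutes
// kDefaultAllocCallbacks in that case, and the callbacks are copied into the
// instance (and from there into each device) so later per-object allocations
// reuse them.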

}  // namespace

namespace null_driver {

#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
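
// For a pointer-backed type such as CommandPool, the macro above roughly
// expands to:
//
//   CommandPool* GetCommandPoolFromHandle(VkCommandPool h) {
//       return reinterpret_cast<CommandPool*>(uintptr_t(h));
//   }
//   VkCommandPool GetHandleToCommandPool(const CommandPool* obj) {
//       return VkCommandPool(reinterpret_cast<uintptr_t>(obj));
//   }
//
// i.e. the "handle" for these objects is just the object's address cast to the
// non-dispatchable handle type.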

// -----------------------------------------------------------------------------
// Global

VKAPI_ATTR
VkResult EnumerateInstanceExtensionProperties(
    const char* layer_name,
    uint32_t* count,
    VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}
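
// The extension enumerations here and in EnumerateDeviceExtensionProperties
// follow Vulkan's two-call idiom: when 'properties' is null the call only
// reports the extension count; otherwise up to '*count' entries are copied and
// VK_INCOMPLETE is returned if the caller's array was too small for all of
// them.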

VKAPI_ATTR
VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
                        const VkAllocationCallbacks* allocator,
                        VkInstance* out_instance) {
    if (!allocator)
        allocator = &kDefaultAllocCallbacks;

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(allocator->pfnAllocation(
            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->allocator = *allocator;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->next_callback_handle = 0;

    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else if (strcmp(create_info->ppEnabledExtensionNames[i],
                          VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else {
            ALOGW("unsupported extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        }
    }

    *out_instance = instance;
    return VK_SUCCESS;
}

VKAPI_ATTR
PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
}

VKAPI_ATTR
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    return GetInstanceProcAddr(name);
}

// -----------------------------------------------------------------------------
// Instance

void DestroyInstance(VkInstance instance,
                     const VkAllocationCallbacks* /*allocator*/) {
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}

// -----------------------------------------------------------------------------
// PhysicalDevice

VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (physical_devices && *physical_device_count >= 1)
        physical_devices[0] = &instance->physical_device;
    *physical_device_count = 1;
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
                                        uint32_t* count,
                                        VkLayerProperties* /*properties*/) {
    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
    *count = 0;
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
                                            const char* layer_name,
                                            uint32_t* count,
                                            VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
        *count = 0;
        return VK_SUCCESS;
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}

void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION);
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    properties->limits = VkPhysicalDeviceLimits{
        4096,     // maxImageDimension1D
        4096,     // maxImageDimension2D
        256,      // maxImageDimension3D
        4096,     // maxImageDimensionCube
        256,      // maxImageArrayLayers
        65536,    // maxTexelBufferElements
        16384,    // maxUniformBufferRange
        1 << 27,  // maxStorageBufferRange
        128,      // maxPushConstantsSize
        4096,     // maxMemoryAllocationCount
        4000,     // maxSamplerAllocationCount
        1,        // bufferImageGranularity
        0,        // sparseAddressSpaceSize
        4,        // maxBoundDescriptorSets
        16,       // maxPerStageDescriptorSamplers
        12,       // maxPerStageDescriptorUniformBuffers
        4,        // maxPerStageDescriptorStorageBuffers
        16,       // maxPerStageDescriptorSampledImages
        4,        // maxPerStageDescriptorStorageImages
        4,        // maxPerStageDescriptorInputAttachments
        128,      // maxPerStageResources
        96,       // maxDescriptorSetSamplers
        72,       // maxDescriptorSetUniformBuffers
        8,        // maxDescriptorSetUniformBuffersDynamic
        24,       // maxDescriptorSetStorageBuffers
        4,        // maxDescriptorSetStorageBuffersDynamic
        96,       // maxDescriptorSetSampledImages
        24,       // maxDescriptorSetStorageImages
        4,        // maxDescriptorSetInputAttachments
        16,       // maxVertexInputAttributes
        16,       // maxVertexInputBindings
        2047,     // maxVertexInputAttributeOffset
        2048,     // maxVertexInputBindingStride
        64,       // maxVertexOutputComponents
        0,        // maxTessellationGenerationLevel
        0,        // maxTessellationPatchSize
        0,        // maxTessellationControlPerVertexInputComponents
        0,        // maxTessellationControlPerVertexOutputComponents
        0,        // maxTessellationControlPerPatchOutputComponents
        0,        // maxTessellationControlTotalOutputComponents
        0,        // maxTessellationEvaluationInputComponents
        0,        // maxTessellationEvaluationOutputComponents
        0,        // maxGeometryShaderInvocations
        0,        // maxGeometryInputComponents
        0,        // maxGeometryOutputComponents
        0,        // maxGeometryOutputVertices
        0,        // maxGeometryTotalOutputComponents
        64,       // maxFragmentInputComponents
        4,        // maxFragmentOutputAttachments
        0,        // maxFragmentDualSrcAttachments
        4,        // maxFragmentCombinedOutputResources
        16384,    // maxComputeSharedMemorySize
        {65536, 65536, 65536},  // maxComputeWorkGroupCount[3]
        128,                    // maxComputeWorkGroupInvocations
        {128, 128, 64},         // maxComputeWorkGroupSize[3]
        4,                      // subPixelPrecisionBits
        4,                      // subTexelPrecisionBits
        4,                      // mipmapPrecisionBits
        UINT32_MAX,             // maxDrawIndexedIndexValue
        1,                      // maxDrawIndirectCount
        2,                      // maxSamplerLodBias
        1,                      // maxSamplerAnisotropy
        1,                      // maxViewports
        {4096, 4096},           // maxViewportDimensions[2]
        {-8192.0f, 8191.0f},    // viewportBoundsRange[2]
        0,                      // viewportSubPixelBits
        64,                     // minMemoryMapAlignment
        256,                    // minTexelBufferOffsetAlignment
        256,                    // minUniformBufferOffsetAlignment
        256,                    // minStorageBufferOffsetAlignment
        -8,                     // minTexelOffset
        7,                      // maxTexelOffset
        0,                      // minTexelGatherOffset
        0,                      // maxTexelGatherOffset
        0.0f,                   // minInterpolationOffset
        0.0f,                   // maxInterpolationOffset
        0,                      // subPixelInterpolationOffsetBits
        4096,                   // maxFramebufferWidth
        4096,                   // maxFramebufferHeight
        256,                    // maxFramebufferLayers
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferNoAttachmentsSampleCounts
        4,                          // maxColorAttachments
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // sampledImageIntegerSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // storageImageSampleCounts
        1,                          // maxSampleMaskWords
        VK_TRUE,                    // timestampComputeAndGraphics
        1,                          // timestampPeriod
        0,                          // maxClipDistances
        0,                          // maxCullDistances
        0,                          // maxCombinedClipAndCullDistances
        2,                          // discreteQueuePriorities
        {1.0f, 1.0f},               // pointSizeRange[2]
        {1.0f, 1.0f},               // lineWidthRange[2]
        0.0f,                       // pointSizeGranularity
        0.0f,                       // lineWidthGranularity
        VK_TRUE,                    // strictLines
        VK_TRUE,                    // standardSampleLocations
        1,                          // optimalBufferCopyOffsetAlignment
        1,                          // optimalBufferCopyRowPitchAlignment
        64,                         // nonCoherentAtomSize
    };
}

void GetPhysicalDeviceProperties2KHR(VkPhysicalDevice physical_device,
                                     VkPhysicalDeviceProperties2KHR* properties) {
    GetPhysicalDeviceProperties(physical_device, &properties->properties);

    while (properties->pNext) {
        properties =
            reinterpret_cast<VkPhysicalDeviceProperties2KHR*>(properties->pNext);

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wold-style-cast"
        switch ((VkFlags)properties->sType) {
        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID: {
            VkPhysicalDevicePresentationPropertiesANDROID* presentation_properties =
                reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID*>(
                    properties);
#pragma clang diagnostic pop

            // Claim that we do all the right things for the loader to
            // expose KHR_shared_presentable_image on our behalf.
            presentation_properties->sharedImage = VK_TRUE;
        } break;

        default:
            // Silently ignore other extension query structs.
            break;
        }
    }
}
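
// The loop above may treat every chained struct as VkPhysicalDeviceProperties2KHR
// only because all Vulkan extension structs begin with the same sType/pNext
// header; the sType value identifies the real type before the cast.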

void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (!properties || *count > 1)
        *count = 1;
    if (properties && *count == 1) {
        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
                                 VK_QUEUE_TRANSFER_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
    }
}

void GetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physical_device,
    uint32_t* count,
    VkQueueFamilyProperties2KHR* properties) {
    // Note: the caller may pass an array of VkQueueFamilyProperties2KHR, but
    // forwarding just the first element is safe here because we expose exactly
    // one queue family.
    GetPhysicalDeviceQueueFamilyProperties(
        physical_device, count,
        properties ? &properties->queueFamilyProperties : nullptr);
}

void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}

void GetPhysicalDeviceMemoryProperties2KHR(
    VkPhysicalDevice physical_device,
    VkPhysicalDeviceMemoryProperties2KHR* properties) {
    GetPhysicalDeviceMemoryProperties(physical_device,
                                      &properties->memoryProperties);
}

void GetPhysicalDeviceFeatures(VkPhysicalDevice /*gpu*/,
                               VkPhysicalDeviceFeatures* features) {
    *features = VkPhysicalDeviceFeatures{
        VK_TRUE,   // robustBufferAccess
        VK_FALSE,  // fullDrawIndexUint32
        VK_FALSE,  // imageCubeArray
        VK_FALSE,  // independentBlend
        VK_FALSE,  // geometryShader
        VK_FALSE,  // tessellationShader
        VK_FALSE,  // sampleRateShading
        VK_FALSE,  // dualSrcBlend
        VK_FALSE,  // logicOp
        VK_FALSE,  // multiDrawIndirect
        VK_FALSE,  // drawIndirectFirstInstance
        VK_FALSE,  // depthClamp
        VK_FALSE,  // depthBiasClamp
        VK_FALSE,  // fillModeNonSolid
        VK_FALSE,  // depthBounds
        VK_FALSE,  // wideLines
        VK_FALSE,  // largePoints
        VK_FALSE,  // alphaToOne
        VK_FALSE,  // multiViewport
        VK_FALSE,  // samplerAnisotropy
        VK_FALSE,  // textureCompressionETC2
        VK_FALSE,  // textureCompressionASTC_LDR
        VK_FALSE,  // textureCompressionBC
        VK_FALSE,  // occlusionQueryPrecise
        VK_FALSE,  // pipelineStatisticsQuery
        VK_FALSE,  // vertexPipelineStoresAndAtomics
        VK_FALSE,  // fragmentStoresAndAtomics
        VK_FALSE,  // shaderTessellationAndGeometryPointSize
        VK_FALSE,  // shaderImageGatherExtended
        VK_FALSE,  // shaderStorageImageExtendedFormats
        VK_FALSE,  // shaderStorageImageMultisample
        VK_FALSE,  // shaderStorageImageReadWithoutFormat
        VK_FALSE,  // shaderStorageImageWriteWithoutFormat
        VK_FALSE,  // shaderUniformBufferArrayDynamicIndexing
        VK_FALSE,  // shaderSampledImageArrayDynamicIndexing
        VK_FALSE,  // shaderStorageBufferArrayDynamicIndexing
        VK_FALSE,  // shaderStorageImageArrayDynamicIndexing
        VK_FALSE,  // shaderClipDistance
        VK_FALSE,  // shaderCullDistance
        VK_FALSE,  // shaderFloat64
        VK_FALSE,  // shaderInt64
        VK_FALSE,  // shaderInt16
        VK_FALSE,  // shaderResourceResidency
        VK_FALSE,  // shaderResourceMinLod
        VK_FALSE,  // sparseBinding
        VK_FALSE,  // sparseResidencyBuffer
        VK_FALSE,  // sparseResidencyImage2D
        VK_FALSE,  // sparseResidencyImage3D
        VK_FALSE,  // sparseResidency2Samples
        VK_FALSE,  // sparseResidency4Samples
        VK_FALSE,  // sparseResidency8Samples
        VK_FALSE,  // sparseResidency16Samples
        VK_FALSE,  // sparseResidencyAliased
        VK_FALSE,  // variableMultisampleRate
        VK_FALSE,  // inheritedQueries
    };
}

void GetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physical_device,
                                   VkPhysicalDeviceFeatures2KHR* features) {
    GetPhysicalDeviceFeatures(physical_device, &features->features);
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    if (!allocator)
        allocator = &instance->allocator;
    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->allocator = *allocator;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
            ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
        }
    }

    *out_device = device;
    return VK_SUCCESS;
}

void DestroyDevice(VkDevice device,
                   const VkAllocationCallbacks* /*allocator*/) {
    if (!device)
        return;
    device->allocator.pfnFree(device->allocator.pUserData, device);
}

void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}

// -----------------------------------------------------------------------------
// CommandPool

struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)

VkResult CreateCommandPool(VkDevice device,
                           const VkCommandPoolCreateInfo* /*create_info*/,
                           const VkAllocationCallbacks* allocator,
                           VkCommandPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCommandPool(pool);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        const VkAllocationCallbacks* /*allocator*/) {
    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}

void FreeCommandBuffers(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        uint32_t count,
                        const VkCommandBuffer* cmdbufs) {
    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
    for (uint32_t i = 0; i < count; i++)
        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
}

// -----------------------------------------------------------------------------
// DeviceMemory

struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
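
// DeviceMemory is a header followed directly by the backing storage: the
// zero-length 'data' array marks where the payload begins, so an allocation of
// sizeof(DeviceMemory) + allocationSize bytes lets MapMemory hand back a host
// pointer (&mem->data[0] + offset) straight into that storage.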

VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Buffer

struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToBuffer(buffer);
    return VK_SUCCESS;
}

void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyBuffer(VkDevice device,
                   VkBuffer buffer_handle,
                   const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    allocator->pfnFree(allocator->pUserData, buffer);
}

// -----------------------------------------------------------------------------
// Image

struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     const VkAllocationCallbacks* allocator,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }

    // Compute the size in 64 bits so large extents can't overflow the
    // intermediate 32-bit product.
    VkDeviceSize size = VkDeviceSize(create_info->extent.width) *
                        create_info->extent.height * create_info->arrayLayers *
                        create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    if (!allocator)
        allocator = &device->allocator;
    Image* image = static_cast<Image*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Image), alignof(Image),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToImage(image);
    return VK_SUCCESS;
}

void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyImage(VkDevice device,
                  VkImage image_handle,
                  const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Image* image = GetImageFromHandle(image_handle);
    allocator->pfnFree(allocator->pUserData, image);
}

VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}

VkResult GetSwapchainGrallocUsage2ANDROID(VkDevice,
                                          VkFormat,
                                          VkImageUsageFlags,
                                          VkSwapchainImageUsageFlagsANDROID,
                                          uint64_t* grallocConsumerUsage,
                                          uint64_t* grallocProducerUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocConsumerUsage = 0;
    *grallocProducerUsage = 0;
    return VK_SUCCESS;
}

VkResult AcquireImageANDROID(VkDevice,
                             VkImage,
                             int fence,
                             VkSemaphore,
                             VkFence) {
    // No work is queued that would need to wait, so just consume the acquire
    // fence (if one was provided) right away.
    if (fence >= 0)
        close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalReleaseImageANDROID(VkQueue,
                                        uint32_t,
                                        const VkSemaphore*,
                                        VkImage,
                                        int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}
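
// Returning -1 above means no release fence is handed back: since the null
// driver performs no rendering, the image can be treated as ready as soon as
// the call returns.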

// -----------------------------------------------------------------------------
// No-op types

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

VkResult CreateDebugReportCallbackEXT(VkInstance instance,
                                      const VkDebugReportCallbackCreateInfoEXT*,
                                      const VkAllocationCallbacks*,
                                      VkDebugReportCallbackEXT* callback) {
    *callback = AllocHandle<VkDebugReportCallbackEXT>(
        instance, HandleType::kDebugReportCallbackEXT);
    return VK_SUCCESS;
}
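
// The corresponding Destroy* entry points below are empty, so the
// counter-based handles created above are simply abandoned; as noted near
// HandleType, the driver never reclaims them.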
   1113 
   1114 // -----------------------------------------------------------------------------
   1115 // No-op entrypoints
   1116 
   1117 // clang-format off
   1118 #pragma clang diagnostic push
   1119 #pragma clang diagnostic ignored "-Wunused-parameter"
   1120 
   1121 void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
   1122     ALOGV("TODO: vk%s", __FUNCTION__);
   1123 }
   1124 
   1125 void GetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) {
   1126     ALOGV("TODO: vk%s", __FUNCTION__);
   1127 }
   1128 
   1129 VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
   1130     ALOGV("TODO: vk%s", __FUNCTION__);
   1131     return VK_SUCCESS;
   1132 }
   1133 
   1134 VkResult GetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
   1135                                                     const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo,
   1136                                                     VkImageFormatProperties2KHR* pImageFormatProperties) {
   1137     ALOGV("TODO: vk%s", __FUNCTION__);
   1138     return VK_SUCCESS;
   1139 }
   1140 
   1141 VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
   1142     ALOGV("TODO: vk%s", __FUNCTION__);
   1143     return VK_SUCCESS;
   1144 }
   1145 
   1146 VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
   1147     return VK_SUCCESS;
   1148 }
   1149 
   1150 VkResult QueueWaitIdle(VkQueue queue) {
   1151     ALOGV("TODO: vk%s", __FUNCTION__);
   1152     return VK_SUCCESS;
   1153 }
   1154 
   1155 VkResult DeviceWaitIdle(VkDevice device) {
   1156     ALOGV("TODO: vk%s", __FUNCTION__);
   1157     return VK_SUCCESS;
   1158 }
   1159 
   1160 void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
   1161 }
   1162 
   1163 VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
   1164     ALOGV("TODO: vk%s", __FUNCTION__);
   1165     return VK_SUCCESS;
   1166 }
   1167 
   1168 VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
   1169     ALOGV("TODO: vk%s", __FUNCTION__);
   1170     return VK_SUCCESS;
   1171 }
   1172 
   1173 void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
   1174     ALOGV("TODO: vk%s", __FUNCTION__);
   1175 }
   1176 
   1177 VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
   1178     return VK_SUCCESS;
   1179 }
   1180 
   1181 VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
   1182     return VK_SUCCESS;
   1183 }
   1184 
   1185 void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
   1186     ALOGV("TODO: vk%s", __FUNCTION__);
   1187 }
   1188 
   1189 void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
   1190     ALOGV("TODO: vk%s", __FUNCTION__);
   1191 }
   1192 
   1193 void GetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
   1194                                                       VkPhysicalDeviceSparseImageFormatInfo2KHR const* pInfo,
   1195                                                       unsigned int* pNumProperties,
   1196                                                       VkSparseImageFormatProperties2KHR* pProperties) {
   1197     ALOGV("TODO: vk%s", __FUNCTION__);
   1198 }
   1199 
   1200 
   1201 VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
   1202     ALOGV("TODO: vk%s", __FUNCTION__);
   1203     return VK_SUCCESS;
   1204 }
   1205 
void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

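// Query pools: results are never produced. GetQueryPoolResults logs the TODO
// and returns VK_SUCCESS without writing to pData, so callers must not rely
// on its contents.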
void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

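// Pipeline cache data: note that *pDataSize is left untouched here, so a
// size query against this driver yields whatever the caller initialized it
// to.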
VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

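// Command buffer lifecycle: begin, end, and reset are accepted
// unconditionally; no recording state is tracked.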
VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

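// Every vkCmd* entry point below is an intentional no-op: nothing is
// appended to a command stream, so state setters, draws, dispatches, copies,
// barriers, and query commands all "record" without any effect. As a sketch
// (assuming 'cmdBuffer' came from vkAllocateCommandBuffers), the usual
// recording pattern completes trivially against this driver:
//
//   VkCommandBufferBeginInfo begin = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
//   vkBeginCommandBuffer(cmdBuffer, &begin);
//   vkCmdDraw(cmdBuffer, 3, 1, 0, 0);   // no-op
//   vkEndCommandBuffer(cmdBuffer);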
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const void* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

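// VK_EXT_debug_report: destroying a callback frees nothing here, and an
// injected message is simply dropped.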
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}

#pragma clang diagnostic pop
// clang-format on

}  // namespace null_driver