/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work

#include "SkTypes.h"

#ifdef SKQP_BUILD_HARDWAREBUFFER_TEST
#if SK_SUPPORT_GPU && defined(SK_VULKAN)

#include "GrBackendSemaphore.h"
#include "GrContext.h"
#include "GrContextFactory.h"
#include "GrContextPriv.h"
#include "GrGpu.h"
#include "GrProxyProvider.h"
#include "GrTest.h"
#include "SkAutoMalloc.h"
#include "SkCanvas.h"
#include "SkGr.h"
#include "SkImage.h"
#include "SkSurface.h"
#include "Test.h"
#include "../tools/gpu/vk/VkTestUtils.h"
#include "gl/GrGLDefines.h"
#include "gl/GrGLUtil.h"
#include "vk/GrVkBackendContext.h"
#include "vk/GrVkExtensions.h"

#include <android/hardware_buffer.h>
#include <cinttypes>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

// Dimensions of the AHardwareBuffer-backed images used throughout this test.
static const int DEV_W = 16, DEV_H = 16;

// Abstract per-backend helper (implemented for EGL/GLES and for Vulkan below).
// Each helper knows how to import an AHardwareBuffer as an SkImage (for
// reading) or an SkSurface (for writing), and how to export/import the
// native-fence semaphores used to synchronize the two backends.
class BaseTestHelper {
public:
    virtual ~BaseTestHelper() {}

    // Sets up the backend context. A false return means the required backend,
    // extensions, or devices are unavailable (not necessarily a test failure).
    virtual bool init(skiatest::Reporter* reporter) = 0;

    virtual void cleanup() = 0;
    virtual void releaseImage() = 0;

    virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                       AHardwareBuffer* buffer) = 0;
    virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) = 0;

    // Blocks the CPU until all GPU work queued by this backend has completed.
    virtual void doClientSync() = 0;
    virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
    virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                          sk_sp<SkSurface>) = 0;

    virtual void makeCurrent() = 0;

    virtual GrContext* grContext() = 0;

    // File descriptor of the most recently exported native fence, if any.
    int getFdHandle() { return fFdHandle; }

protected:
    BaseTestHelper() {}

    int fFdHandle = 0;
};

// EGL/GLES implementation of BaseTestHelper.
class
EGLTestHelper : public BaseTestHelper {
public:
    EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}

    ~EGLTestHelper() override {}

    // Destroys the EGLImage and GL texture created by importHardwareBuffer().
    void releaseImage() override {
        this->makeCurrent();
        if (!fGLCtx) {
            return;
        }
        if (EGL_NO_IMAGE_KHR != fImage) {
            fGLCtx->destroyEGLImage(fImage);
            fImage = EGL_NO_IMAGE_KHR;
        }
        if (fTexID) {
            GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
            fTexID = 0;
        }
    }

    void cleanup() override {
        this->releaseImage();
    }

    bool init(skiatest::Reporter* reporter) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;
    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void doClientSync() override;
    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    void makeCurrent() override { fGLCtx->makeCurrent(); }

    GrContext* grContext() override { return fGrContext; }

private:
    // Wraps |buffer| in an EGLImage and binds it to a freshly generated GL
    // texture stored in fTexID.
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);

    // Extension entry points resolved at init() time via eglGetProcAddress.
    typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
    typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
                                                 const EGLint*);
    typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
    EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
    EGLCreateImageKHRProc fEGLCreateImageKHR;
    EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;

    // EGL fence-sync extension entry points.
    PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
    PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
    PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
    PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
    PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;

    // State for the currently imported buffer.
    EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
    GrGLuint fTexID = 0;

    sk_gpu_test::GrContextFactory fFactory;
    sk_gpu_test::ContextInfo fGLESContextInfo;

    sk_gpu_test::GLTestContext* fGLCtx = nullptr;
    GrContext* fGrContext = nullptr;
};

// Creates a GLES context and resolves every EGL extension entry point the test
// needs. Returns false quietly when GLES or a required extension is missing;
// reports an error only when eglGetProcAddress fails for an advertised proc.
bool EGLTestHelper::init(skiatest::Reporter* reporter) {
    fGLESContextInfo = fFactory.getContextInfo(sk_gpu_test::GrContextFactory::kGLES_ContextType);
    fGrContext = fGLESContextInfo.grContext();
    fGLCtx = fGLESContextInfo.glContext();
    if (!fGrContext || !fGLCtx) {
        return false;
    }

    if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
        return false;
    }

    // Confirm we have egl and the needed extensions
    if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
        !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
        !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
        !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync")) {
        return false;
    }

    fEGLGetNativeClientBufferANDROID =
        (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
    if (!fEGLGetNativeClientBufferANDROID) {
        ERRORF(reporter, "Failed to get the eglGetNativeClientBufferAndroid proc");
        return false;
    }

    fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
    if (!fEGLCreateImageKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
        return false;
    }

    fEGLImageTargetTexture2DOES =
        (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
    if (!fEGLImageTargetTexture2DOES) {
        ERRORF(reporter, "Failed to get the proc EGLImageTargetTexture2DOES");
        return false;
    }

    fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
    if (!fEGLCreateSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
        return false;

    }
    fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
    if (!fEGLWaitSyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
        return false;

    }
    fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
    if (!fEGLGetSyncAttribKHR) {
        ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
        return false;

    }
    fEGLDupNativeFenceFDANDROID =
        (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
    if (!fEGLDupNativeFenceFDANDROID) {
        ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
        return false;

    }
    fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
    if (!fEGLDestroySyncKHR) {
        ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
        return false;

    }

    return true;
}

// Wraps |buffer| in an EGLImage (EGL_NATIVE_BUFFER_ANDROID target) and binds
// it to a freshly generated GL texture stored in fTexID. On success the
// texture is left bound on the current context.
bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
    GrGLClearErr(fGLCtx->gl());

    EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
    EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                            EGL_NONE };
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
                                EGL_NATIVE_BUFFER_ANDROID,
                                eglClientBuffer, eglAttribs);
    if (EGL_NO_IMAGE_KHR == fImage) {
        SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError() );
        return false;
    }

    GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
    if (!fTexID) {
        ERRORF(reporter, "Failed to create GL Texture");
        return false;
    }
    GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
    if (GR_GL_GET_ERROR(fGLCtx->gl()) != GR_GL_NO_ERROR) {
        ERRORF(reporter, "Failed to bind GL Texture");
        return false;
    }

    fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) status);
        return false;
    }

    // We changed the texture binding behind Skia's back; tell it so.
    fGrContext->resetContext(kTextureBinding_GrGLBackendState);
    return true;
}

// Imports |buffer| and wraps the resulting GL texture in a read-only SkImage.
sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
                                                          AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkImage> image = SkImage::MakeFromTexture(fGrContext,
                                                    backendTex,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kRGBA_8888_SkColorType,
                                                    kPremul_SkAlphaType,
                                                    nullptr);

    if (!image) {
        ERRORF(reporter, "Failed to make wrapped GL SkImage");
        return nullptr;
    }

    return image;
}

// Imports |buffer| and wraps the resulting GL texture in a renderable
// SkSurface (sample count 0, RGBA8888).
sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                             AHardwareBuffer* buffer) {
    if (!this->importHardwareBuffer(reporter, buffer)) {
        return nullptr;
    }
    GrGLTextureInfo textureInfo;
    textureInfo.fTarget = GR_GL_TEXTURE_2D;
    textureInfo.fID = fTexID;
    textureInfo.fFormat = GR_GL_RGBA8;

    GrBackendTexture backendTex(DEV_W, DEV_H, GrMipMapped::kNo, textureInfo);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext,
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface) {
        ERRORF(reporter, "Failed to make wrapped GL SkSurface");
        return nullptr;
    }

    return surface;
}

// Flushes |surface| and exports an Android native fence FD (stored in
// fFdHandle) that signals once the flushed GL work completes.
bool
EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
                                              sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    // Create the fence sync before flushing so the dup'd FD covers this work.
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
        return false;
    }

    surface->flush();
    GR_GL_CALL(fGLCtx->gl(), Flush());
    fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);

    EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }

    return true;
}

// Imports the native fence |fdHandle| as an EGLSync and makes the GL server
// wait on it before executing commands subsequently issued against |surface|.
bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                             sk_sp<SkSurface> surface) {
    EGLDisplay eglDisplay = eglGetCurrentDisplay();
    EGLint attr[] = {
        EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
        EGL_NONE
    };
    EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
    if (EGL_NO_SYNC_KHR == eglsync) {
        ERRORF(reporter,
               "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
        return false;
    }
    EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed called to eglWaitSyncKHR, error: %d\n", result);
        // Don't return false yet, try to delete the sync first
    }
    result = fEGLDestroySyncKHR(eglDisplay, eglsync);
    if (EGL_TRUE != result) {
        ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
        return false;
    }
    return true;
}

// CPU-blocks until all pending GL work completes, using a test fence sync.
void EGLTestHelper::doClientSync() {
    sk_gpu_test::FenceSync* fenceSync = fGLCtx->fenceSync();
    sk_gpu_test::PlatformFence fence = fenceSync->insertFence();
    fenceSync->waitFence(fence);
    fenceSync->deleteFence(fence);
}

// Declares a member function pointer for the given Vulkan entry point.
#define DECLARE_VK_PROC(name) PFN_vk##name fVk##name

// Resolves a Vulkan entry point; reports and returns false on failure.
#define ACQUIRE_VK_PROC(name, instance, device)                                             \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, instance, device));      \
    if (fVk##name == nullptr) {                                                             \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);           \
        return false;                                                                       \
    }

// As above for instance-level procs; also destroys the instance on failure.
#define ACQUIRE_INST_VK_PROC(name)                                                          \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fInst, VK_NULL_HANDLE)); \
    if (fVk##name == nullptr) {                                                             \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);           \
        fVkDestroyInstance(fInst, nullptr);                                                 \
        return false;                                                                       \
    }

// As above for device-level procs; destroys device and instance on failure.
#define ACQUIRE_DEVICE_VK_PROC(name)                                                        \
    fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
    if (fVk##name == nullptr) {                                                             \
        ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name);           \
        fVkDestroyDevice(fDevice, nullptr);                                                 \
        fVkDestroyInstance(fInst, nullptr);                                                 \
        return false;                                                                       \
    }

#ifdef SK_ENABLE_VK_LAYERS
const char* kMyDebugLayerNames[] = {
    // elements of VK_LAYER_LUNARG_standard_validation
    "VK_LAYER_GOOGLE_threading",
    "VK_LAYER_LUNARG_parameter_validation",
    "VK_LAYER_LUNARG_object_tracker",
    "VK_LAYER_LUNARG_image",
    "VK_LAYER_LUNARG_core_validation",
    "VK_LAYER_LUNARG_swapchain",
    "VK_LAYER_GOOGLE_unique_objects",
    // not included in standard_validation
    //"VK_LAYER_LUNARG_api_dump",
    //"VK_LAYER_LUNARG_vktrace",
    //"VK_LAYER_LUNARG_screenshot",
};
#endif

// Vulkan implementation of BaseTestHelper. Builds its own VkInstance/VkDevice
// so it can enable the Android hardware buffer external-memory extensions.
class VulkanTestHelper : public BaseTestHelper {
public:
    VulkanTestHelper() {}

    ~VulkanTestHelper() override {}

    // Destroys the imported VkImage and frees its dedicated memory allocation.
    void releaseImage() override {
        if (VK_NULL_HANDLE == fDevice) {
            return;
        }
        if (fImage != VK_NULL_HANDLE) {
            SkASSERT(fVkDestroyImage);
            fVkDestroyImage(fDevice, fImage, nullptr);
            fImage = VK_NULL_HANDLE;
        }

        if (fMemory != VK_NULL_HANDLE) {
            SkASSERT(fVkFreeMemory);
            fVkFreeMemory(fDevice, fMemory, nullptr);
            fMemory = VK_NULL_HANDLE;
        }
    }
    // Releases the image and drops the GrContext/backend context. The backend
    // context owns the instance and device (fOwnsInstanceAndDevice), so the
    // raw handles are simply cleared here rather than destroyed.
    void cleanup() override {
        this->releaseImage();

        fGrContext.reset();
        fBackendContext.reset();

        fInst = VK_NULL_HANDLE;
        fPhysDev = VK_NULL_HANDLE;
        fDevice = VK_NULL_HANDLE;
    }

    bool init(skiatest::Reporter* reporter) override;

    void doClientSync() override {
        if (!fGrContext) {
            return;
        }

        fGrContext->contextPriv().getGpu()->testingOnly_flushGpuAndSync();
    }

    bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
    bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                  sk_sp<SkSurface>) override;

    sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
                                               AHardwareBuffer* buffer) override;

    sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                  AHardwareBuffer* buffer) override;

    void makeCurrent() override {}

    GrContext* grContext() override { return fGrContext.get(); }

private:
    // Verifies the device supports optimally-tiled RGBA8 images backed by
    // Android hardware buffers at least DEV_W x DEV_H in size.
    bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);

    // Imports |buffer| as a dedicated-allocation VkImage; fills |outImageInfo|.
    bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
                              GrVkImageInfo* outImageInfo);

    bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
    bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);

    // Vulkan entry points resolved at init() time.
    DECLARE_VK_PROC(EnumerateInstanceVersion);
    DECLARE_VK_PROC(CreateInstance);
    DECLARE_VK_PROC(DestroyInstance);
    DECLARE_VK_PROC(EnumeratePhysicalDevices);
    DECLARE_VK_PROC(GetPhysicalDeviceProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);
    DECLARE_VK_PROC(GetPhysicalDeviceQueueFamilyProperties);
    DECLARE_VK_PROC(GetPhysicalDeviceFeatures);
    DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
    DECLARE_VK_PROC(CreateDevice);
    DECLARE_VK_PROC(GetDeviceQueue);
    DECLARE_VK_PROC(DeviceWaitIdle);
    DECLARE_VK_PROC(DestroyDevice);
    DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
    DECLARE_VK_PROC(CreateImage);
    DECLARE_VK_PROC(GetImageMemoryRequirements2);
    DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);
    DECLARE_VK_PROC(AllocateMemory);
    DECLARE_VK_PROC(BindImageMemory2);
    DECLARE_VK_PROC(DestroyImage);
    DECLARE_VK_PROC(FreeMemory);
    DECLARE_VK_PROC(CreateSemaphore);
    DECLARE_VK_PROC(GetSemaphoreFdKHR);
    DECLARE_VK_PROC(ImportSemaphoreFdKHR);
    DECLARE_VK_PROC(DestroySemaphore);

    VkInstance fInst = VK_NULL_HANDLE;
    VkPhysicalDevice fPhysDev = VK_NULL_HANDLE;
    VkDevice fDevice = VK_NULL_HANDLE;

    // Handles for the currently imported AHardwareBuffer-backed image.
    VkImage fImage = VK_NULL_HANDLE;
    VkDeviceMemory fMemory = VK_NULL_HANDLE;

    sk_sp<GrVkBackendContext> fBackendContext;
    sk_sp<GrContext> fGrContext;
};

// Creates a Vulkan 1.1 instance and device with the Android hardware buffer
// external-memory extensions enabled, then wraps them in a GrContext.
bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
    PFN_vkGetInstanceProcAddr instProc;
    PFN_vkGetDeviceProcAddr devProc;
    if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
        return false;
    }

    // Dispatch through the device loader when a device is supplied, otherwise
    // through the instance loader.
    auto getProc = [&instProc, &devProc](const char* proc_name,
                                         VkInstance instance, VkDevice device) {
        if (device != VK_NULL_HANDLE) {
            return devProc(device, proc_name);
        }
        return instProc(instance, proc_name);
    };

    VkResult err;

    ACQUIRE_VK_PROC(EnumerateInstanceVersion, VK_NULL_HANDLE, VK_NULL_HANDLE);
    uint32_t instanceVersion = 0;
    if (fVkEnumerateInstanceVersion) {
        err = fVkEnumerateInstanceVersion(&instanceVersion);
        if (err) {
            ERRORF(reporter, "failed to enumerate instance version. Err: %d\n", err);
            return false;
        }
    }
    // AHardwareBuffer external memory requires Vulkan 1.1; bail quietly.
    if (instanceVersion < VK_MAKE_VERSION(1, 1, 0)) {
        return false;
    }

    const VkApplicationInfo app_info = {
        VK_STRUCTURE_TYPE_APPLICATION_INFO, // sType
        nullptr,                            // pNext
        "vkHWBTest",                        // pApplicationName
        0,                                  // applicationVersion
        "vkHWBTest",                        // pEngineName
        0,                                  // engineVersion
        instanceVersion,                    // apiVersion
    };

    GrVkExtensions extensions(getProc);
    extensions.initInstance(instanceVersion);

    SkTArray<const char*> instanceLayerNames;
    SkTArray<const char*> instanceExtensionNames;
    uint32_t extensionFlags = 0;
#ifdef SK_ENABLE_VK_LAYERS
    for (size_t i = 0; i < SK_ARRAY_COUNT(kMyDebugLayerNames); ++i) {
        if (extensions.hasInstanceLayer(kMyDebugLayerNames[i])) {
            instanceLayerNames.push_back(kMyDebugLayerNames[i]);
        }
    }
    if (extensions.hasInstanceExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) {
        instanceExtensionNames.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
        extensionFlags |= kEXT_debug_report_GrVkExtensionFlag;
    }
#endif

    const VkInstanceCreateInfo instance_create = {
        VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,    // sType
        nullptr,                                   // pNext
        0,                                         // flags
        &app_info,                                 // pApplicationInfo
        (uint32_t) instanceLayerNames.count(),     // enabledLayerNameCount
        instanceLayerNames.begin(),                // ppEnabledLayerNames
        (uint32_t) instanceExtensionNames.count(), // enabledExtensionNameCount
        instanceExtensionNames.begin(),            // ppEnabledExtensionNames
    };

    ACQUIRE_VK_PROC(CreateInstance, VK_NULL_HANDLE, VK_NULL_HANDLE);
    err = fVkCreateInstance(&instance_create, nullptr, &fInst);
    if (err < 0) {
        ERRORF(reporter, "vkCreateInstance failed: %d\n", err);
        return false;
    }

    ACQUIRE_VK_PROC(DestroyInstance, fInst, VK_NULL_HANDLE);
    ACQUIRE_INST_VK_PROC(EnumeratePhysicalDevices);
    ACQUIRE_INST_VK_PROC(GetPhysicalDeviceProperties);
ACQUIRE_INST_VK_PROC(GetPhysicalDeviceQueueFamilyProperties); 593 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceFeatures); 594 ACQUIRE_INST_VK_PROC(CreateDevice); 595 ACQUIRE_INST_VK_PROC(GetDeviceQueue); 596 ACQUIRE_INST_VK_PROC(DeviceWaitIdle); 597 ACQUIRE_INST_VK_PROC(DestroyDevice); 598 599 uint32_t gpuCount; 600 err = fVkEnumeratePhysicalDevices(fInst, &gpuCount, nullptr); 601 if (err) { 602 ERRORF(reporter, "vkEnumeratePhysicalDevices failed: %d\n", err); 603 fVkDestroyInstance(fInst, nullptr); 604 return false; 605 } 606 if (!gpuCount) { 607 // We can no physical devices so this isn't an error and failure in the test. 608 fVkDestroyInstance(fInst, nullptr); 609 return false; 610 } 611 // Just returning the first physical device instead of getting the whole array. 612 // TODO: find best match for our needs 613 gpuCount = 1; 614 err = fVkEnumeratePhysicalDevices(fInst, &gpuCount, &fPhysDev); 615 if (err) { 616 ERRORF(reporter, "vkEnumeratePhysicalDevices failed: %d\n", err); 617 fVkDestroyInstance(fInst, nullptr); 618 return false; 619 } 620 621 // query to get the initial queue props size 622 uint32_t queueCount; 623 fVkGetPhysicalDeviceQueueFamilyProperties(fPhysDev, &queueCount, nullptr); 624 if (!queueCount) { 625 ERRORF(reporter, "vkGetPhysicalDeviceQueueFamilyProperties returned no queues.\n"); 626 fVkDestroyInstance(fInst, nullptr); 627 return false; 628 } 629 630 SkAutoMalloc queuePropsAlloc(queueCount * sizeof(VkQueueFamilyProperties)); 631 // now get the actual queue props 632 VkQueueFamilyProperties* queueProps = (VkQueueFamilyProperties*)queuePropsAlloc.get(); 633 634 fVkGetPhysicalDeviceQueueFamilyProperties(fPhysDev, &queueCount, queueProps); 635 636 // iterate to find the graphics queue 637 uint32_t graphicsQueueIndex = queueCount; 638 for (uint32_t i = 0; i < queueCount; i++) { 639 if (queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) { 640 graphicsQueueIndex = i; 641 break; 642 } 643 } 644 if (graphicsQueueIndex == queueCount) { 645 ERRORF(reporter, 
"Could not find any supported graphics queues.\n"); 646 fVkDestroyInstance(fInst, nullptr); 647 return false; 648 } 649 650 VkPhysicalDeviceProperties physDevProperties; 651 fVkGetPhysicalDeviceProperties(fPhysDev, &physDevProperties); 652 int physDevVersion = physDevProperties.apiVersion; 653 654 if (physDevVersion < VK_MAKE_VERSION(1, 1, 0)) { 655 return false; 656 } 657 658 // Physical-Device-level functions added in 1.1 659 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2); 660 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2); 661 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties); 662 663 extensions.initDevice(physDevVersion, fInst, fPhysDev); 664 665 SkTArray<const char*> deviceLayerNames; 666 SkTArray<const char*> deviceExtensionNames; 667 #ifdef SK_ENABLE_VK_LAYERS 668 for (size_t i = 0; i < SK_ARRAY_COUNT(kMyDebugLayerNames); ++i) { 669 if (extensions.hasDeviceLayer(kMyDebugLayerNames[i])) { 670 deviceLayerNames.push_back(kMyDebugLayerNames[i]); 671 } 672 } 673 #endif 674 675 if (extensions.hasDeviceExtension( 676 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) { 677 deviceExtensionNames.push_back( 678 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME); 679 } else { 680 fVkDestroyInstance(fInst, nullptr); 681 return false; 682 } 683 684 if (extensions.hasDeviceExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) { 685 deviceExtensionNames.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME); 686 } else { 687 ERRORF(reporter, "Has HWB extension, but doesn't not have YCBCR coversion extension"); 688 fVkDestroyInstance(fInst, nullptr); 689 return false; 690 } 691 692 if (extensions.hasDeviceExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) { 693 deviceExtensionNames.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME); 694 } else { 695 fVkDestroyInstance(fInst, nullptr); 696 return false; 697 } 698 699 if 
(extensions.hasDeviceExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME)) { 700 deviceExtensionNames.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME); 701 } else { 702 SkDebugf("We don't have the extension for VK_EXT_QUEUE_FAMILY_FOREIGN\n"); 703 //fVkDestroyInstance(fInst, nullptr); 704 //return false; 705 } 706 707 // query to get the physical device properties 708 VkPhysicalDeviceFeatures deviceFeatures; 709 fVkGetPhysicalDeviceFeatures(fPhysDev, &deviceFeatures); 710 // this looks like it would slow things down, 711 // and we can't depend on it on all platforms 712 deviceFeatures.robustBufferAccess = VK_FALSE; 713 714 uint32_t featureFlags = 0; 715 if (deviceFeatures.geometryShader) { 716 featureFlags |= kGeometryShader_GrVkFeatureFlag; 717 } 718 if (deviceFeatures.dualSrcBlend) { 719 featureFlags |= kDualSrcBlend_GrVkFeatureFlag; 720 } 721 if (deviceFeatures.sampleRateShading) { 722 featureFlags |= kSampleRateShading_GrVkFeatureFlag; 723 } 724 725 float queuePriorities[1] = { 0.0 }; 726 // Here we assume no need for swapchain queue 727 // If one is needed, the client will need its own setup code 728 const VkDeviceQueueCreateInfo queueInfo[1] = { 729 { 730 VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // sType 731 nullptr, // pNext 732 0, // VkDeviceQueueCreateFlags 733 graphicsQueueIndex, // queueFamilyIndex 734 1, // queueCount 735 queuePriorities, // pQueuePriorities 736 } 737 }; 738 uint32_t queueInfoCount = 1; 739 740 const VkDeviceCreateInfo deviceInfo = { 741 VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, // sType 742 nullptr, // pNext 743 0, // VkDeviceCreateFlags 744 queueInfoCount, // queueCreateInfoCount 745 queueInfo, // pQueueCreateInfos 746 (uint32_t) deviceLayerNames.count(), // layerCount 747 deviceLayerNames.begin(), // ppEnabledLayerNames 748 (uint32_t) deviceExtensionNames.count(), // extensionCount 749 deviceExtensionNames.begin(), // ppEnabledExtensionNames 750 &deviceFeatures // ppEnabledFeatures 751 }; 752 753 err = fVkCreateDevice(fPhysDev, 
&deviceInfo, nullptr, &fDevice); 754 if (err) { 755 ERRORF(reporter, "CreateDevice failed: %d\n", err); 756 fVkDestroyInstance(fInst, nullptr); 757 return false; 758 } 759 760 ACQUIRE_DEVICE_VK_PROC(CreateImage); 761 ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2); 762 ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID); 763 ACQUIRE_DEVICE_VK_PROC(AllocateMemory); 764 ACQUIRE_DEVICE_VK_PROC(BindImageMemory2); 765 ACQUIRE_DEVICE_VK_PROC(DestroyImage); 766 ACQUIRE_DEVICE_VK_PROC(FreeMemory); 767 ACQUIRE_DEVICE_VK_PROC(CreateSemaphore); 768 ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR); 769 ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR); 770 ACQUIRE_DEVICE_VK_PROC(DestroySemaphore); 771 772 VkQueue queue; 773 fVkGetDeviceQueue(fDevice, graphicsQueueIndex, 0, &queue); 774 775 // Setting up actual skia things now 776 auto interface = 777 sk_make_sp<GrVkInterface>(getProc, fInst, fDevice, extensionFlags); 778 if (!interface->validate(extensionFlags)) { 779 ERRORF(reporter, "Vulkan interface validation failed\n"); 780 fVkDeviceWaitIdle(fDevice); 781 fVkDestroyDevice(fDevice, nullptr); 782 fVkDestroyInstance(fInst, nullptr); 783 return false; 784 } 785 786 fBackendContext.reset(new GrVkBackendContext()); 787 fBackendContext->fInstance = fInst; 788 fBackendContext->fPhysicalDevice = fPhysDev; 789 fBackendContext->fDevice = fDevice; 790 fBackendContext->fQueue = queue; 791 fBackendContext->fGraphicsQueueIndex = graphicsQueueIndex; 792 fBackendContext->fMinAPIVersion = instanceVersion; 793 fBackendContext->fExtensions = extensionFlags; 794 fBackendContext->fFeatures = featureFlags; 795 fBackendContext->fInterface.reset(interface.release()); 796 fBackendContext->fOwnsInstanceAndDevice = true; 797 798 fGrContext = GrContext::MakeVulkan(fBackendContext); 799 REPORTER_ASSERT(reporter, fGrContext.get()); 800 801 return this->checkOptimalHardwareBuffer(reporter); 802 } 803 804 bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) { 805 VkResult 
err; 806 807 VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo; 808 externalImageFormatInfo.sType = 809 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO; 810 externalImageFormatInfo.pNext = nullptr; 811 externalImageFormatInfo.handleType = 812 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID; 813 //externalImageFormatInfo.handType = 0x80; 814 815 // We will create the hardware buffer with gpu sampled so these usages should all be valid 816 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | 817 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | 818 VK_IMAGE_USAGE_TRANSFER_DST_BIT; 819 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo; 820 imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2; 821 imageFormatInfo.pNext = &externalImageFormatInfo; 822 imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM; 823 imageFormatInfo.type = VK_IMAGE_TYPE_2D; 824 imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL; 825 imageFormatInfo.usage = usageFlags; 826 imageFormatInfo.flags = 0; 827 828 VkAndroidHardwareBufferUsageANDROID hwbUsage; 829 hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID; 830 hwbUsage.pNext = nullptr; 831 832 VkExternalImageFormatProperties externalImgFormatProps; 833 externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES; 834 externalImgFormatProps.pNext = &hwbUsage; 835 836 VkImageFormatProperties2 imgFormProps; 837 imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2; 838 imgFormProps.pNext = &externalImgFormatProps; 839 840 err = fVkGetPhysicalDeviceImageFormatProperties2(fPhysDev, &imageFormatInfo, 841 &imgFormProps); 842 if (VK_SUCCESS != err) { 843 ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperites failed, err: %d", err); 844 return false; 845 } 846 847 const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties; 848 REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width); 849 
REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height); 850 851 const VkExternalMemoryProperties& externalImageFormatProps = 852 externalImgFormatProps.externalMemoryProperties; 853 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT & 854 externalImageFormatProps.externalMemoryFeatures)); 855 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT & 856 externalImageFormatProps.externalMemoryFeatures)); 857 858 REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & 859 hwbUsage.androidHardwareBufferUsage)); 860 861 return true; 862 } 863 864 bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, 865 AHardwareBuffer* buffer, 866 bool forWrite, 867 GrVkImageInfo* outImageInfo) { 868 VkResult err; 869 870 VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps; 871 hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID; 872 hwbFormatProps.pNext = nullptr; 873 874 VkAndroidHardwareBufferPropertiesANDROID hwbProps; 875 hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID; 876 hwbProps.pNext = &hwbFormatProps; 877 878 err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps); 879 if (VK_SUCCESS != err) { 880 ERRORF(reporter, "GetAndroidHardwareBufferPropertiesAndoird failed, err: %d", err); 881 return false; 882 } 883 884 REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format); 885 REPORTER_ASSERT(reporter, 886 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) && 887 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) && 888 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures)); 889 if (forWrite) { 890 REPORTER_ASSERT(reporter, 891 SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures)); 892 893 } 894 895 bool useExternalFormat = VK_FORMAT_UNDEFINED == 
    hwbFormatProps.format;
    const VkExternalFormatANDROID externalFormatInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,             // sType
        nullptr,                                               // pNext
        useExternalFormat ? hwbFormatProps.externalFormat : 0, // externalFormat
    };

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                // sType
        &externalFormatInfo,                                                // pNext
        //nullptr, // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
        //0x80, // handleTypes
    };

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (forWrite) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
        &externalMemoryImageInfo,            // pNext
        0,                                   // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                    // VkImageType
        hwbFormatProps.format,               // VkFormat
        { DEV_W, DEV_H, 1 },                 // VkExtent3D
        1,                                   // mipLevels
        1,                                   // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,               // samples
        VK_IMAGE_TILING_OPTIMAL,             // VkImageTiling
        usageFlags,                          // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,           // VkSharingMode
        0,                                   // queueFamilyCount
        0,                                   // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,           // initialLayout
    };

    err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Create Image failed, err: %d", err);
        return false;
    }

    VkImageMemoryRequirementsInfo2 memReqsInfo;
    memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
    memReqsInfo.pNext = nullptr;
    memReqsInfo.image = fImage;

    VkMemoryDedicatedRequirements dedicatedMemReqs;
    dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
    dedicatedMemReqs.pNext = nullptr;

    VkMemoryRequirements2 memReqs;
    memReqs.sType =
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2; 952 memReqs.pNext = &dedicatedMemReqs; 953 954 fVkGetImageMemoryRequirements2(fDevice, &memReqsInfo, &memReqs); 955 REPORTER_ASSERT(reporter, VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation); 956 957 VkPhysicalDeviceMemoryProperties2 phyDevMemProps; 958 phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2; 959 phyDevMemProps.pNext = nullptr; 960 961 uint32_t typeIndex = 0; 962 uint32_t heapIndex = 0; 963 bool foundHeap = false; 964 fVkGetPhysicalDeviceMemoryProperties2(fPhysDev, &phyDevMemProps); 965 uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount; 966 for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) { 967 if (hwbProps.memoryTypeBits & (1 << i)) { 968 const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties; 969 uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags & 970 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; 971 if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { 972 typeIndex = i; 973 heapIndex = pdmp.memoryTypes[i].heapIndex; 974 foundHeap = true; 975 } 976 } 977 } 978 if (!foundHeap) { 979 ERRORF(reporter, "Failed to find valid heap for imported memory"); 980 return false; 981 } 982 983 VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo; 984 hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID; 985 hwbImportInfo.pNext = nullptr; 986 hwbImportInfo.buffer = buffer; 987 988 VkMemoryDedicatedAllocateInfo dedicatedAllocInfo; 989 dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO; 990 dedicatedAllocInfo.pNext = &hwbImportInfo; 991 dedicatedAllocInfo.image = fImage; 992 dedicatedAllocInfo.buffer = VK_NULL_HANDLE; 993 994 VkMemoryAllocateInfo allocInfo = { 995 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType 996 &dedicatedAllocInfo, // pNext 997 hwbProps.allocationSize, // allocationSize 998 typeIndex, // memoryTypeIndex 999 }; 1000 1001 err = fVkAllocateMemory(fDevice, 
&allocInfo, nullptr, &fMemory); 1002 if (VK_SUCCESS != err) { 1003 ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err); 1004 return false; 1005 } 1006 1007 VkBindImageMemoryInfo bindImageInfo; 1008 bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO; 1009 bindImageInfo.pNext = nullptr; 1010 bindImageInfo.image = fImage; 1011 bindImageInfo.memory = fMemory; 1012 bindImageInfo.memoryOffset = 0; 1013 1014 err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo); 1015 if (VK_SUCCESS != err) { 1016 ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err); 1017 return false; 1018 } 1019 1020 outImageInfo->fImage = fImage; 1021 outImageInfo->fAlloc = GrVkAlloc(fMemory, 0, hwbProps.allocationSize, 0); 1022 outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL; 1023 outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED; 1024 outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM; 1025 outImageInfo->fLevelCount = 1; 1026 outImageInfo->fInitialQueueFamily = VK_QUEUE_FAMILY_EXTERNAL; 1027 outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL; 1028 return true; 1029 } 1030 1031 sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter, 1032 AHardwareBuffer* buffer) { 1033 GrVkImageInfo imageInfo; 1034 if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) { 1035 return nullptr; 1036 } 1037 1038 GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo); 1039 1040 sk_sp<SkImage> wrappedImage = SkImage::MakeFromTexture(fGrContext.get(), 1041 backendTex, 1042 kTopLeft_GrSurfaceOrigin, 1043 kRGBA_8888_SkColorType, 1044 kPremul_SkAlphaType, 1045 nullptr); 1046 1047 if (!wrappedImage.get()) { 1048 ERRORF(reporter, "Failed to create wrapped Vulkan SkImage"); 1049 return nullptr; 1050 } 1051 1052 return wrappedImage; 1053 } 1054 1055 bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, 1056 sk_sp<SkSurface> surface) { 1057 GrBackendSemaphore semaphore; 1058 if 
// (continuation of VulkanTestHelper::flushSurfaceAndSignalSemaphore)
(!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
        return false;
    }
    // Ask Skia to signal our semaphore when the surface's GPU work completes.
    GrSemaphoresSubmitted submitted = surface->flushAndSignalSemaphores(1, &semaphore);
    if (GrSemaphoresSubmitted::kNo == submitted) {
        ERRORF(reporter, "Failing call to flushAndSignalSemaphores on SkSurface");
        // NOTE(review): the VkSemaphore created by setupSemaphoreForSignaling
        // is not destroyed on this path -- looks like a leak; confirm.
        return false;
    }
    SkASSERT(semaphore.isInitialized());
    // Export the now-pending semaphore as a sync-fd (stored in fFdHandle).
    if (!this->exportSemaphore(reporter, semaphore)) {
        return false;
    }
    return true;
}

// Creates a VkSemaphore exportable as a SYNC_FD handle, after verifying the
// physical device advertises export+import support for that handle type.
// On success the semaphore is returned through |beSemaphore| (caller owns it).
bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
                                                  GrBackendSemaphore* beSemaphore) {
    // Query supported info for the SYNC_FD external-semaphore handle type.
    VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
    exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
    exSemInfo.pNext = nullptr;
    exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkExternalSemaphoreProperties exSemProps;
    exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
    exSemProps.pNext = nullptr;

    fVkGetPhysicalDeviceExternalSemaphoreProperties(fPhysDev, &exSemInfo, &exSemProps);

    // The test re-exports an imported fd, so the handle type must support
    // export-from-imported as well as plain export/import.
    if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.compatibleHandleTypes &
                  VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
        return false;
    }
    if (!SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
        !SkToBool(exSemProps.externalSemaphoreFeatures &
                  VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
        ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
        return false;
    }

    // Create the semaphore with SYNC_FD export enabled via the pNext chain.
    VkExportSemaphoreCreateInfo exportInfo;
    exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
    exportInfo.pNext = nullptr;
    exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = &exportInfo;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
        return false;
    }
    beSemaphore->initVulkan(semaphore);
    return true;
}

// Exports |beSemaphore|'s pending signal as a sync-fd into fFdHandle, then
// destroys the VkSemaphore.  With SYNC_FD handles the payload is transferred
// to the fd on export, so destroying the source semaphore here is presumably
// the intended handoff -- verify against the external-semaphore-fd spec.
bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
                                       const GrBackendSemaphore& beSemaphore) {
    VkSemaphore semaphore = beSemaphore.vkSemaphore();
    if (VK_NULL_HANDLE == semaphore) {
        ERRORF(reporter, "Invalid vulkan handle in export call");
        return false;
    }

    VkSemaphoreGetFdInfoKHR getFdInfo;
    getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
    getFdInfo.pNext = nullptr;
    getFdInfo.semaphore = semaphore;
    getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;

    VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
        return false;
    }
    fVkDestroySemaphore(fDevice, semaphore, nullptr);
    return true;
}

// Imports |fdHandle| (a sync-fd exported by the source context) into a new
// semaphore and attaches it as a wait to |surface|, so the surface's GPU work
// is ordered after the source's.  (Continues into the next chunk.)
bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
                                                sk_sp<SkSurface> surface) {
    VkSemaphoreCreateInfo semaphoreInfo;
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;

    VkSemaphore semaphore;
    VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
    if (VK_SUCCESS != err) {
        ERRORF(reporter,
// (continuation of VulkanTestHelper::importAndWaitOnSemaphore)
"Failed to create import semaphore, err: %d", err);
        return false;
    }

    // Import the sync-fd into the fresh semaphore.  TEMPORARY import: the
    // payload is consumed by the next wait on this semaphore.
    VkImportSemaphoreFdInfoKHR importInfo;
    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importInfo.pNext = nullptr;
    importInfo.semaphore = semaphore;
    importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
    importInfo.fd = fdHandle;

    err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
    if (VK_SUCCESS != err) {
        ERRORF(reporter, "Failed to import semaphore, err: %d", err);
        // NOTE(review): |semaphore| is not destroyed on this path -- looks
        // like a leak; confirm.
        return false;
    }

    GrBackendSemaphore beSemaphore;
    beSemaphore.initVulkan(semaphore);
    if (!surface->wait(1, &beSemaphore)) {
        ERRORF(reporter, "Failed to add wait semaphore to surface");
        fVkDestroySemaphore(fDevice, semaphore, nullptr);
        return false;
    }
    // On success the semaphore is not destroyed here -- presumably ownership
    // passes to the surface via wait(); verify against SkSurface::wait's
    // ownership contract.
    return true;
}

// Wraps |buffer| as a renderable Vulkan-backed SkSurface (forWrite=true adds
// color-attachment usage in importHardwareBuffer).  Returns nullptr on failure.
sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
                                                                AHardwareBuffer* buffer) {
    GrVkImageInfo imageInfo;
    if (!this->importHardwareBuffer(reporter, buffer, true /*forWrite*/, &imageInfo)) {
        return nullptr;
    }

    GrBackendTexture backendTex(DEV_W, DEV_H, imageInfo);

    sk_sp<SkSurface> surface = SkSurface::MakeFromBackendTexture(fGrContext.get(),
                                                                 backendTex,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 0,
                                                                 kRGBA_8888_SkColorType,
                                                                 nullptr, nullptr);

    if (!surface.get()) {
        ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
        return nullptr;
    }

    return surface;
}

// Reference pixel value for position (x, y): red/green encode the coordinates,
// blue is constant, and alpha cycles through five values based on (x+y) % 5.
// (Continues into the next chunk.)
static SkPMColor get_src_color(int x, int y) {
    SkASSERT(x >= 0 && x < DEV_W);
    SkASSERT(y >= 0 && y < DEV_H);

    U8CPU r = x;
    U8CPU g = y;
    U8CPU b = 0xc;

    U8CPU a = 0xff;
    switch ((x+y) % 5) {
        case 0:
            a = 0xff;
            break;
        case 1:
            a = 0x80;
            break;
        case 2:
            a = 0xCC;
            break;
        case 4:
            a =
0x01;
            break;
        case 3:
            a = 0x00;
            break;
    }
    // NOTE(review): this unconditionally overrides the alpha chosen by the
    // switch above (making every pixel opaque and the switch dead code).
    // Looks like debug leftover, but check_read's expected values depend on
    // it -- confirm intent before removing.
    a = 0xff;
    return SkPremultiplyARGBInline(a, r, g, b);
}

// Lazily builds (and caches in a function-local static) the DEV_W x DEV_H
// N32 source bitmap filled with get_src_color values.
static SkBitmap make_src_bitmap() {
    static SkBitmap bmp;
    if (bmp.isNull()) {
        bmp.allocN32Pixels(DEV_W, DEV_H);
        intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
                        pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
                *pixel = get_src_color(x, y);
            }
        }
    }
    return bmp;
}

// Compares dstBitmap against srcBitmap pixel-by-pixel; reports and returns
// false on the first mismatch.  Both bitmaps are assumed DEV_W x DEV_H N32.
static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
                       const SkBitmap& dstBitmap) {
    bool result = true;
    for (int y = 0; y < DEV_H && result; ++y) {
        for (int x = 0; x < DEV_W && result; ++x) {
            const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
            const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
            if (srcPixel != dstPixel) {
                ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);
                result = false;
            } /*else {
                ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
                       x, y, srcPixel, dstPixel);

            }*/
        }
    }
    return result;
}

// Shared teardown for all exit paths of run_test.  Null arguments are allowed
// and skipped.
static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
                              AHardwareBuffer* buffer) {
    if (srcHelper) {
        srcHelper->cleanup();
    }
    if (dstHelper) {
        dstHelper->cleanup();
    }
    if (buffer) {
        AHardwareBuffer_release(buffer);
    }
}

// Who writes the AHardwareBuffer: the CPU (lock/unlock), an EGL context, or a
// Vulkan context.
enum class SrcType {
    kCPU,
    kEGL,
    kVulkan,
};

// Which GPU backend reads the AHardwareBuffer back.
enum class DstType {
    kEGL,
    kVulkan,
};

// Core test driver: writes the reference pattern into an AHardwareBuffer via
// |srcType|, then imports and reads it back via |dstType| and verifies the
// pixels.  When |shareSyncs| is true, ordering between the two contexts uses
// an exported/imported semaphore (sync-fd) instead of a CPU-side sync.
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    // srcHelper stays null for the CPU source; the buffer is then filled via
    // AHardwareBuffer_lock below instead of a GPU surface.
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
        srcHelper.reset(new EGLTestHelper(options));
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    // CPU sources need CPU write access; GPU sources render into the buffer
    // and so need color-output usage instead.
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used in the allocate
    hwbDesc.stride = 0;
    hwbDesc.rfu0= 0;
    hwbDesc.rfu1= 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocated hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get actual desc for allocated buffer so we know the stride for uploading cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        // Copy the source bitmap row by row, honoring the buffer's stride
        // (in pixels, per AHardwareBuffer_describe).
        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        // Sanity-check the copy before unlocking.
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        // GPU source: wrap the buffer as a surface and draw the pattern into it.
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImage::MakeFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it forces sychronization
        // to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            // GPU-side ordering: flush and export a semaphore fd for the dst
            // context to wait on (no CPU sync).
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, surface)) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }

            surface.reset();
        } else {
            // CPU-side ordering: block until the source GPU work is done
            // before the dst context touches the buffer.
            surface.reset();
            srcHelper->doClientSync();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);
    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    GrContext* grContext = dstHelper->grContext();

    // Make SkSurface to render wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurface::MakeRenderTarget(grContext,
                                                           SkBudgeted::kNo, imageInfo, 0,
                                                           kTopLeft_GrSurfaceOrigin,
                                                           nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        // Make the dst surface wait on the semaphore fd exported by the source.
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    // Final verification: what the dst backend read must match the source pattern.
    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}

// Test registrations: every (src, dst) combination, with and without shared
// semaphores.  Naming is VulkanHardwareBuffer_<Src>_<Dst>[_Syncs].
DEF_GPUTEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_CPU_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL, reporter, options) {
// (continuation of VulkanHardwareBuffer_EGL_EGL)
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

// The _Syncs variants exercise cross-context semaphore sharing (sync-fd)
// instead of CPU-side synchronization; there is deliberately no CPU-source
// _Syncs variant (run_test early-outs for SrcType::kCPU with shareSyncs).
DEF_GPUTEST(VulkanHardwareBuffer_EGL_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}

DEF_GPUTEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs, reporter, options) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}

#endif  // SK_SUPPORT_GPU && defined(SK_VULKAN)
#endif  // SKQP_BUILD_HARDWAREBUFFER_TEST