/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
    VkFormat dontCare;
    if (!format) {
        format = &dontCare;
    }

    switch (config) {
        case kUnknown_GrPixelConfig:
            return false;
        case kRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_UNORM;
            return true;
        case kRGB_888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8_UNORM;
            return true;
        case kRG_88_GrPixelConfig:
            *format = VK_FORMAT_R8G8_UNORM;
            return true;
        case kBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_UNORM;
            return true;
        case kSRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SRGB;
            return true;
        case kSBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_SRGB;
            return true;
        case kRGBA_1010102_GrPixelConfig:
            *format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
            return true;
        case kRGB_565_GrPixelConfig:
            *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
            return true;
        case kRGBA_4444_GrPixelConfig:
            // R4G4B4A4 is not required to be supported, so we actually
            // store the data as if it were B4G4R4A4 and swizzle in shaders.
            *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
            return true;
        case kAlpha_8_GrPixelConfig: // fall through
        case kAlpha_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kAlpha_8_as_Alpha_GrPixelConfig:
            return false;
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kGray_8_as_Lum_GrPixelConfig:
            return false;
        case kRGBA_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32B32A32_SFLOAT;
            return true;
        case kRG_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32_SFLOAT;
            return true;
        case kRGBA_half_GrPixelConfig:
            *format = VK_FORMAT_R16G16B16A16_SFLOAT;
            return true;
        case kRGB_ETC1_GrPixelConfig:
            // Converting to ETC2, which is a superset of ETC1.
            *format = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            return true;
        case kAlpha_half_GrPixelConfig: // fall through
        case kAlpha_half_as_Red_GrPixelConfig:
            *format = VK_FORMAT_R16_SFLOAT;
            return true;
    }
    SK_ABORT("Unexpected config");
    return false;
}

#ifdef SK_DEBUG
bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig == config;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig == config;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return kSRGBA_8888_GrPixelConfig == config;
        case VK_FORMAT_B8G8R8A8_SRGB:
            return kSBGRA_8888_GrPixelConfig == config;
        case VK_FORMAT_R8G8B8_UNORM:
            return kRGB_888_GrPixelConfig == config;
        case VK_FORMAT_R8G8_UNORM:
            return kRG_88_GrPixelConfig == config;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig == config;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            return kRGB_565_GrPixelConfig == config;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            return kRGBA_4444_GrPixelConfig == config;
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_GrPixelConfig == config ||
                   kAlpha_8_as_Red_GrPixelConfig == config ||
                   kGray_8_GrPixelConfig == config ||
                   kGray_8_as_Red_GrPixelConfig == config;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
            return kRGB_ETC1_GrPixelConfig == config;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            return kRGBA_float_GrPixelConfig == config;
        case VK_FORMAT_R32G32_SFLOAT:
            return kRG_float_GrPixelConfig == config;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return kRGBA_half_GrPixelConfig == config;
        case VK_FORMAT_R16_SFLOAT:
            return kAlpha_half_GrPixelConfig == config ||
                   kAlpha_half_as_Red_GrPixelConfig == config;
        default:
            return false;
    }
}
#endif

bool GrVkFormatIsSupported(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_B8G8R8A8_SRGB:
        case VK_FORMAT_R8G8B8A8_SINT:
        case VK_FORMAT_R8G8B8_UNORM:
        case VK_FORMAT_R8G8_UNORM:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_R32G32B32A32_SFLOAT:
        case VK_FORMAT_R32G32_SFLOAT:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
            return true;
        default:
            return false;
    }
}

bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        case 32:
            *vkSamples = VK_SAMPLE_COUNT_32_BIT;
            return true;
        case 64:
            *vkSamples = VK_SAMPLE_COUNT_64_BIT;
            return true;
        default:
            return false;
    }
}

SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return SkSL::Program::kVertex_Kind;
    }
    if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
        return SkSL::Program::kGeometry_Kind;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return SkSL::Program::kFragment_Kind;
}

bool GrCompileVkShaderModule(const GrVkGpu* gpu,
                             const char* shaderString,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo,
                             const SkSL::Program::Settings& settings,
                             SkSL::String* outSPIRV,
                             SkSL::Program::Inputs* outInputs) {
    std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
                                                         vk_shader_stage_to_skiasl_kind(stage),
                                                         SkSL::String(shaderString),
                                                         settings);
    if (!program) {
        SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
        SkASSERT(false);
        // Bail out rather than dereferencing a null program below.
        return false;
    }
    *outInputs = program->fInputs;
    if (!gpu->shaderCompiler()->toSPIRV(*program, outSPIRV)) {
        SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
        return false;
    }

    return GrInstallVkShaderModule(gpu, *outSPIRV, stage, shaderModule, stageInfo);
}

bool GrInstallVkShaderModule(const GrVkGpu* gpu,
                             const SkSL::String& spirv,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo) {
    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = spirv.size();
    moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();

    VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
                                                                     &moduleCreateInfo,
                                                                     nullptr,
                                                                     shaderModule));
    if (err) {
        return false;
    }

    memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
    stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stageInfo->pNext = nullptr;
    stageInfo->flags = 0;
    stageInfo->stage = stage;
    stageInfo->module = *shaderModule;
    stageInfo->pName = "main";
    stageInfo->pSpecializationInfo = nullptr;

    return true;
}
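
// The sketch below is illustrative only and is not part of this file's API: it
// shows how a caller might use GrCompileVkShaderModule() above to compile SkSL
// into SPIR-V and build a fragment-shader stage. The helper name and the way
// the GrVkGpu instance and SkSL source are obtained are assumptions; the block
// is guarded with #if 0 so it is never compiled.
#if 0
static bool example_compile_fragment_shader(const GrVkGpu* gpu, const char* sksl) {
    SkSL::Program::Settings settings;          // default SkSL compile settings (assumed sufficient)
    SkSL::String spirv;                        // receives the generated SPIR-V
    SkSL::Program::Inputs inputs;              // records which built-in inputs the program uses
    VkShaderModule module;
    VkPipelineShaderStageCreateInfo stageInfo;
    return GrCompileVkShaderModule(gpu, sksl, VK_SHADER_STAGE_FRAGMENT_BIT,
                                   &module, &stageInfo, settings, &spirv, &inputs);
}
#endif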