      1 /*------------------------------------------------------------------------
      2  * Vulkan Conformance Tests
      3  * ------------------------
      4  *
      5  * Copyright (c) 2017 The Khronos Group Inc.
      6  *
      7  * Licensed under the Apache License, Version 2.0 (the "License");
      8  * you may not use this file except in compliance with the License.
      9  * You may obtain a copy of the License at
     10  *
     11  *      http://www.apache.org/licenses/LICENSE-2.0
     12  *
     13  * Unless required by applicable law or agreed to in writing, software
     14  * distributed under the License is distributed on an "AS IS" BASIS,
     15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     16  * See the License for the specific language governing permissions and
     17  * limitations under the License.
     18  *
     19  *//*!
     20  * \file  vktImageCompressionTranscodingSupport.cpp
     21  * \brief Compression transcoding support
     22  *//*--------------------------------------------------------------------*/
     23 
     24 #include "vktImageCompressionTranscodingSupport.hpp"
     25 #include "vktImageLoadStoreUtil.hpp"
     26 
     27 #include "deUniquePtr.hpp"
     28 #include "deStringUtil.hpp"
     29 #include "deSharedPtr.hpp"
     30 #include "deRandom.hpp"
     31 
     32 #include "vktTestCaseUtil.hpp"
     33 #include "vkPrograms.hpp"
     34 #include "vkImageUtil.hpp"
     35 #include "vktImageTestsUtil.hpp"
     36 #include "vkBuilderUtil.hpp"
     37 #include "vkRef.hpp"
     38 #include "vkRefUtil.hpp"
     39 #include "vkTypeUtil.hpp"
     40 #include "vkQueryUtil.hpp"
     41 
     42 #include "tcuTextureUtil.hpp"
     43 #include "tcuTexture.hpp"
     44 #include "tcuCompressedTexture.hpp"
     45 #include "tcuVectorType.hpp"
     46 #include "tcuResource.hpp"
     47 #include "tcuImageIO.hpp"
     48 #include "tcuImageCompare.hpp"
     49 #include "tcuTestLog.hpp"
     50 #include "tcuRGBA.hpp"
     51 #include "tcuSurface.hpp"
     52 
     53 #include <vector>
     54 using namespace vk;
     55 namespace vkt
     56 {
     57 namespace image
     58 {
     59 namespace
     60 {
     61 using std::string;
     62 using std::vector;
     63 using tcu::TestContext;
     64 using tcu::TestStatus;
     65 using tcu::UVec3;
     66 using tcu::IVec3;
     67 using tcu::CompressedTexFormat;
     68 using tcu::CompressedTexture;
     69 using tcu::Resource;
     70 using tcu::Archive;
     71 using tcu::ConstPixelBufferAccess;
     72 using de::MovePtr;
     73 using de::SharedPtr;
     74 using de::Random;
     75 
     76 typedef SharedPtr<MovePtr<Image> >			ImageSp;
     77 typedef SharedPtr<Move<VkImageView> >		ImageViewSp;
     78 typedef SharedPtr<Move<VkDescriptorSet> >	SharedVkDescriptorSet;
     79 
     80 enum ShaderType
     81 {
     82 	SHADER_TYPE_COMPUTE,
     83 	SHADER_TYPE_FRAGMENT,
     84 	SHADER_TYPE_LAST
     85 };
     86 
     87 enum Operation
     88 {
     89 	OPERATION_IMAGE_LOAD,
     90 	OPERATION_TEXEL_FETCH,
     91 	OPERATION_TEXTURE,
     92 	OPERATION_IMAGE_STORE,
     93 	OPERATION_ATTACHMENT_READ,
     94 	OPERATION_ATTACHMENT_WRITE,
     95 	OPERATION_TEXTURE_READ,
     96 	OPERATION_TEXTURE_WRITE,
     97 	OPERATION_LAST
     98 };
     99 
    100 struct TestParameters
    101 {
    102 	Operation			operation;
    103 	ShaderType			shader;
    104 	UVec3				size;
    105 	ImageType			imageType;
    106 	VkFormat			formatCompressed;
    107 	VkFormat			formatUncompressed;
    108 	deUint32			imagesCount;
    109 	VkImageUsageFlags	compressedImageUsage;
    110 	VkImageUsageFlags	compressedImageViewUsage;
    111 	VkImageUsageFlags	uncompressedImageUsage;
    112 	bool				useMipmaps;
    113 	VkFormat			formatForVerify;
    114 };
    115 
    116 template<typename T>
    117 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
    118 {
    119 	return SharedPtr<Move<T> >(new Move<T>(move));
    120 }
    121 
    122 template<typename T>
    123 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
    124 {
    125 	return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
    126 }
    127 
    128 const deUint32 SINGLE_LEVEL = 1u;
    129 const deUint32 SINGLE_LAYER = 1u;
    130 
    131 class BasicTranscodingTestInstance : public TestInstance
    132 {
    133 public:
    134 							BasicTranscodingTestInstance	(Context&						context,
    135 															 const TestParameters&			parameters);
    136 	virtual TestStatus		iterate							(void) = 0;
    137 protected:
    138 	void					generateData					(deUint8*						toFill,
    139 															 const size_t					size,
    140 															 const VkFormat					format,
    141 															 const deUint32					layer = 0u,
    142 															 const deUint32					level = 0u);
    143 	deUint32				getLevelCount					();
    144 	deUint32				getLayerCount					();
    145 	UVec3					getLayerDims					();
    146 	vector<UVec3>			getMipLevelSizes				(UVec3							baseSize);
    147 	vector<UVec3>			getCompressedMipLevelSizes		(const VkFormat					compressedFormat,
    148 															 const vector<UVec3>&			uncompressedSizes);
    149 
    150 	const TestParameters	m_parameters;
    151 	const deUint32			m_blockWidth;
    152 	const deUint32			m_blockHeight;
    153 	const deUint32			m_levelCount;
    154 	const UVec3				m_layerSize;
    155 
    156 private:
    157 	deUint32				findMipMapLevelCount			();
    158 };
    159 
    160 deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
    161 {
    162 	deUint32 levelCount = 1;
    163 
     164 	// We cannot use mipmap levels whose resolution is smaller than the block size,
     165 	// so reduce the number of mipmap levels accordingly.
    166 	if (m_parameters.useMipmaps)
    167 	{
    168 		deUint32 w = m_parameters.size.x();
    169 		deUint32 h = m_parameters.size.y();
    170 
    171 		DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);
    172 
    173 		while (w > m_blockWidth && h > m_blockHeight)
    174 		{
    175 			w >>= 1;
    176 			h >>= 1;
    177 
    178 			if (w > m_blockWidth && h > m_blockHeight)
    179 				levelCount++;
    180 		}
    181 
    182 		DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
    183 		DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
    184 	}
    185 
    186 	return levelCount;
    187 }
    188 
    189 BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
    190 	: TestInstance	(context)
    191 	, m_parameters	(parameters)
    192 	, m_blockWidth	(getBlockWidth(m_parameters.formatCompressed))
    193 	, m_blockHeight	(getBlockHeight(m_parameters.formatCompressed))
    194 	, m_levelCount	(findMipMapLevelCount())
    195 	, m_layerSize	(getLayerSize(m_parameters.imageType, m_parameters.size))
    196 {
    197 	DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
    198 }
    199 
    200 deUint32 BasicTranscodingTestInstance::getLevelCount()
    201 {
    202 	return m_levelCount;
    203 }
    204 
    205 deUint32 BasicTranscodingTestInstance::getLayerCount()
    206 {
    207 	return m_parameters.size.z();
    208 }
    209 
    210 UVec3 BasicTranscodingTestInstance::getLayerDims()
    211 {
    212 	return m_layerSize;
    213 }
    214 
    215 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
    216 {
    217 	vector<UVec3>	levelSizes;
    218 	const deUint32	levelCount = getLevelCount();
    219 
    220 	DE_ASSERT(m_parameters.imageType == IMAGE_TYPE_2D || m_parameters.imageType == IMAGE_TYPE_2D_ARRAY);
    221 
    222 	baseSize.z() = 1u;
    223 
    224 	levelSizes.push_back(baseSize);
    225 
    226 	while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
    227 	{
    228 		baseSize.x() = deMax32(baseSize.x() >> 1, 1);
    229 		baseSize.y() = deMax32(baseSize.y() >> 1, 1);
    230 		levelSizes.push_back(baseSize);
    231 	}
    232 
    233 	DE_ASSERT(levelSizes.size() == getLevelCount());
    234 
    235 	return levelSizes;
    236 }
    237 
    238 vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
    239 {
    240 	vector<UVec3> levelSizes;
    241 	vector<UVec3>::const_iterator it;
    242 
    243 	for (it = uncompressedSizes.begin(); it != uncompressedSizes.end(); it++)
    244 		levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, *it));
    245 
    246 	return levelSizes;
    247 }
    248 
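// Fills 'toFill' with a reversed and a direct copy of an edge-case bit pattern (infinities, NaNs; layer/level 0 only),
// then with seeded pseudo-random data, and finally strips values that the uncompressed view format cannot
// round-trip (most-negative SNORM values, float/half-float INF, NaN and denormals).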
    249 void BasicTranscodingTestInstance::generateData (deUint8*		toFill,
    250 												 const size_t	size,
    251 												 const VkFormat format,
    252 												 const deUint32 layer,
    253 												 const deUint32 level)
    254 {
    255 	const deUint8 pattern[] =
    256 	{
    257 		// 64-bit values
    258 		0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
    259 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    260 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
    261 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    262 		0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
    263 		0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    264 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
    265 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
    266 		0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
    267 		0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
    268 		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Positive infinity
    269 		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Negative infinity
    270 		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,		// Start of a signalling NaN (NANS)
    271 		0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a signalling NaN (NANS)
    272 		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,		// Start of a signalling NaN (NANS)
    273 		0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a signalling NaN (NANS)
    274 		0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Start of a quiet NaN (NANQ)
     275 		0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a quiet NaN (NANQ)
    276 		0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Start of a quiet NaN (NANQ)
    277 		0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a quiet NaN (NANQ)
    278 		// 32-bit values
    279 		0x7F, 0x80, 0x00, 0x00,								// Positive infinity
    280 		0xFF, 0x80, 0x00, 0x00,								// Negative infinity
    281 		0x7F, 0x80, 0x00, 0x01,								// Start of a signalling NaN (NANS)
    282 		0x7F, 0xBF, 0xFF, 0xFF,								// End of a signalling NaN (NANS)
    283 		0xFF, 0x80, 0x00, 0x01,								// Start of a signalling NaN (NANS)
    284 		0xFF, 0xBF, 0xFF, 0xFF,								// End of a signalling NaN (NANS)
    285 		0x7F, 0xC0, 0x00, 0x00,								// Start of a quiet NaN (NANQ)
     286 		0x7F, 0xFF, 0xFF, 0xFF,								// End of a quiet NaN (NANQ)
    287 		0xFF, 0xC0, 0x00, 0x00,								// Start of a quiet NaN (NANQ)
    288 		0xFF, 0xFF, 0xFF, 0xFF,								// End of a quiet NaN (NANQ)
    289 		0xAA, 0xAA, 0xAA, 0xAA,
    290 		0x55, 0x55, 0x55, 0x55,
    291 	};
    292 
    293 	deUint8*	start		= toFill;
    294 	size_t		sizeToRnd	= size;
    295 
    296 	// Pattern part
    297 	if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
    298 	{
     299 		// Reversed pattern (bytes copied in reverse order)
    300 		for (size_t i = 0; i < sizeof(pattern); i++)
    301 			start[sizeof(pattern) - i - 1] = pattern[i];
    302 
    303 		start		+= sizeof(pattern);
    304 		sizeToRnd	-= sizeof(pattern);
    305 
    306 		// Direct pattern
    307 		deMemcpy(start, pattern, sizeof(pattern));
    308 
    309 		start		+= sizeof(pattern);
    310 		sizeToRnd	-= sizeof(pattern);
    311 	}
    312 
    313 	// Random part
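	// The seed combines layer, level and format so every subresource gets distinct but reproducible data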
    314 	{
    315 		DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);
    316 
    317 		deUint32*	start32		= reinterpret_cast<deUint32*>(start);
    318 		size_t		sizeToRnd32	= sizeToRnd / sizeof(deUint32);
    319 		deUint32	seed		= (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
    320 		Random		rnd			(seed);
    321 
    322 		for (size_t i = 0; i < sizeToRnd32; i++)
    323 			start32[i] = rnd.getUint32();
    324 	}
    325 
    326 	{
    327 		// Remove certain values that may not be preserved based on the uncompressed view format
    328 		if (isSnormFormat(m_parameters.formatUncompressed))
    329 		{
    330 			for (size_t i = 0; i < size; i += 2)
    331 			{
     332 				// SNORM fix: a write operation in SNORM format
     333 				// replaces 0x00 0x80 with 0x01 0x80
    334 				if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
    335 					toFill[i+1] = 0x81;
    336 			}
    337 		}
    338 		else if (isFloatFormat(m_parameters.formatUncompressed))
    339 		{
    340 			tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);
    341 
    342 			if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
    343 			{
    344 				for (size_t i = 0; i < size; i += 2)
    345 				{
    346 					// HALF_FLOAT fix: remove INF and NaN
    347 					if ((toFill[i+1] & 0x7C) == 0x7C)
    348 						toFill[i+1] = 0x00;
    349 				}
    350 			}
    351 			else if (textureFormat.type == tcu::TextureFormat::FLOAT)
    352 			{
    353 				for (size_t i = 0; i < size; i += 4)
    354 				{
     355 					// Clear half-float INF/NaN bit patterns in the low 16 bits of each 32-bit word
    356 					if ((toFill[i+1] & 0x7C) == 0x7C)
    357 						toFill[i+1] = 0x00;
    358 				}
    359 
    360 				for (size_t i = 0; i < size; i += 4)
    361 				{
    362 					// FLOAT fix: remove INF, NaN, and denorm
    363 					// Little endian fix
    364 					if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
    365 						toFill[i+3] = 0x01;
    366 					// Big endian fix
    367 					if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
    368 						toFill[i+0] = 0x01;
    369 				}
    370 			}
    371 		}
    372 	}
    373 }
    374 
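// Compute-path variant: accesses the compressed data through uncompressed-format views in a compute shader
// and verifies both the transcoded bytes and the decompressed output.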
    375 class BasicComputeTestInstance : public BasicTranscodingTestInstance
    376 {
    377 public:
    378 					BasicComputeTestInstance	(Context&							context,
    379 												const TestParameters&				parameters);
    380 	TestStatus		iterate						(void);
    381 protected:
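	// Bookkeeping for one test image slot: the VkImage wrappers, their image views and the create infos they were built from.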
    382 	struct ImageData
    383 	{
    384 		deUint32			getImagesCount		(void)									{ return static_cast<deUint32>(images.size());		}
    385 		deUint32			getImageViewCount	(void)									{ return static_cast<deUint32>(imagesViews.size());	}
    386 		deUint32			getImageInfoCount	(void)									{ return static_cast<deUint32>(imagesInfos.size());	}
    387 		VkImage				getImage			(const deUint32				ndx)		{ return **images[ndx]->get();						}
    388 		VkImageView			getImageView		(const deUint32				ndx)		{ return **imagesViews[ndx];						}
    389 		VkImageCreateInfo	getImageInfo		(const deUint32				ndx)		{ return imagesInfos[ndx];							}
    390 		void				addImage			(MovePtr<Image>				image)		{ images.push_back(makeVkSharedPtr(image));			}
    391 		void				addImageView		(Move<VkImageView>			imageView)	{ imagesViews.push_back(makeVkSharedPtr(imageView));}
    392 		void				addImageInfo		(const VkImageCreateInfo	imageInfo)	{ imagesInfos.push_back(imageInfo);					}
    393 		void				resetViews			()										{ imagesViews.clear();								}
    394 	private:
    395 		vector<ImageSp>				images;
    396 		vector<ImageViewSp>			imagesViews;
    397 		vector<VkImageCreateInfo>	imagesInfos;
    398 	};
    399 	void			copyDataToImage				(const VkCommandBuffer&				cmdBuffer,
    400 												 ImageData&							imageData,
    401 												 const vector<UVec3>&				mipMapSizes,
    402 												 const bool							isCompressed);
    403 	virtual void	executeShader				(const VkCommandBuffer&				cmdBuffer,
    404 												 const VkDescriptorSetLayout&		descriptorSetLayout,
    405 												 const VkDescriptorPool&			descriptorPool,
    406 												vector<ImageData>&					imageData);
    407 	bool			copyResultAndCompare		(const VkCommandBuffer&				cmdBuffer,
    408 												 const VkImage&						uncompressed,
    409 												 const VkDeviceSize					offset,
    410 												 const UVec3&						size);
    411 	void			descriptorSetUpdate			(VkDescriptorSet					descriptorSet,
    412 												 const VkDescriptorImageInfo*		descriptorImageInfos);
    413 	void			createImageInfos			(ImageData&							imageData,
    414 												 const vector<UVec3>&				mipMapSizes,
    415 												 const bool							isCompressed);
    416 	bool			decompressImage				(const VkCommandBuffer&				cmdBuffer,
    417 												 vector<ImageData>&					imageData,
    418 												 const vector<UVec3>&				mipMapSizes);
    419 	vector<deUint8>	m_data;
    420 };
    421 
    422 
    423 BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
     424 	: BasicTranscodingTestInstance	(context, parameters)
    425 {
    426 }
    427 
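// Creates the compressed image and its per-mip/per-layer uncompressed-format views, uploads the generated data,
// runs the compute shader, compares the transcoded bytes against the source data and finally cross-checks the
// decompressed output.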
    428 TestStatus BasicComputeTestInstance::iterate (void)
    429 {
    430 	const DeviceInterface&					vk					= m_context.getDeviceInterface();
    431 	const VkDevice							device				= m_context.getDevice();
    432 	const deUint32							queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
    433 	Allocator&								allocator			= m_context.getDefaultAllocator();
    434 	const Unique<VkCommandPool>				cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
    435 	const Unique<VkCommandBuffer>			cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
    436 	const vector<UVec3>						mipMapSizes			= m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, m_parameters.size);
    437 	vector<ImageData>						imageData			(m_parameters.imagesCount);
    438 	const deUint32							compressedNdx		= 0u;
    439 	const deUint32							resultImageNdx		= m_parameters.imagesCount -1u;
    440 
    441 	for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
    442 	{
     443 		const bool isCompressed = (compressedNdx == imageNdx);
    444 		createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
    445 		for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
    446 		{
    447 			imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
    448 			if (isCompressed)
    449 			{
    450 				const VkImageViewUsageCreateInfo	imageViewUsageKHR	=
    451 				{
    452 					VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,				//VkStructureType		sType;
    453 					DE_NULL,														//const void*			pNext;
    454 					m_parameters.compressedImageUsage,								//VkImageUsageFlags		usage;
    455 				};
    456 				for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
    457 				for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
    458 				{
    459 					imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
    460 														mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
    461 														makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
    462 														&imageViewUsageKHR));
    463 				}
    464 			}
    465 			else
    466 			{
    467 				imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
    468 													mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
    469 													makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
    470 			}
    471 		}
    472 	}
    473 
    474 	{
    475 		size_t size = 0ull;
    476 		for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
    477 		{
    478 			size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
    479 		}
    480 		m_data.resize(size);
    481 		generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
    482 	}
    483 
    484 	switch(m_parameters.operation)
    485 	{
    486 		case OPERATION_IMAGE_LOAD:
    487 		case OPERATION_TEXEL_FETCH:
    488 		case OPERATION_TEXTURE:
    489 			copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
    490 			break;
    491 		case OPERATION_IMAGE_STORE:
    492 			copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
    493 			break;
    494 		default:
    495 			DE_ASSERT(false);
    496 			break;
    497 	}
    498 
    499 	{
    500 		Move<VkDescriptorSetLayout>	descriptorSetLayout;
    501 		Move<VkDescriptorPool>		descriptorPool;
    502 
    503 		DescriptorSetLayoutBuilder	descriptorSetLayoutBuilder;
    504 		DescriptorPoolBuilder		descriptorPoolBuilder;
    505 		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
    506 		{
    507 			switch(m_parameters.operation)
    508 			{
    509 				case OPERATION_IMAGE_LOAD:
    510 				case OPERATION_IMAGE_STORE:
    511 					descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
    512 					descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
    513 					break;
    514 				case OPERATION_TEXEL_FETCH:
    515 				case OPERATION_TEXTURE:
    516 					descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
    517 					descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
    518 					break;
    519 				default:
    520 					DE_ASSERT(false);
    521 					break;
    522 			}
    523 		}
    524 		descriptorSetLayout	= descriptorSetLayoutBuilder.build(vk, device);
    525 		descriptorPool		= descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
    526 		executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);
    527 
    528 		{
    529 			VkDeviceSize offset = 0ull;
    530 			for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
    531 			for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
    532 			{
    533 				const deUint32	imageNdx	= layerNdx + mipNdx * getLayerCount();
    534 				const UVec3		size		= UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
    535 													imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
    536 													imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
    537 				if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
    538 					return TestStatus::fail("Fail");
    539 				offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
    540 			}
    541 		}
     542 	}
     543 	if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
     544 		return TestStatus::fail("Fail");
    545 	return TestStatus::pass("Pass");
    546 }
    547 
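// Stages m_data in a host-visible buffer and records one buffer-to-image copy per mip level of every image
// (covering all array layers), then submits the command buffer and waits for completion.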
    548 void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer&	cmdBuffer,
    549 												ImageData&				imageData,
    550 												const vector<UVec3>&	mipMapSizes,
    551 												const bool				isCompressed)
    552 {
    553 	const DeviceInterface&		vk			= m_context.getDeviceInterface();
    554 	const VkDevice				device		= m_context.getDevice();
    555 	const VkQueue				queue		= m_context.getUniversalQueue();
    556 	Allocator&					allocator	= m_context.getDefaultAllocator();
    557 
    558 	Buffer						imageBuffer	(vk, device, allocator,
    559 												makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
    560 												MemoryRequirement::HostVisible);
    561 	VkDeviceSize				offset		= 0ull;
    562 	{
    563 		const Allocation& alloc = imageBuffer.getAllocation();
    564 		deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
    565 		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_data.size());
    566 	}
    567 
    568 	beginCommandBuffer(vk, cmdBuffer);
    569 	const VkImageSubresourceRange	subresourceRange		=
    570 	{
    571 		VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
    572 		0u,											//deUint32				baseMipLevel
    573 		imageData.getImageInfo(0u).mipLevels,		//deUint32				levelCount
    574 		0u,											//deUint32				baseArrayLayer
    575 		imageData.getImageInfo(0u).arrayLayers		//deUint32				layerCount
    576 	};
    577 
    578 	for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
    579 	{
    580 		const VkImageMemoryBarrier		preCopyImageBarrier		= makeImageMemoryBarrier(
    581 																	0u, VK_ACCESS_TRANSFER_WRITE_BIT,
    582 																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
    583 																	imageData.getImage(imageNdx), subresourceRange);
    584 
     585 		const VkBufferMemoryBarrier		flushHostCopyBarrier	= makeBufferMemoryBarrier(
    586 																	VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
    587 																	imageBuffer.get(), 0ull, m_data.size());
    588 
    589 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
     590 				(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &flushHostCopyBarrier, 1u, &preCopyImageBarrier);
    591 
    592 		for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
    593 		{
    594 			const VkExtent3D				imageExtent				= isCompressed ?
    595 																		makeExtent3D(mipMapSizes[mipNdx]) :
    596 																		imageData.getImageInfo(imageNdx).extent;
    597 			const VkBufferImageCopy			copyRegion				=
    598 			{
    599 				offset,																												//VkDeviceSize				bufferOffset;
    600 				0u,																													//deUint32					bufferRowLength;
    601 				0u,																													//deUint32					bufferImageHeight;
    602 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers),	//VkImageSubresourceLayers	imageSubresource;
    603 				makeOffset3D(0, 0, 0),																								//VkOffset3D				imageOffset;
    604 				imageExtent,																										//VkExtent3D				imageExtent;
    605 			};
    606 
    607 			vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
    608 			offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
     609 						UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed ? imageExtent.height : imageExtent.height * m_blockHeight, imageExtent.depth)) *
    610 						imageData.getImageInfo(imageNdx).arrayLayers;
    611 		}
    612 	}
    613 	endCommandBuffer(vk, cmdBuffer);
    614 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
    615 }
    616 
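// Builds a nearest-filtering sampler and one descriptor set per image view, transitions the images to
// VK_IMAGE_LAYOUT_GENERAL and dispatches the compute pipeline once per view with the uncompressed image extent.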
    617 void BasicComputeTestInstance::executeShader (const VkCommandBuffer&		cmdBuffer,
    618 											  const VkDescriptorSetLayout&	descriptorSetLayout,
    619 											  const VkDescriptorPool&		descriptorPool,
    620 											  vector<ImageData>&			imageData)
    621 {
    622 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
    623 	const VkDevice					device					= m_context.getDevice();
    624 	const VkQueue					queue					= m_context.getUniversalQueue();
    625 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
    626 	vector<SharedVkDescriptorSet>	descriptorSets			(imageData[0].getImageViewCount());
    627 	const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, descriptorSetLayout));
    628 	const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
    629 	Move<VkSampler>					sampler;
    630 	{
    631 		const VkSamplerCreateInfo createInfo =
    632 		{
    633 			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
    634 			DE_NULL,									//const void*			pNext;
    635 			0u,											//VkSamplerCreateFlags	flags;
    636 			VK_FILTER_NEAREST,							//VkFilter				magFilter;
    637 			VK_FILTER_NEAREST,							//VkFilter				minFilter;
    638 			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
    639 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
    640 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
    641 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
    642 			0.0f,										//float					mipLodBias;
    643 			VK_FALSE,									//VkBool32				anisotropyEnable;
    644 			1.0f,										//float					maxAnisotropy;
    645 			VK_FALSE,									//VkBool32				compareEnable;
    646 			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
    647 			0.0f,										//float					minLod;
    648 			0.0f,										//float					maxLod;
    649 			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
    650 			VK_FALSE,									//VkBool32				unnormalizedCoordinates;
    651 		};
    652 		sampler = createSampler(vk, device, &createInfo);
    653 	}
    654 
    655 	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
    656 	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
    657 	{
    658 		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
    659 		for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
    660 		{
    661 			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
    662 															imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
    663 		}
    664 	}
    665 
    666 	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
    667 		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
    668 
    669 	beginCommandBuffer(vk, cmdBuffer);
    670 	{
    671 		const VkImageSubresourceRange	compressedRange				=
    672 		{
    673 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
    674 			0u,											//deUint32				baseMipLevel
    675 			imageData[0].getImageInfo(0u).mipLevels,	//deUint32				levelCount
    676 			0u,											//deUint32				baseArrayLayer
    677 			imageData[0].getImageInfo(0u).arrayLayers	//deUint32				layerCount
    678 		};
    679 		const VkImageSubresourceRange	uncompressedRange			=
    680 		{
    681 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
    682 			0u,											//deUint32				baseMipLevel
    683 			1u,											//deUint32				levelCount
    684 			0u,											//deUint32				baseArrayLayer
    685 			1u											//deUint32				layerCount
    686 		};
    687 
    688 		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
    689 
    690 		vector<VkImageMemoryBarrier>		preShaderImageBarriers;
    691 		preShaderImageBarriers.resize(descriptorSets.size() + 1u);
    692 		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
    693 		{
    694 			preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
    695 												VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
    696 												VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
    697 												imageData[1].getImage(imageNdx), uncompressedRange);
    698 		}
    699 
    700 		preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
    701 															VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
    702 															VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
    703 															imageData[0].getImage(0), compressedRange);
    704 
    705 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
    706 			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
    707 			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
    708 
     709 		for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
     710 		{
     711 			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx * m_parameters.imagesCount]);
    712 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
    713 			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
    714 										imageData[1].getImageInfo(ndx).extent.height,
    715 										imageData[1].getImageInfo(ndx).extent.depth);
    716 		}
    717 	}
    718 	endCommandBuffer(vk, cmdBuffer);
    719 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
    720 }
    721 
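// Copies the uncompressed image into a host-visible buffer and byte-compares it against the generated
// source data starting at 'offset'.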
    722 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer&	cmdBuffer,
    723 													 const VkImage&			uncompressed,
    724 													 const VkDeviceSize		offset,
    725 													 const UVec3&			size)
    726 {
    727 	const DeviceInterface&	vk					= m_context.getDeviceInterface();
    728 	const VkQueue			queue				= m_context.getUniversalQueue();
    729 	const VkDevice			device				= m_context.getDevice();
    730 	Allocator&				allocator			= m_context.getDefaultAllocator();
    731 
    732 	VkDeviceSize			imageResultSize		= getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
    733 	Buffer					imageBufferResult	(vk, device, allocator,
    734 													makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
    735 													MemoryRequirement::HostVisible);
    736 
    737 	beginCommandBuffer(vk, cmdBuffer);
    738 	{
    739 		const VkImageSubresourceRange	subresourceRange	=
    740 		{
    741 			VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags	aspectMask
    742 			0u,																	//deUint32				baseMipLevel
    743 			1u,																	//deUint32				levelCount
    744 			0u,																	//deUint32				baseArrayLayer
    745 			1u																	//deUint32				layerCount
    746 		};
    747 
    748 		const VkBufferImageCopy			copyRegion			=
    749 		{
    750 			0ull,																//	VkDeviceSize				bufferOffset;
    751 			0u,																	//	deUint32					bufferRowLength;
    752 			0u,																	//	deUint32					bufferImageHeight;
    753 			makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
    754 			makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
    755 			makeExtent3D(size),													//	VkExtent3D					imageExtent;
    756 		};
    757 
    758 		const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
    759 																VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
    760 																VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
    761 																uncompressed, subresourceRange);
    762 
    763 		const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
    764 													VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
    765 													imageBufferResult.get(), 0ull, imageResultSize);
    766 
    767 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
    768 		vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, &copyRegion);
    769 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
    770 	}
    771 	endCommandBuffer(vk, cmdBuffer);
    772 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
    773 
    774 	const Allocation& allocResult = imageBufferResult.getAllocation();
    775 	invalidateMappedMemoryRange(vk, device, allocResult.getMemory(), allocResult.getOffset(), imageResultSize);
    776 	if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
    777 		return true;
    778 	return false;
    779 }
    780 
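// Writes the storage-image (and, for sampled operations, combined-image-sampler) descriptors of a single set.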
    781 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
    782 {
    783 	const DeviceInterface&		vk		= m_context.getDeviceInterface();
    784 	const VkDevice				device	= m_context.getDevice();
    785 	DescriptorSetUpdateBuilder	descriptorSetUpdateBuilder;
    786 
    787 	switch(m_parameters.operation)
    788 	{
    789 		case OPERATION_IMAGE_LOAD:
    790 		case OPERATION_IMAGE_STORE:
    791 		{
    792 			for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
    793 				descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
    794 
    795 			break;
    796 		}
    797 
    798 		case OPERATION_TEXEL_FETCH:
    799 		case OPERATION_TEXTURE:
    800 		{
    801 			for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
    802 			{
    803 				descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
    804 					bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
    805 			}
    806 
    807 			break;
    808 		}
    809 
    810 		default:
    811 			DE_ASSERT(false);
    812 	}
    813 	descriptorSetUpdateBuilder.update(vk, device);
    814 }
    815 
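// Builds the VkImageCreateInfo list: a single mutable, block-texel-view-compatible info for the compressed image,
// or one info per mip level and layer (sized in compressed blocks) for the uncompressed images.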
    816 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
    817 {
    818 	const VkImageType			imageType			= mapImageType(m_parameters.imageType);
    819 
    820 	if (isCompressed)
    821 	{
    822 		const VkExtent3D			extentCompressed	= makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
    823 		const VkImageCreateInfo compressedInfo =
    824 		{
    825 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,					// VkStructureType			sType;
    826 			DE_NULL,												// const void*				pNext;
    827 			VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
    828 			VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
    829 			VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,					// VkImageCreateFlags		flags;
    830 			imageType,												// VkImageType				imageType;
    831 			m_parameters.formatCompressed,							// VkFormat					format;
    832 			extentCompressed,										// VkExtent3D				extent;
    833 			static_cast<deUint32>(mipMapSizes.size()),				// deUint32					mipLevels;
    834 			getLayerCount(),										// deUint32					arrayLayers;
    835 			VK_SAMPLE_COUNT_1_BIT,									// VkSampleCountFlagBits	samples;
    836 			VK_IMAGE_TILING_OPTIMAL,								// VkImageTiling			tiling;
    837 			VK_IMAGE_USAGE_SAMPLED_BIT |
    838 			VK_IMAGE_USAGE_STORAGE_BIT |
    839 			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
    840 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,						// VkImageUsageFlags		usage;
    841 			VK_SHARING_MODE_EXCLUSIVE,								// VkSharingMode			sharingMode;
    842 			0u,														// deUint32					queueFamilyIndexCount;
    843 			DE_NULL,												// const deUint32*			pQueueFamilyIndices;
    844 			VK_IMAGE_LAYOUT_UNDEFINED,								// VkImageLayout			initialLayout;
    845 		};
    846 		imageData.addImageInfo(compressedInfo);
    847 	}
    848 	else
    849 	{
    850 		for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
    851 		for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
    852 		{
    853 			const VkExtent3D		extentUncompressed	= m_parameters.useMipmaps ?
    854 															makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
    855 															makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, m_parameters.size));
    856 			const VkImageCreateInfo	uncompressedInfo	=
    857 			{
    858 				VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,				// VkStructureType			sType;
    859 				DE_NULL,											// const void*				pNext;
    860 				0u,													// VkImageCreateFlags		flags;
    861 				imageType,											// VkImageType				imageType;
    862 				m_parameters.formatUncompressed,					// VkFormat					format;
    863 				extentUncompressed,									// VkExtent3D				extent;
    864 				1u,													// deUint32					mipLevels;
    865 				1u,													// deUint32					arrayLayers;
    866 				VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits	samples;
    867 				VK_IMAGE_TILING_OPTIMAL,							// VkImageTiling			tiling;
    868 				m_parameters.uncompressedImageUsage |
    869 				VK_IMAGE_USAGE_SAMPLED_BIT,							// VkImageUsageFlags		usage;
    870 				VK_SHARING_MODE_EXCLUSIVE,							// VkSharingMode			sharingMode;
    871 				0u,													// deUint32					queueFamilyIndexCount;
    872 				DE_NULL,											// const deUint32*			pQueueFamilyIndices;
    873 				VK_IMAGE_LAYOUT_UNDEFINED,							// VkImageLayout			initialLayout;
    874 			};
    875 			imageData.addImageInfo(uncompressedInfo);
    876 		}
    877 	}
    878 }
    879 
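// For every mip level and layer, decodes both the shader-written data and the original compressed data to RGBA8
// with the 'decompress' compute shader and compares the two results.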
    880 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer&	cmdBuffer,
    881 												 vector<ImageData>&		imageData,
    882 												 const vector<UVec3>&	mipMapSizes)
    883 {
    884 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
    885 	const VkDevice					device					= m_context.getDevice();
    886 	const VkQueue					queue					= m_context.getUniversalQueue();
    887 	Allocator&						allocator				= m_context.getDefaultAllocator();
    888 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
    889 	const VkImage&					compressed				= imageData[0].getImage(0);
    890 
    891 	for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
    892 		imageData[ndx].resetViews();
    893 
    894 	for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
    895 	for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
    896 	{
    897 		const bool						layoutShaderReadOnly	= (layerNdx % 2u) == 1;
    898 		const deUint32					imageNdx				= layerNdx + mipNdx * getLayerCount();
    899 		const VkExtent3D				extentCompressed		= makeExtent3D(mipMapSizes[mipNdx]);
    900 		const VkImage&					uncompressed			= imageData[m_parameters.imagesCount -1].getImage(imageNdx);
    901 		const VkExtent3D				extentUncompressed		= imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
    902 		const VkDeviceSize				bufferSizeComp			= getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
    903 
    904 		const VkImageCreateInfo			decompressedImageInfo	=
    905 		{
    906 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,								// VkStructureType			sType;
    907 			DE_NULL,															// const void*				pNext;
    908 			0u,																	// VkImageCreateFlags		flags;
    909 			VK_IMAGE_TYPE_2D,													// VkImageType				imageType;
    910 			VK_FORMAT_R8G8B8A8_UNORM,											// VkFormat					format;
    911 			extentCompressed,													// VkExtent3D				extent;
    912 			1u,																	// deUint32					mipLevels;
    913 			1u,																	// deUint32					arrayLayers;
    914 			VK_SAMPLE_COUNT_1_BIT,												// VkSampleCountFlagBits	samples;
    915 			VK_IMAGE_TILING_OPTIMAL,											// VkImageTiling			tiling;
    916 			VK_IMAGE_USAGE_SAMPLED_BIT |
    917 			VK_IMAGE_USAGE_STORAGE_BIT |
    918 			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
    919 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,									// VkImageUsageFlags		usage;
    920 			VK_SHARING_MODE_EXCLUSIVE,											// VkSharingMode			sharingMode;
    921 			0u,																	// deUint32					queueFamilyIndexCount;
    922 			DE_NULL,															// const deUint32*			pQueueFamilyIndices;
    923 			VK_IMAGE_LAYOUT_UNDEFINED,											// VkImageLayout			initialLayout;
    924 		};
    925 
    926 		const VkImageCreateInfo			compressedImageInfo		=
    927 		{
    928 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,								// VkStructureType			sType;
    929 			DE_NULL,															// const void*				pNext;
    930 			0u,																	// VkImageCreateFlags		flags;
    931 			VK_IMAGE_TYPE_2D,													// VkImageType				imageType;
    932 			m_parameters.formatCompressed,										// VkFormat					format;
    933 			extentCompressed,													// VkExtent3D				extent;
    934 			1u,																	// deUint32					mipLevels;
    935 			1u,																	// deUint32					arrayLayers;
    936 			VK_SAMPLE_COUNT_1_BIT,												// VkSampleCountFlagBits	samples;
    937 			VK_IMAGE_TILING_OPTIMAL,											// VkImageTiling			tiling;
    938 			VK_IMAGE_USAGE_SAMPLED_BIT |
    939 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,									// VkImageUsageFlags		usage;
    940 			VK_SHARING_MODE_EXCLUSIVE,											// VkSharingMode			sharingMode;
    941 			0u,																	// deUint32					queueFamilyIndexCount;
    942 			DE_NULL,															// const deUint32*			pQueueFamilyIndices;
    943 			VK_IMAGE_LAYOUT_UNDEFINED,											// VkImageLayout			initialLayout;
    944 		};
    945 		const VkImageUsageFlags				compressedViewUsageFlags	= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    946 		const VkImageViewUsageCreateInfo	compressedViewUsageCI		=
    947 		{
    948 			VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,					//VkStructureType		sType;
    949 			DE_NULL,															//const void*			pNext;
    950 			compressedViewUsageFlags,											//VkImageUsageFlags		usage;
    951 		};
    952 		Image							resultImage				(vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
    953 		Image							referenceImage			(vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
    954 		Image							uncompressedImage		(vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
    955 		Move<VkImageView>				resultView				= makeImageView(vk, device, resultImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
    956 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
    957 		Move<VkImageView>				referenceView			= makeImageView(vk, device, referenceImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
    958 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
    959 		Move<VkImageView>				uncompressedView		= makeImageView(vk, device, uncompressedImage.get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
    960 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
    961 		Move<VkImageView>				compressedView			= makeImageView(vk, device, compressed, mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
    962 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
    963 		Move<VkDescriptorSetLayout>		descriptorSetLayout		= DescriptorSetLayoutBuilder()
    964 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
    965 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
    966 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
    967 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
    968 																	.build(vk, device);
    969 		Move<VkDescriptorPool>			descriptorPool			= DescriptorPoolBuilder()
    970 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
    971 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
    972 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
    973 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
    974 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
    975 
    976 		Move<VkDescriptorSet>			descriptorSet			= makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
    977 		const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
    978 		const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
    979 		const VkDeviceSize				bufferSize				= getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), VK_FORMAT_R8G8B8A8_UNORM);
    980 		Buffer							resultBuffer			(vk, device, allocator,
    981 																	makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
    982 		Buffer							referenceBuffer			(vk, device, allocator,
    983 																	makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
    984 		Buffer							transferBuffer			(vk, device, allocator,
    985 																	makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
    986 		Move<VkSampler>					sampler;
    987 		{
    988 			const VkSamplerCreateInfo createInfo	=
    989 			{
    990 				VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,							//VkStructureType		sType;
    991 				DE_NULL,														//const void*			pNext;
    992 				0u,																//VkSamplerCreateFlags	flags;
    993 				VK_FILTER_NEAREST,												//VkFilter				magFilter;
    994 				VK_FILTER_NEAREST,												//VkFilter				minFilter;
    995 				VK_SAMPLER_MIPMAP_MODE_NEAREST,									//VkSamplerMipmapMode	mipmapMode;
    996 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeU;
    997 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeV;
    998 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeW;
    999 				0.0f,															//float					mipLodBias;
   1000 				VK_FALSE,														//VkBool32				anisotropyEnable;
   1001 				1.0f,															//float					maxAnisotropy;
   1002 				VK_FALSE,														//VkBool32				compareEnable;
   1003 				VK_COMPARE_OP_EQUAL,											//VkCompareOp			compareOp;
   1004 				0.0f,															//float					minLod;
   1005 				1.0f,															//float					maxLod;
   1006 				VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,						//VkBorderColor			borderColor;
   1007 				VK_FALSE,														//VkBool32				unnormalizedCoordinates;
   1008 			};
   1009 			sampler = createSampler(vk, device, &createInfo);
   1010 		}
   1011 
   1012 		VkDescriptorImageInfo			descriptorImageInfos[]	=
   1013 		{
   1014 			makeDescriptorImageInfo(*sampler,	*uncompressedView,	layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
   1015 			makeDescriptorImageInfo(*sampler,	*compressedView,	layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
   1016 			makeDescriptorImageInfo(DE_NULL,	*resultView,		VK_IMAGE_LAYOUT_GENERAL),
   1017 			makeDescriptorImageInfo(DE_NULL,	*referenceView,		VK_IMAGE_LAYOUT_GENERAL)
   1018 		};
   1019 		DescriptorSetUpdateBuilder()
   1020 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
   1021 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
   1022 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
   1023 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
   1024 			.update(vk, device);
   1025 
   1026 
   1027 		beginCommandBuffer(vk, cmdBuffer);
   1028 		{
   1029 			const VkImageSubresourceRange	subresourceRange		=
   1030 			{
   1031 				VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags			aspectMask
   1032 				0u,																	//deUint32						baseMipLevel
   1033 				1u,																	//deUint32						levelCount
   1034 				0u,																	//deUint32						baseArrayLayer
   1035 				1u																	//deUint32						layerCount
   1036 			};
   1037 
   1038 			const VkImageSubresourceRange	subresourceRangeComp	=
   1039 			{
   1040 				VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags			aspectMask
   1041 				mipNdx,																//deUint32						baseMipLevel
   1042 				1u,																	//deUint32						levelCount
   1043 				layerNdx,															//deUint32						baseArrayLayer
   1044 				1u																	//deUint32						layerCount
   1045 			};
   1046 
   1047 			const VkBufferImageCopy			copyRegion				=
   1048 			{
   1049 				0ull,																//	VkDeviceSize				bufferOffset;
   1050 				0u,																	//	deUint32					bufferRowLength;
   1051 				0u,																	//	deUint32					bufferImageHeight;
   1052 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
   1053 				makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
   1054 				decompressedImageInfo.extent,										//	VkExtent3D					imageExtent;
   1055 			};
   1056 
   1057 			const VkBufferImageCopy			compressedCopyRegion	=
   1058 			{
   1059 				0ull,																//	VkDeviceSize				bufferOffset;
   1060 				0u,																	//	deUint32					bufferRowLength;
   1061 				0u,																	//	deUint32					bufferImageHeight;
   1062 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
   1063 				makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
   1064 				extentUncompressed,													//	VkExtent3D					imageExtent;
   1065 			};
   1066 
   1067 			{
   1068 
   1069 				const VkBufferMemoryBarrier		preCopyBufferBarriers	= makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
   1070 																			transferBuffer.get(), 0ull, bufferSizeComp);
   1071 
   1072 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
   1073 					(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
   1074 			}
   1075 
   1076 			vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
   1077 
   1078 			{
   1079 				const VkBufferMemoryBarrier		postCopyBufferBarriers	= makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1080 																			transferBuffer.get(), 0ull, bufferSizeComp);
   1081 
   1082 				const VkImageMemoryBarrier		preCopyImageBarriers	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
   1083 																			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
   1084 
   1085 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
   1086 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
   1087 			}
   1088 
   1089 			vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
   1090 
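         			// Bind the compute pipeline and the descriptor set updated above.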
   1091 			vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
   1092 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
   1093 
   1094 			{
   1095 				const VkImageMemoryBarrier		preShaderImageBarriers[]	=
   1096 				{
   1097 
   1098 					makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
   1099 						VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
   1100 						uncompressedImage.get(), subresourceRange),
   1101 
   1102 					makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
   1103 						VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
   1104 						compressed, subresourceRangeComp),
   1105 
   1106 					makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
   1107 						VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
   1108 						resultImage.get(), subresourceRange),
   1109 
   1110 					makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
   1111 						VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
   1112 						referenceImage.get(), subresourceRange)
   1113 				};
   1114 
   1115 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
   1116 					(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
   1117 					DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
   1118 			}
   1119 
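         			// Dispatch one workgroup per texel of the compressed image extent.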
   1120 			vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
   1121 
   1122 			{
   1123 				const VkImageMemoryBarrier		postShaderImageBarriers[]	=
   1124 				{
   1125 					makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1126 					VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
   1127 					resultImage.get(), subresourceRange),
   1128 
   1129 					makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1130 						VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
   1131 						referenceImage.get(), subresourceRange)
   1132 				};
   1133 
   1134 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
   1135 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
   1136 					DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
   1137 			}
   1138 
   1139 			vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, &copyRegion);
   1140 			vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, &copyRegion);
   1141 
   1142 			{
   1143 				const VkBufferMemoryBarrier		postCopyBufferBarrier[]		=
   1144 				{
   1145 					makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   1146 						resultBuffer.get(), 0ull, bufferSize),
   1147 
   1148 					makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   1149 						referenceBuffer.get(), 0ull, bufferSize),
   1150 				};
   1151 
   1152 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT,
   1153 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(postCopyBufferBarrier), postCopyBufferBarrier,
   1154 					0u, (const VkImageMemoryBarrier*)DE_NULL);
   1155 			}
   1156 		}
   1157 		endCommandBuffer(vk, cmdBuffer);
   1158 		submitCommandsAndWait(vk, device, queue, cmdBuffer);
   1159 
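         		// Compare the raw buffer contents; on mismatch, fall back to a fuzzy image comparison before failing.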
   1160 		const Allocation&		resultAlloc		= resultBuffer.getAllocation();
   1161 		const Allocation&		referenceAlloc	= referenceBuffer.getAllocation();
   1162 		invalidateMappedMemoryRange(vk, device, resultAlloc.getMemory(), resultAlloc.getOffset(), bufferSize);
   1163 		invalidateMappedMemoryRange(vk, device, referenceAlloc.getMemory(), referenceAlloc.getOffset(), bufferSize);
   1164 
   1165 		if (deMemCmp(resultAlloc.getHostPtr(), referenceAlloc.getHostPtr(), (size_t)bufferSize) != 0)
   1166 		{
   1167 			ConstPixelBufferAccess	resultPixels		(mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
   1168 			ConstPixelBufferAccess	referencePixels		(mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
   1169 
    1170 			if (!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
   1171 				return false;
   1172 		}
   1173 	}
   1174 
   1175 	return true;
   1176 }
   1177 
   1178 class ImageStoreComputeTestInstance : public BasicComputeTestInstance
   1179 {
   1180 public:
   1181 					ImageStoreComputeTestInstance	(Context&							context,
   1182 													 const TestParameters&				parameters);
   1183 protected:
   1184 	virtual void	executeShader					(const VkCommandBuffer&				cmdBuffer,
   1185 													 const VkDescriptorSetLayout&		descriptorSetLayout,
   1186 													 const VkDescriptorPool&			descriptorPool,
   1187 													 vector<ImageData>&					imageData);
   1188 private:
   1189 };
   1190 
   1191 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
    1192 	: BasicComputeTestInstance	(context, parameters)
   1193 {
   1194 }
   1195 
   1196 void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer&		cmdBuffer,
   1197 												   const VkDescriptorSetLayout&	descriptorSetLayout,
   1198 												   const VkDescriptorPool&		descriptorPool,
   1199 												   vector<ImageData>&			imageData)
   1200 {
   1201 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
   1202 	const VkDevice					device					= m_context.getDevice();
   1203 	const VkQueue					queue					= m_context.getUniversalQueue();
   1204 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
   1205 	vector<SharedVkDescriptorSet>	descriptorSets			(imageData[0].getImageViewCount());
   1206 	const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, descriptorSetLayout));
   1207 	const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
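         	// Nearest-filtering sampler with unnormalized coordinates; it is attached to every descriptor image info created below.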
   1208 	Move<VkSampler>					sampler;
   1209 	{
   1210 		const VkSamplerCreateInfo createInfo =
   1211 		{
   1212 			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
   1213 			DE_NULL,									//const void*			pNext;
   1214 			0u,											//VkSamplerCreateFlags	flags;
   1215 			VK_FILTER_NEAREST,							//VkFilter				magFilter;
   1216 			VK_FILTER_NEAREST,							//VkFilter				minFilter;
   1217 			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
   1218 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
   1219 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
   1220 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
   1221 			0.0f,										//float					mipLodBias;
   1222 			VK_FALSE,									//VkBool32				anisotropyEnable;
   1223 			1.0f,										//float					maxAnisotropy;
   1224 			VK_FALSE,									//VkBool32				compareEnable;
   1225 			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
   1226 			0.0f,										//float					minLod;
   1227 			0.0f,										//float					maxLod;
   1228 			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
   1229 			VK_TRUE,									//VkBool32				unnormalizedCoordinates;
   1230 		};
   1231 		sampler = createSampler(vk, device, &createInfo);
   1232 	}
   1233 
   1234 	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
   1235 	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
   1236 	{
   1237 		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
   1238 		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
   1239 		{
   1240 			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
   1241 															imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
   1242 		}
   1243 	}
   1244 
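         	// One descriptor set per image view; each set consumes its own slice of descriptorImageInfos.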
   1245 	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
   1246 		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
   1247 
   1248 	beginCommandBuffer(vk, cmdBuffer);
   1249 	{
   1250 		const VkImageSubresourceRange	compressedRange				=
   1251 		{
   1252 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
   1253 			0u,											//deUint32				baseMipLevel
   1254 			imageData[0].getImageInfo(0).mipLevels,		//deUint32				levelCount
   1255 			0u,											//deUint32				baseArrayLayer
   1256 			imageData[0].getImageInfo(0).arrayLayers	//deUint32				layerCount
   1257 		};
   1258 
   1259 		const VkImageSubresourceRange	uncompressedRange			=
   1260 		{
   1261 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
   1262 			0u,											//deUint32				baseMipLevel
   1263 			1u,											//deUint32				levelCount
   1264 			0u,											//deUint32				baseArrayLayer
   1265 			1u											//deUint32				layerCount
   1266 		};
   1267 
   1268 		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
   1269 
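         		// Transition all images to VK_IMAGE_LAYOUT_GENERAL before the store pass: imageData[1] and imageData[2] for shader writes, the compressed image (imageData[0]) for shader reads.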
   1270 		vector<VkImageMemoryBarrier>		preShaderImageBarriers	(descriptorSets.size() * 2u + 1u);
   1271 		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
   1272 		{
   1273 			preShaderImageBarriers[imageNdx]									= makeImageMemoryBarrier(
   1274 																					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
   1275 																					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
   1276 																					imageData[1].getImage(imageNdx), uncompressedRange);
   1277 
   1278 			preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()]	= makeImageMemoryBarrier(
   1279 																					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
   1280 																					VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
   1281 																					imageData[2].getImage(imageNdx), uncompressedRange);
   1282 		}
   1283 
   1284 		preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
   1285 																	VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
   1286 																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
   1287 																	imageData[0].getImage(0u), compressedRange);
   1288 
   1289 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
   1290 			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
   1291 			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
   1292 
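         		// For each view: update and bind its descriptor set, then dispatch over the corresponding uncompressed extent.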
    1293 		for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
   1294 		{
    1295 			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx * m_parameters.imagesCount]);
   1296 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
   1297 			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
   1298 										imageData[1].getImageInfo(ndx).extent.height,
   1299 										imageData[1].getImageInfo(ndx).extent.depth);
   1300 		}
   1301 	}
   1302 	endCommandBuffer(vk, cmdBuffer);
   1303 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
   1304 }
   1305 
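         // Transcoding through render-pass attachments: the compressed image is aliased by uncompressed-format views that are used as input or colour attachments.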
   1306 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
   1307 {
   1308 public:
   1309 										GraphicsAttachmentsTestInstance	(Context& context, const TestParameters& parameters);
   1310 	virtual TestStatus					iterate							(void);
   1311 
   1312 protected:
   1313 	virtual bool						isWriteToCompressedOperation	();
   1314 	VkImageCreateInfo					makeCreateImageInfo				(const VkFormat					format,
   1315 																		 const ImageType				type,
   1316 																		 const UVec3&					size,
   1317 																		 const VkImageUsageFlags		usageFlags,
   1318 																		 const VkImageCreateFlags*		createFlags,
   1319 																		 const deUint32					levels,
   1320 																		 const deUint32					layers);
   1321 	VkDeviceSize						getCompressedImageData			(const VkFormat					format,
   1322 																		 const UVec3&					size,
   1323 																		 std::vector<deUint8>&			data,
   1324 																		 const deUint32					layer,
   1325 																		 const deUint32					level);
   1326 	VkDeviceSize						getUncompressedImageData		(const VkFormat					format,
   1327 																		 const UVec3&					size,
   1328 																		 std::vector<deUint8>&			data,
   1329 																		 const deUint32					layer,
   1330 																		 const deUint32					level);
   1331 	virtual void						prepareData						();
   1332 	virtual void						prepareVertexBuffer				();
   1333 	virtual void						transcodeRead					();
   1334 	virtual void						transcodeWrite					();
   1335 	bool								verifyDecompression				(const std::vector<deUint8>&	refCompressedData,
   1336 																		 const de::MovePtr<Image>&		resCompressedImage,
    1337 																		 const deUint32					level,
    1338 																		 const deUint32					layer,
   1339 																		 const UVec3&					mipmapDims);
   1340 
   1341 	typedef std::vector<deUint8>		RawDataVector;
   1342 	typedef SharedPtr<RawDataVector>	RawDataPtr;
   1343 	typedef std::vector<RawDataPtr>		LevelData;
   1344 	typedef std::vector<LevelData>		FullImageData;
   1345 
   1346 	FullImageData						m_srcData;
   1347 	FullImageData						m_dstData;
   1348 
   1349 	typedef SharedPtr<Image>			ImagePtr;
   1350 	typedef std::vector<ImagePtr>		LevelImages;
   1351 	typedef std::vector<LevelImages>	ImagesArray;
   1352 
   1353 	ImagesArray							m_uncompressedImages;
   1354 	MovePtr<Image>						m_compressedImage;
   1355 
   1356 	VkImageViewUsageCreateInfo			m_imageViewUsageKHR;
   1357 	VkImageViewUsageCreateInfo*			m_srcImageViewUsageKHR;
   1358 	VkImageViewUsageCreateInfo*			m_dstImageViewUsageKHR;
   1359 	std::vector<tcu::UVec3>				m_compressedImageResVec;
   1360 	std::vector<tcu::UVec3>				m_uncompressedImageResVec;
   1361 	VkFormat							m_srcFormat;
   1362 	VkFormat							m_dstFormat;
   1363 	VkImageUsageFlags					m_srcImageUsageFlags;
   1364 	VkImageUsageFlags					m_dstImageUsageFlags;
   1365 	std::vector<tcu::UVec3>				m_srcImageResolutions;
   1366 	std::vector<tcu::UVec3>				m_dstImageResolutions;
   1367 
   1368 	MovePtr<Buffer>						m_vertexBuffer;
   1369 	deUint32							m_vertexCount;
   1370 	VkDeviceSize						m_vertexBufferOffset;
   1371 };
   1372 
   1373 GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
   1374 	: BasicTranscodingTestInstance(context, parameters)
   1375 	, m_srcData()
   1376 	, m_dstData()
   1377 	, m_uncompressedImages()
   1378 	, m_compressedImage()
   1379 	, m_imageViewUsageKHR()
   1380 	, m_srcImageViewUsageKHR()
   1381 	, m_dstImageViewUsageKHR()
   1382 	, m_compressedImageResVec()
   1383 	, m_uncompressedImageResVec()
   1384 	, m_srcFormat()
   1385 	, m_dstFormat()
   1386 	, m_srcImageUsageFlags()
   1387 	, m_dstImageUsageFlags()
   1388 	, m_srcImageResolutions()
   1389 	, m_dstImageResolutions()
   1390 	, m_vertexBuffer()
   1391 	, m_vertexCount(0u)
   1392 	, m_vertexBufferOffset(0ull)
   1393 {
   1394 }
   1395 
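         // Transcode in the requested direction, then decompress and compare every mip level and array layer.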
   1396 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
   1397 {
   1398 	prepareData();
   1399 	prepareVertexBuffer();
   1400 
   1401 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   1402 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   1403 			DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
   1404 
   1405 	if (isWriteToCompressedOperation())
   1406 		transcodeWrite();
   1407 	else
   1408 		transcodeRead();
   1409 
   1410 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   1411 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   1412 			if (isWriteToCompressedOperation())
   1413 			{
   1414 				if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
   1415 					return TestStatus::fail("Images difference detected");
   1416 			}
   1417 			else
   1418 			{
   1419 				if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
   1420 					return TestStatus::fail("Images difference detected");
   1421 			}
   1422 
   1423 	return TestStatus::pass("Pass");
   1424 }
   1425 
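         // Select formats, usages, view-usage chains and per-level resolutions for the chosen direction, and size the source/destination data vectors.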
   1426 void GraphicsAttachmentsTestInstance::prepareData ()
   1427 {
   1428 	VkImageViewUsageCreateInfo*	imageViewUsageKHRNull	= (VkImageViewUsageCreateInfo*)DE_NULL;
   1429 
   1430 	m_imageViewUsageKHR			= makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
   1431 
   1432 	m_srcImageViewUsageKHR		= isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
   1433 	m_dstImageViewUsageKHR		= isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
   1434 
   1435 	m_srcFormat					= isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
   1436 	m_dstFormat					= isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
   1437 
   1438 	m_srcImageUsageFlags		= isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
   1439 	m_dstImageUsageFlags		= isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
   1440 
   1441 	m_compressedImageResVec		= getMipLevelSizes(getLayerDims());
   1442 	m_uncompressedImageResVec	= getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
   1443 
   1444 	m_srcImageResolutions		= isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
   1445 	m_dstImageResolutions		= isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
   1446 
   1447 	m_srcData.resize(getLevelCount());
   1448 	m_dstData.resize(getLevelCount());
   1449 	m_uncompressedImages.resize(getLevelCount());
   1450 
   1451 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   1452 	{
   1453 		m_srcData[levelNdx].resize(getLayerCount());
   1454 		m_dstData[levelNdx].resize(getLayerCount());
   1455 		m_uncompressedImages[levelNdx].resize(getLayerCount());
   1456 
   1457 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   1458 		{
   1459 			m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
   1460 			m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
   1461 
   1462 			if (isWriteToCompressedOperation())
   1463 			{
   1464 				getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
   1465 
   1466 				m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
   1467 			}
   1468 			else
   1469 			{
   1470 				getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
   1471 
   1472 				m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
   1473 			}
   1474 
   1475 			DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
   1476 		}
   1477 	}
   1478 }
   1479 
   1480 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
   1481 {
   1482 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
   1483 	const VkDevice					device					= m_context.getDevice();
   1484 	Allocator&						allocator				= m_context.getDefaultAllocator();
   1485 
   1486 	const std::vector<tcu::Vec4>	vertexArray				= createFullscreenQuad();
   1487 	const size_t					vertexBufferSizeInBytes	= vertexArray.size() * sizeof(vertexArray[0]);
   1488 
   1489 	m_vertexCount	= static_cast<deUint32>(vertexArray.size());
   1490 	m_vertexBuffer	= MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
   1491 
   1492 	// Upload vertex data
   1493 	const Allocation&	vertexBufferAlloc	= m_vertexBuffer->getAllocation();
   1494 	deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
   1495 	flushMappedMemoryRange(vk, device, vertexBufferAlloc.getMemory(), vertexBufferAlloc.getOffset(), vertexBufferSizeInBytes);
   1496 }
   1497 
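         // Read path: compressed source data is uploaded into a multi-level, multi-layer image, read through an uncompressed-format input-attachment view, and the rendered output is copied back per level and layer.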
   1498 void GraphicsAttachmentsTestInstance::transcodeRead ()
   1499 {
   1500 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
   1501 	const VkDevice						device					= m_context.getDevice();
   1502 	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
   1503 	const VkQueue						queue					= m_context.getUniversalQueue();
   1504 	Allocator&							allocator				= m_context.getDefaultAllocator();
   1505 
   1506 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
   1507 
   1508 	const VkImageCreateInfo				srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
   1509 	MovePtr<Image>						srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
   1510 
   1511 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
   1512 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
   1513 
   1514 	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
   1515 
   1516 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
   1517 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
   1518 																	.build(vk, device));
   1519 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
   1520 																	.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
   1521 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
   1522 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
   1523 
   1524 	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
   1525 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
   1526 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
   1527 
   1528 	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
   1529 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
   1530 
   1531 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   1532 	{
   1533 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
   1534 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
   1535 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
   1536 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
   1537 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
   1538 		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
   1539 
   1540 		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
   1541 
   1542 		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
   1543 		const MovePtr<Buffer>		srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
   1544 
   1545 		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
   1546 		MovePtr<Buffer>				dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
   1547 
   1548 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
   1549 		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
   1550 		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);
   1551 
   1552 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   1553 		{
   1554 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
   1555 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
   1556 
   1557 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
   1558 
   1559 			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
   1560 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
   1561 
   1562 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
   1563 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
   1564 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
   1565 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
   1566 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
   1567 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);
   1568 
   1569 			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
   1570 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
   1571 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
   1572 
   1573 			// Upload source image data
   1574 			const Allocation& alloc = srcImageBuffer->getAllocation();
   1575 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
   1576 			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
   1577 
   1578 			beginCommandBuffer(vk, *cmdBuffer);
   1579 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
   1580 
   1581 			// Copy buffer to image
   1582 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
   1583 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
    1584 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
   1585 
   1586 			// Define destination image layout
   1587 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
   1588 
   1589 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
   1590 
   1591 			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
   1592 			DescriptorSetUpdateBuilder()
   1593 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
   1594 				.update(vk, device);
   1595 
   1596 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
   1597 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
   1598 
   1599 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
   1600 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
   1601 
   1602 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
   1603 
   1604 			vk.cmdEndRenderPass(*cmdBuffer);
   1605 
   1606 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
   1607 				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1608 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
   1609 				dstImage->get(), dstSubresourceRange);
   1610 
   1611 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
   1612 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   1613 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
   1614 
   1615 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
   1616 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
   1617 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
   1618 
   1619 			endCommandBuffer(vk, *cmdBuffer);
   1620 
   1621 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
   1622 
   1623 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
   1624 			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
   1625 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
   1626 		}
   1627 	}
   1628 
   1629 	m_compressedImage = srcImage;
   1630 }
   1631 
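         // Write path: uncompressed source data is fed through an input attachment and rendered into an uncompressed-format colour-attachment view of the compressed destination image; each level/layer is then read back for verification.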
   1632 void GraphicsAttachmentsTestInstance::transcodeWrite ()
   1633 {
   1634 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
   1635 	const VkDevice						device					= m_context.getDevice();
   1636 	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
   1637 	const VkQueue						queue					= m_context.getUniversalQueue();
   1638 	Allocator&							allocator				= m_context.getDefaultAllocator();
   1639 
   1640 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
   1641 
   1642 	const VkImageCreateInfo				dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
   1643 	MovePtr<Image>						dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
   1644 
   1645 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
   1646 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
   1647 
   1648 	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
   1649 
   1650 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
   1651 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
   1652 																	.build(vk, device));
   1653 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
   1654 																	.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
   1655 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
   1656 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
   1657 
   1658 	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
   1659 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
   1660 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
   1661 
   1662 	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
   1663 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
   1664 
   1665 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   1666 	{
   1667 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
   1668 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
   1669 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
   1670 		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
   1671 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
   1672 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
   1673 
   1674 		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
   1675 
   1676 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
   1677 		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
   1678 		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);
   1679 
   1680 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   1681 		{
   1682 			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
   1683 			const MovePtr<Buffer>			srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
   1684 
   1685 			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
   1686 			MovePtr<Buffer>					dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
   1687 
   1688 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
   1689 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
   1690 
   1691 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
   1692 
   1693 			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
   1694 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
   1695 
   1696 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
   1697 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
   1698 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
   1699 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
   1700 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
   1701 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
   1702 
   1703 			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
   1704 			const VkExtent2D				framebufferSize			(renderSize);
   1705 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
   1706 
   1707 			// Upload source image data
   1708 			const Allocation& alloc = srcImageBuffer->getAllocation();
   1709 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
   1710 			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
   1711 
   1712 			beginCommandBuffer(vk, *cmdBuffer);
   1713 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
   1714 
   1715 			// Copy buffer to image
   1716 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
   1717 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
    1718 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
   1719 
   1720 			// Define destination image layout
   1721 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
   1722 
   1723 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
   1724 
   1725 			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
   1726 			DescriptorSetUpdateBuilder()
   1727 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
   1728 				.update(vk, device);
   1729 
   1730 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
   1731 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
   1732 
   1733 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
   1734 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
   1735 
   1736 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
   1737 
   1738 			vk.cmdEndRenderPass(*cmdBuffer);
   1739 
   1740 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
   1741 				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1742 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
   1743 				dstImage->get(), dstSubresourceRange);
   1744 
   1745 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
   1746 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   1747 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
   1748 
   1749 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
   1750 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
   1751 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
   1752 
   1753 			endCommandBuffer(vk, *cmdBuffer);
   1754 
   1755 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
   1756 
   1757 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
   1758 			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
   1759 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
   1760 		}
   1761 	}
   1762 
   1763 	m_compressedImage = dstImage;
   1764 }
   1765 
   1766 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
   1767 {
   1768 	return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
   1769 }
   1770 
   1771 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat				format,
   1772 																	    const ImageType				type,
   1773 																	    const UVec3&				size,
   1774 																	    const VkImageUsageFlags		usageFlags,
   1775 																	    const VkImageCreateFlags*	createFlags,
   1776 																	    const deUint32				levels,
   1777 																	    const deUint32				layers)
   1778 {
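         	// Compressed formats additionally get BLOCK_TEXEL_VIEW_COMPATIBLE and EXTENDED_USAGE on top of MUTABLE_FORMAT so that uncompressed-format views can alias the image; a non-null createFlags pointer overrides these defaults.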
   1779 	const VkImageType			imageType				= mapImageType(type);
   1780 	const VkImageCreateFlags	imageCreateFlagsBase	= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
   1781 	const VkImageCreateFlags	imageCreateFlagsAddOn	= isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
   1782 	const VkImageCreateFlags	imageCreateFlags		= (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
   1783 
   1784 	const VkImageCreateInfo createImageInfo =
   1785 	{
   1786 		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,			// VkStructureType			sType;
   1787 		DE_NULL,										// const void*				pNext;
   1788 		imageCreateFlags,								// VkImageCreateFlags		flags;
   1789 		imageType,										// VkImageType				imageType;
   1790 		format,											// VkFormat					format;
   1791 		makeExtent3D(getLayerSize(type, size)),			// VkExtent3D				extent;
   1792 		levels,											// deUint32					mipLevels;
   1793 		layers,											// deUint32					arrayLayers;
   1794 		VK_SAMPLE_COUNT_1_BIT,							// VkSampleCountFlagBits	samples;
   1795 		VK_IMAGE_TILING_OPTIMAL,						// VkImageTiling			tiling;
   1796 		usageFlags,										// VkImageUsageFlags		usage;
   1797 		VK_SHARING_MODE_EXCLUSIVE,						// VkSharingMode			sharingMode;
   1798 		0u,												// deUint32					queueFamilyIndexCount;
   1799 		DE_NULL,										// const deUint32*			pQueueFamilyIndices;
   1800 		VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			initialLayout;
   1801 	};
   1802 
   1803 	return createImageInfo;
   1804 }
   1805 
   1806 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat			format,
   1807 																	  const UVec3&				size,
   1808 																	  std::vector<deUint8>&		data,
   1809 																	  const deUint32			layer,
   1810 																	  const deUint32			level)
   1811 {
   1812 	VkDeviceSize	sizeBytes	= getCompressedImageSizeInBytes(format, size);
   1813 
   1814 	data.resize((size_t)sizeBytes);
   1815 	generateData(&data[0], data.size(), format, layer, level);
   1816 
   1817 	return sizeBytes;
   1818 }
   1819 
   1820 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat			format,
   1821 																		const UVec3&			size,
   1822 																		std::vector<deUint8>&	data,
   1823 																		const deUint32			layer,
   1824 																		const deUint32			level)
   1825 {
   1826 	tcu::IVec3				sizeAsIVec3	= tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
   1827 	VkDeviceSize			sizeBytes	= getImageSizeBytes(sizeAsIVec3, format);
   1828 
   1829 	data.resize((size_t)sizeBytes);
   1830 	generateData(&data[0], data.size(), format, layer, level);
   1831 
   1832 	return sizeBytes;
   1833 }
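         // Decompress the reference data and the transcoded image through sampled compressed-format views in the frag_verify fragment shader, writing the decompressed texels into two storage images for comparison.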
   1834 
   1835 bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>&	refCompressedData,
   1836 														   const de::MovePtr<Image>&	resCompressedImage,
   1837 														   const deUint32				level,
   1838 														   const deUint32				layer,
   1839 														   const UVec3&					mipmapDims)
   1840 {
   1841 	const DeviceInterface&				vk							= m_context.getDeviceInterface();
   1842 	const VkDevice						device						= m_context.getDevice();
   1843 	const deUint32						queueFamilyIndex			= m_context.getUniversalQueueFamilyIndex();
   1844 	const VkQueue						queue						= m_context.getUniversalQueue();
   1845 	Allocator&							allocator					= m_context.getDefaultAllocator();
   1846 
   1847 	const bool							layoutShaderReadOnly		= (layer % 2u) == 1;
   1848 	const UVec3							mipmapDimsBlocked			= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
   1849 
   1850 	const VkImageSubresourceRange		subresourceRange			= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
   1851 	const VkImageSubresourceRange		resSubresourceRange			= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
   1852 
   1853 	const VkDeviceSize					dstBufferSize				= getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
   1854 	const VkImageUsageFlags				refSrcImageUsageFlags		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
   1855 
   1856 	const VkBufferCreateInfo			refSrcImageBufferInfo		(makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
   1857 	const MovePtr<Buffer>				refSrcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
   1858 
   1859 	const VkImageCreateFlags			refSrcImageCreateFlags		= 0;
   1860 	const VkImageCreateInfo				refSrcImageCreateInfo		= makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
   1861 	const MovePtr<Image>				refSrcImage					(new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
   1862 	Move<VkImageView>					refSrcImageView				(makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
   1863 
   1864 	const VkImageUsageFlags				resSrcImageUsageFlags		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
   1865 	const VkImageViewUsageCreateInfo	resSrcImageViewUsageKHR		= makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
   1866 	Move<VkImageView>					resSrcImageView				(makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
   1867 
   1868 	const VkImageCreateFlags			refDstImageCreateFlags		= 0;
   1869 	const VkImageUsageFlags				refDstImageUsageFlags		= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
   1870 	const VkImageCreateInfo				refDstImageCreateInfo		= makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
   1871 	const MovePtr<Image>				refDstImage					(new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
   1872 	const Move<VkImageView>				refDstImageView				(makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
   1873 	const VkImageMemoryBarrier			refDstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
   1874 	const VkBufferCreateInfo			refDstBufferInfo			(makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
   1875 	const MovePtr<Buffer>				refDstBuffer				= MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
   1876 
   1877 	const VkImageCreateFlags			resDstImageCreateFlags		= 0;
   1878 	const VkImageUsageFlags				resDstImageUsageFlags		= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
   1879 	const VkImageCreateInfo				resDstImageCreateInfo		= makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
   1880 	const MovePtr<Image>				resDstImage					(new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
   1881 	const Move<VkImageView>				resDstImageView				(makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
   1882 	const VkImageMemoryBarrier			resDstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
   1883 	const VkBufferCreateInfo			resDstBufferInfo			(makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
   1884 	const MovePtr<Buffer>				resDstBuffer				= MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
   1885 
   1886 	const Unique<VkShaderModule>		vertShaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
   1887 	const Unique<VkShaderModule>		fragShaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
   1888 
   1889 	const Unique<VkRenderPass>			renderPass					(makeRenderPass(vk, device));
   1890 
   1891 	const Move<VkDescriptorSetLayout>	descriptorSetLayout			(DescriptorSetLayoutBuilder()
   1892 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
   1893 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
   1894 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
   1895 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
   1896 																		.build(vk, device));
   1897 	const Move<VkDescriptorPool>		descriptorPool				(DescriptorPoolBuilder()
   1898 																		.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
   1899 																		.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
   1900 																		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
   1901 																		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
   1902 																		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
   1903 	const Move<VkDescriptorSet>			descriptorSet				(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
   1904 	const VkSamplerCreateInfo			refSrcSamplerInfo			(makeSamplerCreateInfo());
   1905 	const Move<VkSampler>				refSrcSampler				= vk::createSampler(vk, device, &refSrcSamplerInfo);
   1906 	const VkSamplerCreateInfo			resSrcSamplerInfo			(makeSamplerCreateInfo());
   1907 	const Move<VkSampler>				resSrcSampler				= vk::createSampler(vk, device, &resSrcSamplerInfo);
   1908 	const VkDescriptorImageInfo			descriptorRefSrcImage		(makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
   1909 	const VkDescriptorImageInfo			descriptorResSrcImage		(makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
   1910 	const VkDescriptorImageInfo			descriptorRefDstImage		(makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
   1911 	const VkDescriptorImageInfo			descriptorResDstImage		(makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
   1912 
   1913 	const VkExtent2D					renderSize					(makeExtent2D(mipmapDims.x(), mipmapDims.y()));
   1914 	const Unique<VkPipelineLayout>		pipelineLayout				(makePipelineLayout(vk, device, *descriptorSetLayout));
   1915 	const Unique<VkPipeline>			pipeline					(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
   1916 	const Unique<VkCommandPool>			cmdPool						(createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
   1917 	const Unique<VkCommandBuffer>		cmdBuffer					(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
   1918 
   1919 	const VkBufferImageCopy				copyBufferToImageRegion		= makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
   1920 	const VkBufferImageCopy				copyRegion					= makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
   1921 	const VkBufferMemoryBarrier			refSrcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
   1922 	const VkImageMemoryBarrier			refSrcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
   1923 	const VkImageMemoryBarrier			refSrcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
   1924 	const VkImageMemoryBarrier			resCompressedImageBarrier	= makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);
   1925 
   1926 	const Move<VkFramebuffer>			framebuffer					(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize, getLayerCount()));
   1927 
   1928 	// Upload source image data
   1929 	{
   1930 		const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
   1931 		deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
   1932 		flushMappedMemoryRange(vk, device, refSrcImageBufferAlloc.getMemory(), refSrcImageBufferAlloc.getOffset(), refCompressedData.size());
   1933 	}
   1934 
   1935 	beginCommandBuffer(vk, *cmdBuffer);
   1936 	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
   1937 
   1938 	// Copy buffer to image
   1939 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
   1940 	vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, &copyBufferToImageRegion);
   1941 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
   1942 
   1943 	// Make reference and result images readable
   1944 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
   1945 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
   1946 	{
   1947 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
   1948 	}
   1949 
   1950 	beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
   1951 	{
   1952 		DescriptorSetUpdateBuilder()
   1953 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
   1954 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
   1955 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
   1956 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
   1957 			.update(vk, device);
   1958 
   1959 		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
   1960 		vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
   1961 		vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
   1962 	}
   1963 	vk.cmdEndRenderPass(*cmdBuffer);
   1964 
   1965 	// Decompress reference image
   1966 	{
   1967 		const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
   1968 			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1969 			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
   1970 			refDstImage->get(), subresourceRange);
   1971 
   1972 		const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
   1973 			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   1974 			refDstBuffer->get(), 0ull, dstBufferSize);
   1975 
   1976 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
   1977 		vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, &copyRegion);
   1978 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
   1979 	}
   1980 
   1981 	// Decompress result image
   1982 	{
   1983 		const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
   1984 			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   1985 			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
   1986 			resDstImage->get(), subresourceRange);
   1987 
   1988 		const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
   1989 			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   1990 			resDstBuffer->get(), 0ull, dstBufferSize);
   1991 
   1992 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
   1993 		vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, &copyRegion);
   1994 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
   1995 	}
   1996 
   1997 	endCommandBuffer(vk, *cmdBuffer);
   1998 
   1999 	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
   2000 
   2001 	// Compare decompressed pixel data in reference and result images
   2002 	{
   2003 		const Allocation&	refDstBufferAlloc	= refDstBuffer->getAllocation();
   2004 		invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
   2005 
   2006 		const Allocation&	resDstBufferAlloc	= resDstBuffer->getAllocation();
   2007 		invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
   2008 
   2009 		if (deMemCmp(refDstBufferAlloc.getHostPtr(), resDstBufferAlloc.getHostPtr(), (size_t)dstBufferSize) != 0)
   2010 		{
    2011 			// Run a fuzzy comparison so that an error mask is written to the log
   2012 			invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
   2013 			invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
   2014 
   2015 			tcu::ConstPixelBufferAccess	resPixels	(mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
   2016 			tcu::ConstPixelBufferAccess	refPixels	(mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
   2017 
   2018 			string	comment	= string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
   2019 
   2020 			if (isWriteToCompressedOperation())
   2021 				tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
   2022 			else
   2023 				tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
   2024 
   2025 			return false;
   2026 		}
   2027 	}
   2028 
   2029 	return true;
   2030 }
   2031 
   2032 
   2033 class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
   2034 {
   2035 public:
   2036 						GraphicsTextureTestInstance		(Context& context, const TestParameters& parameters);
   2037 
   2038 protected:
   2039 	virtual bool		isWriteToCompressedOperation	();
   2040 	virtual void		transcodeRead					();
   2041 	virtual void		transcodeWrite					();
   2042 };
   2043 
   2044 GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
   2045 	: GraphicsAttachmentsTestInstance(context, parameters)
   2046 {
   2047 }
   2048 
   2049 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
   2050 {
   2051 	return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
   2052 }
   2053 
   2054 void GraphicsTextureTestInstance::transcodeRead ()
   2055 {
   2056 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
   2057 	const VkDevice						device					= m_context.getDevice();
   2058 	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
   2059 	const VkQueue						queue					= m_context.getUniversalQueue();
   2060 	Allocator&							allocator				= m_context.getDefaultAllocator();
   2061 
   2062 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
   2063 
   2064 	const VkImageCreateInfo				srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
   2065 	MovePtr<Image>						srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
   2066 
   2067 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
   2068 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
   2069 
   2070 	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device));
   2071 
   2072 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
   2073 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
   2074 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
   2075 																	.build(vk, device));
   2076 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
   2077 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
   2078 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
   2079 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
   2080 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
   2081 
   2082 	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
   2083 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
   2084 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
   2085 
   2086 	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
   2087 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
   2088 
   2089 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   2090 	{
   2091 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
   2092 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
   2093 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
   2094 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
   2095 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
   2096 		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
   2097 
   2098 		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
   2099 
   2100 		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
   2101 		const MovePtr<Buffer>		srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
   2102 
   2103 		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
   2104 		MovePtr<Buffer>				dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
   2105 
   2106 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
   2107 		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
   2108 		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);
   2109 
   2110 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   2111 		{
   2112 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
   2113 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
   2114 
   2115 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
   2116 
   2117 			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
   2118 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
   2119 
   2120 			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
   2121 			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
   2122 			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
   2123 			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
   2124 
   2125 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
   2126 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
   2127 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
   2128 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
   2129 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
   2130 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
   2131 
   2132 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
   2133 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
   2134 
   2135 			// Upload source image data
   2136 			const Allocation& alloc = srcImageBuffer->getAllocation();
   2137 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
   2138 			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
   2139 
   2140 			beginCommandBuffer(vk, *cmdBuffer);
   2141 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
   2142 
   2143 			// Copy buffer to image
   2144 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
   2145 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
    2146 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
   2147 
    2148 			// Transition the destination image into GENERAL layout
   2149 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
   2150 
   2151 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
   2152 
   2153 			DescriptorSetUpdateBuilder()
   2154 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
   2155 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
   2156 				.update(vk, device);
   2157 
   2158 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
   2159 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
   2160 
   2161 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
   2162 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
   2163 
   2164 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
   2165 
   2166 			vk.cmdEndRenderPass(*cmdBuffer);
   2167 
   2168 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
   2169 				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   2170 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
   2171 				dstImage->get(), dstSubresourceRange);
   2172 
   2173 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
   2174 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   2175 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
   2176 
   2177 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
   2178 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
   2179 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
   2180 
   2181 			endCommandBuffer(vk, *cmdBuffer);
   2182 
   2183 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
   2184 
   2185 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
   2186 			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
   2187 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
   2188 		}
   2189 	}
   2190 
   2191 	m_compressedImage = srcImage;
   2192 }
   2193 
   2194 void GraphicsTextureTestInstance::transcodeWrite ()
   2195 {
   2196 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
   2197 	const VkDevice						device					= m_context.getDevice();
   2198 	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
   2199 	const VkQueue						queue					= m_context.getUniversalQueue();
   2200 	Allocator&							allocator				= m_context.getDefaultAllocator();
   2201 
   2202 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
   2203 
   2204 	const VkImageCreateInfo				dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
   2205 	MovePtr<Image>						dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
   2206 
   2207 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
   2208 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
   2209 
   2210 	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device));
   2211 
   2212 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
   2213 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
   2214 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
   2215 																	.build(vk, device));
   2216 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
   2217 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
   2218 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
   2219 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
   2220 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
   2221 
   2222 	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
   2223 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
   2224 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
   2225 
   2226 	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
   2227 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
   2228 
   2229 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
   2230 	{
   2231 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
   2232 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
   2233 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
   2234 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
   2235 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
   2236 		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
   2237 
   2238 		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
   2239 
   2240 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
   2241 		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
   2242 		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);
   2243 
   2244 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
   2245 		{
   2246 			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
   2247 			const MovePtr<Buffer>			srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
   2248 
   2249 			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
   2250 			MovePtr<Buffer>					dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
   2251 
   2252 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
   2253 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
   2254 
   2255 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
   2256 
   2257 			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
   2258 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
   2259 
   2260 			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
   2261 			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
   2262 			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
   2263 			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
   2264 
   2265 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
   2266 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
   2267 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
   2268 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
   2269 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
   2270 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
   2271 
   2272 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
   2273 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
   2274 
   2275 			// Upload source image data
   2276 			const Allocation& alloc = srcImageBuffer->getAllocation();
   2277 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
   2278 			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
   2279 
   2280 			beginCommandBuffer(vk, *cmdBuffer);
   2281 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
   2282 
   2283 			// Copy buffer to image
   2284 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
   2285 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
    2286 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
   2287 
    2288 			// Transition the destination image into GENERAL layout
   2289 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
   2290 
   2291 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
   2292 
   2293 			DescriptorSetUpdateBuilder()
   2294 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
   2295 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
   2296 				.update(vk, device);
   2297 
   2298 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
   2299 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
   2300 
   2301 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
   2302 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
   2303 
   2304 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
   2305 
   2306 			vk.cmdEndRenderPass(*cmdBuffer);
   2307 
   2308 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
   2309 				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
   2310 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
   2311 				dstImage->get(), dstSubresourceRange);
   2312 
   2313 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
   2314 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
   2315 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
   2316 
   2317 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
   2318 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
   2319 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
   2320 
   2321 			endCommandBuffer(vk, *cmdBuffer);
   2322 
   2323 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
   2324 
   2325 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
   2326 			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
   2327 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
   2328 		}
   2329 	}
   2330 
   2331 	m_compressedImage = dstImage;
   2332 }
   2333 
   2334 class TexelViewCompatibleCase : public TestCase
   2335 {
   2336 public:
   2337 							TexelViewCompatibleCase		(TestContext&				testCtx,
   2338 														 const std::string&			name,
   2339 														 const std::string&			desc,
   2340 														 const TestParameters&		parameters);
   2341 	void					initPrograms				(SourceCollections&			programCollection) const;
   2342 	TestInstance*			createInstance				(Context&					context) const;
   2343 protected:
   2344 	const TestParameters	m_parameters;
   2345 };
   2346 
   2347 TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
   2348 	: TestCase				(testCtx, name, desc)
   2349 	, m_parameters			(parameters)
   2350 {
   2351 }
   2352 
   2353 void TexelViewCompatibleCase::initPrograms (vk::SourceCollections&	programCollection) const
   2354 {
   2355 	DE_ASSERT(m_parameters.size.x() > 0);
   2356 	DE_ASSERT(m_parameters.size.y() > 0);
   2357 
   2358 	switch (m_parameters.shader)
   2359 	{
   2360 		case SHADER_TYPE_COMPUTE:
   2361 		{
   2362 			const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
   2363 			const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
   2364 			std::ostringstream	src;
   2365 			std::ostringstream	src_decompress;
   2366 
   2367 			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
   2368 				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
   2369 			src_decompress << src.str();
   2370 
   2371 			switch(m_parameters.operation)
   2372 			{
   2373 				case OPERATION_IMAGE_LOAD:
   2374 				{
   2375 					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
   2376 						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
   2377 						<< "void main (void)\n"
   2378 						<< "{\n"
   2379 						<< "    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
   2380 						<< "    imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
   2381 						<< "}\n";
   2382 
   2383 					break;
   2384 				}
   2385 
   2386 				case OPERATION_TEXEL_FETCH:
   2387 				{
   2388 					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
   2389 						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
   2390 						<< "void main (void)\n"
   2391 						<< "{\n"
   2392 						<< "    ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
   2393 						<< "    imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n"
   2394 						<< "}\n";
   2395 
   2396 					break;
   2397 				}
   2398 
   2399 				case OPERATION_TEXTURE:
   2400 				{
   2401 					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
   2402 						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
   2403 						<< "void main (void)\n"
   2404 						<< "{\n"
   2405 						<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x - 1, gl_NumWorkGroups.y - 1);\n"
   2406 						<< "    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
   2407 						<< "    const vec2 coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
   2408 						<< "    imageStore(u_image1, pos, texture(u_image0, coord));\n"
   2409 						<< "}\n";
   2410 
   2411 					break;
   2412 				}
   2413 
   2414 				case OPERATION_IMAGE_STORE:
   2415 				{
   2416 					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<"           u_image0;\n"
   2417 						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<"  u_image1;\n"
   2418 						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
   2419 						<< "void main (void)\n"
   2420 						<< "{\n"
   2421 						<< "    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
   2422 						<< "    imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
   2423 						<< "    imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
   2424 						<< "}\n";
   2425 
   2426 					break;
   2427 				}
   2428 
   2429 				default:
   2430 					DE_ASSERT(false);
   2431 			}
   2432 
   2433 			src_decompress	<< "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
   2434 							<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
   2435 							<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
   2436 							<< "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
   2437 							<< "void main (void)\n"
   2438 							<< "{\n"
   2439 							<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
    2440 							<< "    const vec2 coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
    2441 							<< "    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
    2442 							<< "    imageStore(decompressed_result, pos, texture(compressed_result, coord));\n"
    2443 							<< "    imageStore(decompressed_reference, pos, texture(compressed_reference, coord));\n"
   2444 							<< "}\n";
   2445 			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
   2446 			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());
   2447 
   2448 			break;
   2449 		}
   2450 
   2451 		case SHADER_TYPE_FRAGMENT:
   2452 		{
   2453 			ImageType	imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;
   2454 
   2455 			// Vertex shader
   2456 			{
   2457 				std::ostringstream src;
   2458 				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
   2459 					<< "layout(location = 0) in vec4 v_in_position;\n"
   2460 					<< "\n"
   2461 					<< "void main (void)\n"
   2462 					<< "{\n"
   2463 					<< "    gl_Position = v_in_position;\n"
   2464 					<< "}\n";
   2465 
   2466 				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
   2467 			}
   2468 
   2469 			// Fragment shader
   2470 			{
   2471 				switch(m_parameters.operation)
   2472 				{
   2473 					case OPERATION_ATTACHMENT_READ:
   2474 					case OPERATION_ATTACHMENT_WRITE:
   2475 					{
   2476 						std::ostringstream	src;
   2477 
   2478 						const std::string	dstTypeStr	= getGlslFormatType(m_parameters.formatUncompressed);
   2479 						const std::string	srcTypeStr	= getGlslInputFormatType(m_parameters.formatUncompressed);
   2480 
   2481 						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
   2482 							<< "precision highp int;\n"
   2483 							<< "precision highp float;\n"
   2484 							<< "\n"
   2485 							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
   2486 							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
   2487 							<< "\n"
   2488 							<< "void main (void)\n"
   2489 							<< "{\n"
   2490 							<< "    o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
   2491 							<< "}\n";
   2492 
   2493 						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());
   2494 
   2495 						break;
   2496 					}
   2497 
   2498 					case OPERATION_TEXTURE_READ:
   2499 					case OPERATION_TEXTURE_WRITE:
   2500 					{
   2501 						std::ostringstream	src;
   2502 
   2503 						const std::string	srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
   2504 						const std::string	dstImageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
   2505 						const std::string	dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
   2506 
   2507 						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
   2508 							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
   2509 							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
   2510 							<< "\n"
   2511 							<< "void main (void)\n"
   2512 							<< "{\n"
   2513 							<< "    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
   2514 							<< "    const ivec2 pixels_resolution = ivec2(textureSize(u_imageIn, 0)) - ivec2(1,1);\n"
   2515 							<< "    const vec2 in_pos = vec2(out_pos) / vec2(pixels_resolution);\n"
   2516 							<< "    imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
   2517 							<< "}\n";
   2518 
   2519 						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());
   2520 
   2521 						break;
   2522 					}
   2523 
   2524 					default:
   2525 						DE_ASSERT(false);
   2526 				}
   2527 			}
   2528 
   2529 			// Verification fragment shader
   2530 			{
   2531 				std::ostringstream	src;
   2532 
   2533 				const std::string	samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
   2534 				const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
   2535 				const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));
   2536 
   2537 				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
   2538 					<< "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
   2539 					<< "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
   2540 					<< "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
   2541 					<< "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
   2542 					<< "\n"
   2543 					<< "void main (void)\n"
   2544 					<< "{\n"
   2545 					<< "    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
   2546 					<< "\n"
   2547 					<< "    const ivec2 pixels_resolution0 = ivec2(textureSize(u_imageIn0, 0)) - ivec2(1,1);\n"
   2548 					<< "    const vec2 in_pos0 = vec2(out_pos) / vec2(pixels_resolution0);\n"
   2549 					<< "    imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
   2550 					<< "\n"
   2551 					<< "    const ivec2 pixels_resolution1 = ivec2(textureSize(u_imageIn1, 0)) - ivec2(1,1);\n"
   2552 					<< "    const vec2 in_pos1 = vec2(out_pos) / vec2(pixels_resolution1);\n"
   2553 					<< "    imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
   2554 					<< "}\n";
   2555 
   2556 				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
   2557 			}
   2558 
   2559 			break;
   2560 		}
   2561 
   2562 		default:
   2563 			DE_ASSERT(false);
   2564 	}
   2565 }
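
/* For illustration, assuming the format helpers map VK_FORMAT_R32G32_UINT with a 2D image
 * to the "rg32ui" qualifier, the "uimage2D" image type and the "usampler2D" sampler type
 * (the exact strings depend on the dEQP helpers above), the "comp" program assembled by
 * the OPERATION_IMAGE_LOAD branch would read roughly as follows:
 *
 *     #version 450
 *     layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
 *
 *     layout (binding = 0, rg32ui) readonly uniform uimage2D u_image0;
 *     layout (binding = 1, rg32ui) writeonly uniform uimage2D u_image1;
 *
 *     void main (void)
 *     {
 *         ivec2 pos = ivec2(gl_GlobalInvocationID.xy);
 *         imageStore(u_image1, pos, imageLoad(u_image0, pos));
 *     }
 */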
   2566 
   2567 TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
   2568 {
   2569 	const VkPhysicalDevice			physicalDevice			= context.getPhysicalDevice();
   2570 	const InstanceInterface&		vk						= context.getInstanceInterface();
   2571 
   2572 	if (!m_parameters.useMipmaps)
   2573 	{
   2574 		DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size)     == 1u);
   2575 		DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).z() == 1u);
   2576 	}
   2577 
   2578 	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() >  0u);
   2579 	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() >  0u);
   2580 
   2581 	if (!isDeviceExtensionSupported(context.getUsedApiVersion(), context.getDeviceExtensions(), "VK_KHR_maintenance2"))
   2582 		TCU_THROW(NotSupportedError, "Extension VK_KHR_maintenance2 not supported");
   2583 
   2584 	{
   2585 		VkImageFormatProperties imageFormatProperties;
   2586 
   2587 		if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
   2588 												mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
   2589 												m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties))
   2590 			TCU_THROW(NotSupportedError, "Operation not supported with this image format");
   2591 
   2592 		if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
   2593 												mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
   2594 												VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
   2595 												VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
   2596 												&imageFormatProperties))
   2597 			TCU_THROW(NotSupportedError, "Operation not supported with this image format");
   2598 	}
   2599 
   2600 	{
   2601 		const VkPhysicalDeviceFeatures	physicalDeviceFeatures	= getPhysicalDeviceFeatures (vk, physicalDevice);
   2602 
   2603 		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
   2604 			!physicalDeviceFeatures.textureCompressionBC)
   2605 			TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
   2606 
   2607 		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
   2608 			!physicalDeviceFeatures.textureCompressionETC2)
   2609 			TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
   2610 
   2611 		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK) &&
   2612 			!physicalDeviceFeatures.textureCompressionASTC_LDR)
   2613 			TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
   2614 
    2615 		if ((m_parameters.uncompressedImageUsage & VK_IMAGE_USAGE_STORAGE_BIT) &&
    2616 			isStorageImageExtendedFormat(m_parameters.formatUncompressed) &&
    2617 			!physicalDeviceFeatures.shaderStorageImageExtendedFormats)
    2618 			TCU_THROW(NotSupportedError, "Storage view format requires shaderStorageImageExtendedFormats");
   2619 	}
   2620 
   2621 	switch (m_parameters.shader)
   2622 	{
   2623 		case SHADER_TYPE_COMPUTE:
   2624 		{
   2625 			switch (m_parameters.operation)
   2626 			{
   2627 				case OPERATION_IMAGE_LOAD:
   2628 				case OPERATION_TEXEL_FETCH:
   2629 				case OPERATION_TEXTURE:
   2630 					return new BasicComputeTestInstance(context, m_parameters);
   2631 				case OPERATION_IMAGE_STORE:
   2632 					return new ImageStoreComputeTestInstance(context, m_parameters);
   2633 				default:
   2634 					TCU_THROW(InternalError, "Impossible");
   2635 			}
   2636 		}
   2637 
   2638 		case SHADER_TYPE_FRAGMENT:
   2639 		{
   2640 			switch (m_parameters.operation)
   2641 			{
   2642 				case OPERATION_ATTACHMENT_READ:
   2643 				case OPERATION_ATTACHMENT_WRITE:
   2644 					return new GraphicsAttachmentsTestInstance(context, m_parameters);
   2645 
   2646 				case OPERATION_TEXTURE_READ:
   2647 				case OPERATION_TEXTURE_WRITE:
   2648 					return new GraphicsTextureTestInstance(context, m_parameters);
   2649 
   2650 				default:
   2651 					TCU_THROW(InternalError, "Impossible");
   2652 			}
   2653 		}
   2654 
   2655 		default:
   2656 			TCU_THROW(InternalError, "Impossible");
   2657 	}
   2658 }
   2659 
   2660 } // anonymous ns
   2661 
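// Picks a "non-nice" (non-power-of-two) base resolution for the mipmapped cases: the base
// width and height are looked up from the format's block width and height, then one axis is
// scaled by a power of two so that both axes produce mip chains of equal length.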
   2662 static tcu::UVec3 getUnniceResolution(const VkFormat format, const deUint32 layers)
   2663 {
   2664 	const deUint32	unniceMipmapTextureSize[]	= { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
   2665 	const deUint32	baseTextureWidth			= unniceMipmapTextureSize[getBlockWidth(format)];
   2666 	const deUint32	baseTextureHeight			= unniceMipmapTextureSize[getBlockHeight(format)];
   2667 	const deUint32	baseTextureWidthLevels		= deLog2Floor32(baseTextureWidth);
   2668 	const deUint32	baseTextureHeightLevels		= deLog2Floor32(baseTextureHeight);
   2669 	const deUint32	widthMultiplier				= (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
   2670 	const deUint32	heightMultiplier			= (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
   2671 	const deUint32	width						= baseTextureWidth * widthMultiplier;
   2672 	const deUint32	height						= baseTextureHeight * heightMultiplier;
   2673 
    2674 	// The number of mip levels must be the same on both axes
   2675 	DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
   2676 
   2677 	return tcu::UVec3(width, height, layers);
   2678 }
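
// Worked example, assuming an 8x5 block format such as VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
//   baseTextureWidth  = unniceMipmapTextureSize[8] = 604, deLog2Floor32(604) = 9
//   baseTextureHeight = unniceMipmapTextureSize[5] = 48,  deLog2Floor32(48)  = 5
//   widthMultiplier = 1, heightMultiplier = 1 << (9 - 5) = 16
//   result = UVec3(604, 48 * 16, layers) = UVec3(604, 768, layers)
// Both 604 and 768 have deLog2Floor32() == 9, so the assertion above holds.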
   2679 
   2680 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
   2681 {
   2682 	struct FormatsArray
   2683 	{
   2684 		const VkFormat*	formats;
   2685 		deUint32		count;
   2686 	};
   2687 
   2688 	const bool					mipmapness[]									=
   2689 	{
   2690 		false,
   2691 		true,
   2692 	};
   2693 
   2694 	const std::string			pipelineName[SHADER_TYPE_LAST]					=
   2695 	{
   2696 		"compute",
   2697 		"graphic",
   2698 	};
   2699 
    2700 	const std::string			mipmapnessName[DE_LENGTH_OF_ARRAY(mipmapness)]	=
   2701 	{
   2702 		"basic",
   2703 		"extended",
   2704 	};
   2705 
   2706 	const std::string			operationName[OPERATION_LAST]					=
   2707 	{
   2708 		"image_load",
   2709 		"texel_fetch",
   2710 		"texture",
   2711 		"image_store",
   2712 		"attachment_read",
   2713 		"attachment_write",
   2714 		"texture_read",
   2715 		"texture_write",
   2716 	};
   2717 
   2718 	const VkImageUsageFlags		baseImageUsageFlagSet							= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
   2719 	const VkImageUsageFlags		compressedImageUsageFlags[OPERATION_LAST]		=
   2720 	{
   2721 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),											// "image_load"
   2722 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texel_fetch"
   2723 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texture"
   2724 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "image_store"
   2725 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),	// "attachment_read"
   2726 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),	// "attachment_write"
   2727 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),											// "texture_read"
   2728 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texture_write"
   2729 	};
   2730 
   2731 	const VkImageUsageFlags		compressedImageViewUsageFlags[OPERATION_LAST]	=
   2732 	{
   2733 		compressedImageUsageFlags[0],																									//"image_load"
   2734 		compressedImageUsageFlags[1],																									//"texel_fetch"
   2735 		compressedImageUsageFlags[2],																									//"texture"
   2736 		compressedImageUsageFlags[3],																									//"image_store"
   2737 		compressedImageUsageFlags[4],																									//"attachment_read"
   2738 		compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,																//"attachment_write"
   2739 		compressedImageUsageFlags[6],																									//"texture_read"
   2740 		compressedImageUsageFlags[7],																									//"texture_write"
   2741 	};
   2742 
   2743 	const VkImageUsageFlags		uncompressedImageUsageFlags[OPERATION_LAST]		=
   2744 	{
   2745 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),											//"image_load"
   2746 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"texel_fetch"
   2747 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"texture"
   2748 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"image_store"
   2749 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),	//"attachment_read"
   2750 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT),									//"attachment_write"
   2751 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),				//"texture_read"
   2752 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),											//"texture_write"
   2753 	};
   2754 
   2755 	const VkFormat				compressedFormats64bit[]						=
   2756 	{
   2757 		VK_FORMAT_BC1_RGB_UNORM_BLOCK,
   2758 		VK_FORMAT_BC1_RGB_SRGB_BLOCK,
   2759 		VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
   2760 		VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
   2761 		VK_FORMAT_BC4_UNORM_BLOCK,
   2762 		VK_FORMAT_BC4_SNORM_BLOCK,
   2763 		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
   2764 		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
   2765 		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
   2766 		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
   2767 		VK_FORMAT_EAC_R11_UNORM_BLOCK,
   2768 		VK_FORMAT_EAC_R11_SNORM_BLOCK,
   2769 	};
   2770 
   2771 	const VkFormat				compressedFormats128bit[]						=
   2772 	{
   2773 		VK_FORMAT_BC2_UNORM_BLOCK,
   2774 		VK_FORMAT_BC2_SRGB_BLOCK,
   2775 		VK_FORMAT_BC3_UNORM_BLOCK,
   2776 		VK_FORMAT_BC3_SRGB_BLOCK,
   2777 		VK_FORMAT_BC5_UNORM_BLOCK,
   2778 		VK_FORMAT_BC5_SNORM_BLOCK,
   2779 		VK_FORMAT_BC6H_UFLOAT_BLOCK,
   2780 		VK_FORMAT_BC6H_SFLOAT_BLOCK,
   2781 		VK_FORMAT_BC7_UNORM_BLOCK,
   2782 		VK_FORMAT_BC7_SRGB_BLOCK,
   2783 		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
   2784 		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
   2785 		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
   2786 		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
   2787 		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
   2788 		VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
   2789 		VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
   2790 		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
   2791 		VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
   2792 		VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
   2793 		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
   2794 		VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
   2795 		VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
   2796 		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
   2797 		VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
   2798 		VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
   2799 		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
   2800 		VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
   2801 		VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
   2802 		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
   2803 		VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
   2804 		VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
   2805 		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
   2806 		VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
   2807 		VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
   2808 		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
   2809 		VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
   2810 		VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
   2811 		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
   2812 		VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
   2813 		VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
   2814 		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
   2815 	};
   2816 
   2817 	const VkFormat				uncompressedFormats64bit[]						=
   2818 	{
   2819 		VK_FORMAT_R16G16B16A16_UNORM,
   2820 		VK_FORMAT_R16G16B16A16_SNORM,
   2821 		VK_FORMAT_R16G16B16A16_USCALED,
   2822 		VK_FORMAT_R16G16B16A16_SSCALED,
   2823 		VK_FORMAT_R16G16B16A16_UINT,
   2824 		VK_FORMAT_R16G16B16A16_SINT,
   2825 		VK_FORMAT_R16G16B16A16_SFLOAT,
   2826 		VK_FORMAT_R32G32_UINT,
   2827 		VK_FORMAT_R32G32_SINT,
   2828 		VK_FORMAT_R32G32_SFLOAT,
    2829 		//VK_FORMAT_R64_UINT,	removed from the test: the format could not be used
    2830 		//VK_FORMAT_R64_SINT,	removed from the test: the format could not be used
    2831 		//VK_FORMAT_R64_SFLOAT,	removed from the test: the format could not be used
   2832 	};
   2833 
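         	// Uncompressed formats with 128 bits per texel, used as texel-view-compatible views of the 128-bit compressed blocks above.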
   2834 	const VkFormat				uncompressedFormats128bit[]						=
   2835 	{
   2836 		VK_FORMAT_R32G32B32A32_UINT,
   2837 		VK_FORMAT_R32G32B32A32_SINT,
   2838 		VK_FORMAT_R32G32B32A32_SFLOAT,
    2839 		//VK_FORMAT_R64G64_UINT,	removed from the test: the format could not be used
    2840 		//VK_FORMAT_R64G64_SINT,	removed from the test: the format could not be used
    2841 		//VK_FORMAT_R64G64_SFLOAT,	removed from the test: the format could not be used
   2842 	};
   2843 
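         	// Pair the format lists by size: entry N of formatsCompressedSets is combined only with
         	// formats from entry N of formatsUncompressedSets in the loops below.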
   2844 	const FormatsArray			formatsCompressedSets[]							=
   2845 	{
   2846 		{
   2847 			compressedFormats64bit,
   2848 			DE_LENGTH_OF_ARRAY(compressedFormats64bit)
   2849 		},
   2850 		{
   2851 			compressedFormats128bit,
   2852 			DE_LENGTH_OF_ARRAY(compressedFormats128bit)
   2853 		},
   2854 	};
   2855 
   2856 	const FormatsArray			formatsUncompressedSets[]						=
   2857 	{
   2858 		{
   2859 			uncompressedFormats64bit,
   2860 			DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
   2861 		},
   2862 		{
   2863 			uncompressedFormats128bit,
   2864 			DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
   2865 		},
   2866 	};
   2867 
   2868 	DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
   2869 
   2870 	MovePtr<tcu::TestCaseGroup>	texelViewCompatibleTests							(new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));
   2871 
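         	// Build the test hierarchy: shader type / mipmapness / operation / compressed format / uncompressed format.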
   2872 	for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
   2873 	{
   2874 		MovePtr<tcu::TestCaseGroup>	pipelineTypeGroup	(new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));
   2875 
   2876 		for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
   2877 		{
   2878 			const bool mipmapTest = mipmapness[mipmapTestNdx];
   2879 
   2880 			MovePtr<tcu::TestCaseGroup>	mipmapTypeGroup	(new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));
   2881 
   2882 			for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
   2883 			{
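         				// Attachment and texture read/write operations are exercised only with the fragment-shader pipeline.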
   2884 				if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
   2885 					continue;
   2886 
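         				// Image load/store, texel fetch and texture operations are exercised only with the compute pipeline.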
   2887 				if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
   2888 					continue;
   2889 
   2890 				MovePtr<tcu::TestCaseGroup>	imageOperationGroup	(new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));
   2891 
    2892 				// Iterate through block-size groups (64 bit, 128 bit)
   2893 				for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
   2894 				{
   2895 					for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
   2896 					{
   2897 						const VkFormat				formatCompressed			= formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
   2898 						const std::string			compressedFormatGroupName	= getFormatShortString(formatCompressed);
   2899 						MovePtr<tcu::TestCaseGroup>	compressedFormatGroup		(new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));
   2900 
   2901 						for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
   2902 						{
   2903 							const VkFormat			formatUncompressed			= formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
   2904 							const std::string		uncompressedFormatGroupName	= getFormatShortString(formatUncompressed);
   2905 							const TestParameters	parameters					=
   2906 							{
   2907 								static_cast<Operation>(operationNdx),
   2908 								static_cast<ShaderType>(shaderType),
   2909 								mipmapTest ? getUnniceResolution(formatCompressed, 3u) : UVec3(64u, 64u, 1u),
   2910 								IMAGE_TYPE_2D,
   2911 								formatCompressed,
   2912 								formatUncompressed,
   2913 								(operationNdx == OPERATION_IMAGE_STORE) ? 3u : 2u,
   2914 								compressedImageUsageFlags[operationNdx],
   2915 								compressedImageViewUsageFlags[operationNdx],
   2916 								uncompressedImageUsageFlags[operationNdx],
   2917 								mipmapTest,
   2918 								VK_FORMAT_R8G8B8A8_UNORM
   2919 							};
   2920 
   2921 							compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
   2922 						}
   2923 
   2924 						imageOperationGroup->addChild(compressedFormatGroup.release());
   2925 					}
   2926 				}
   2927 
   2928 				mipmapTypeGroup->addChild(imageOperationGroup.release());
   2929 			}
   2930 
   2931 			pipelineTypeGroup->addChild(mipmapTypeGroup.release());
   2932 		}
   2933 
   2934 		texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
   2935 	}
   2936 
   2937 	return texelViewCompatibleTests.release();
   2938 }
   2939 
   2940 } // image
   2941 } // vkt
   2942