/*
 * Copyright © 2016 Red Hat
 * based on intel anv code:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "radv_meta.h"

#include <fcntl.h>
#include <limits.h>
#include <pwd.h>
#include <sys/stat.h>

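/* Save the graphics state that the meta operations are about to clobber:
 * the bound pipeline, descriptor set 0, the vertex bindings, the dynamic
 * state selected by dynamic_mask, and the push constant block. The matching
 * radv_meta_restore() below puts everything back and marks the relevant
 * state dirty so it is re-emitted on the next draw.
 */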
void
radv_meta_save(struct radv_meta_saved_state *state,
	       const struct radv_cmd_buffer *cmd_buffer,
	       uint32_t dynamic_mask)
{
	state->old_pipeline = cmd_buffer->state.pipeline;
	state->old_descriptor_set0 = cmd_buffer->state.descriptors[0];
	memcpy(state->old_vertex_bindings, cmd_buffer->state.vertex_bindings,
	       sizeof(state->old_vertex_bindings));

	state->dynamic_mask = dynamic_mask;
	radv_dynamic_state_copy(&state->dynamic, &cmd_buffer->state.dynamic,
				dynamic_mask);

	memcpy(state->push_constants, cmd_buffer->push_constants, MAX_PUSH_CONSTANTS_SIZE);
}

void
radv_meta_restore(const struct radv_meta_saved_state *state,
		  struct radv_cmd_buffer *cmd_buffer)
{
	cmd_buffer->state.pipeline = state->old_pipeline;
	radv_bind_descriptor_set(cmd_buffer, state->old_descriptor_set0, 0);
	memcpy(cmd_buffer->state.vertex_bindings, state->old_vertex_bindings,
	       sizeof(state->old_vertex_bindings));

	cmd_buffer->state.vb_dirty |= (1 << RADV_META_VERTEX_BINDING_COUNT) - 1;
	cmd_buffer->state.dirty |= RADV_CMD_DIRTY_PIPELINE;

	radv_dynamic_state_copy(&cmd_buffer->state.dynamic, &state->dynamic,
				state->dynamic_mask);
	cmd_buffer->state.dirty |= state->dynamic_mask;

	memcpy(cmd_buffer->push_constants, state->push_constants, MAX_PUSH_CONSTANTS_SIZE);
	cmd_buffer->push_constant_stages |= VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT;
}

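/* Save and restore the render pass objects (pass, subpass, framebuffer,
 * attachments and render area) around meta operations that begin their own
 * render pass. Restoring re-emits the framebuffer state when a subpass was
 * active.
 */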
void
radv_meta_save_pass(struct radv_meta_saved_pass_state *state,
                    const struct radv_cmd_buffer *cmd_buffer)
{
	state->pass = cmd_buffer->state.pass;
	state->subpass = cmd_buffer->state.subpass;
	state->framebuffer = cmd_buffer->state.framebuffer;
	state->attachments = cmd_buffer->state.attachments;
	state->render_area = cmd_buffer->state.render_area;
}

void
radv_meta_restore_pass(const struct radv_meta_saved_pass_state *state,
                       struct radv_cmd_buffer *cmd_buffer)
{
	cmd_buffer->state.pass = state->pass;
	cmd_buffer->state.subpass = state->subpass;
	cmd_buffer->state.framebuffer = state->framebuffer;
	cmd_buffer->state.attachments = state->attachments;
	cmd_buffer->state.render_area = state->render_area;
	if (state->subpass)
		radv_emit_framebuffer_state(cmd_buffer);
}

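/* The compute variants only need the bound compute pipeline, descriptor
 * set 0 and, optionally, the first push_constant_size bytes of push
 * constants. Restoring rebinds the pipeline through radv_CmdBindPipeline()
 * so dirty tracking is handled by the normal bind path.
 */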
void
radv_meta_save_compute(struct radv_meta_saved_compute_state *state,
                       const struct radv_cmd_buffer *cmd_buffer,
                       unsigned push_constant_size)
{
	state->old_pipeline = cmd_buffer->state.compute_pipeline;
	state->old_descriptor_set0 = cmd_buffer->state.descriptors[0];

	if (push_constant_size)
		memcpy(state->push_constants, cmd_buffer->push_constants, push_constant_size);
}

void
radv_meta_restore_compute(const struct radv_meta_saved_compute_state *state,
                          struct radv_cmd_buffer *cmd_buffer,
                          unsigned push_constant_size)
{
	radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE,
			     radv_pipeline_to_handle(state->old_pipeline));
	radv_bind_descriptor_set(cmd_buffer, state->old_descriptor_set0, 0);

	if (push_constant_size) {
		memcpy(cmd_buffer->push_constants, state->push_constants, push_constant_size);
		cmd_buffer->push_constant_stages |= VK_SHADER_STAGE_COMPUTE_BIT;
	}
}

VkImageViewType
radv_meta_get_view_type(const struct radv_image *image)
{
	switch (image->type) {
	case VK_IMAGE_TYPE_1D: return VK_IMAGE_VIEW_TYPE_1D;
	case VK_IMAGE_TYPE_2D: return VK_IMAGE_VIEW_TYPE_2D;
	case VK_IMAGE_TYPE_3D: return VK_IMAGE_VIEW_TYPE_3D;
	default:
		unreachable("bad VkImageType");
	}
}

/**
 * When creating a destination VkImageView, this function provides the needed
 * VkImageViewCreateInfo::subresourceRange::baseArrayLayer.
 */
uint32_t
radv_meta_get_iview_layer(const struct radv_image *dest_image,
			  const VkImageSubresourceLayers *dest_subresource,
			  const VkOffset3D *dest_offset)
{
	switch (dest_image->type) {
	case VK_IMAGE_TYPE_1D:
	case VK_IMAGE_TYPE_2D:
		return dest_subresource->baseArrayLayer;
	case VK_IMAGE_TYPE_3D:
		/* HACK: Vulkan does not allow attaching a 3D image to a framebuffer,
		 * but meta does it anyway. When doing so, we translate the
		 * destination's z offset into an array offset.
		 */
		return dest_offset->z;
	default:
		assert(!"bad VkImageType");
		return 0;
	}
}

/* Thin wrappers that forward meta allocations to the device allocator with
 * device scope. */
static void *
meta_alloc(void* _device, size_t size, size_t alignment,
           VkSystemAllocationScope allocationScope)
{
	struct radv_device *device = _device;
	return device->alloc.pfnAllocation(device->alloc.pUserData, size, alignment,
					   VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
}

static void *
meta_realloc(void* _device, void *original, size_t size, size_t alignment,
             VkSystemAllocationScope allocationScope)
{
	struct radv_device *device = _device;
	return device->alloc.pfnReallocation(device->alloc.pUserData, original,
					     size, alignment,
					     VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
}

static void
meta_free(void* _device, void *data)
{
	struct radv_device *device = _device;
	device->alloc.pfnFree(device->alloc.pUserData, data);
}

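/* Build the on-disk path used for the builtin shader cache. Prefers
 * $XDG_CACHE_HOME/radv_builtin_shaders, otherwise falls back to
 * $HOME/.cache/radv_builtin_shaders (creating ~/.cache if needed).
 * Returns false if the path cannot be determined or would not fit.
 */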
static bool
radv_builtin_cache_path(char *path)
{
	char *xdg_cache_home = getenv("XDG_CACHE_HOME");
	const char *suffix = "/radv_builtin_shaders";
	const char *suffix2 = "/.cache/radv_builtin_shaders";
	struct passwd pwd, *result;
	char path2[PATH_MAX + 1]; /* PATH_MAX is not a real max, but suffices here. */

	if (xdg_cache_home) {
		if (strlen(xdg_cache_home) + strlen(suffix) > PATH_MAX)
			return false;

		strcpy(path, xdg_cache_home);
		strcat(path, suffix);
		return true;
	}

	getpwuid_r(getuid(), &pwd, path2, PATH_MAX - strlen(suffix2), &result);
	if (!result)
		return false;

	strcpy(path, pwd.pw_dir);
	strcat(path, "/.cache");
	mkdir(path, 0755);

	strcat(path, suffix);
	return true;
}

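/* Seed the meta pipeline cache from the builtin shader cache file, if one
 * exists. Failures are silently ignored; the cache simply starts out empty
 * and the meta pipelines are compiled at device initialization.
 */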
static void
radv_load_meta_pipeline(struct radv_device *device)
{
	char path[PATH_MAX + 1];
	struct stat st;
	void *data = NULL;

	if (!radv_builtin_cache_path(path))
		return;

	int fd = open(path, O_RDONLY);
	if (fd < 0)
		return;
	if (fstat(fd, &st))
		goto fail;
	data = malloc(st.st_size);
	if (!data)
		goto fail;
	if (read(fd, data, st.st_size) == -1)
		goto fail;

	radv_pipeline_cache_load(&device->meta_state.cache, data, st.st_size);
fail:
	free(data);
	close(fd);
}

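/* Write the meta pipeline cache back to disk if it was modified. The data is
 * written to a mkstemp() temporary next to the final path and then rename()d
 * over it, so a concurrent reader never sees a partially written cache.
 */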
static void
radv_store_meta_pipeline(struct radv_device *device)
{
	char path[PATH_MAX + 1], path2[PATH_MAX + 7];
	size_t size;
	void *data = NULL;

	if (!device->meta_state.cache.modified)
		return;

	if (radv_GetPipelineCacheData(radv_device_to_handle(device),
				      radv_pipeline_cache_to_handle(&device->meta_state.cache),
				      &size, NULL))
		return;

	if (!radv_builtin_cache_path(path))
		return;

	strcpy(path2, path);
	strcat(path2, "XXXXXX");
	int fd = mkstemp(path2);
	if (fd < 0)
		return;
	data = malloc(size);
	if (!data)
		goto fail;

	if (radv_GetPipelineCacheData(radv_device_to_handle(device),
				      radv_pipeline_cache_to_handle(&device->meta_state.cache),
				      &size, data))
		goto fail;
	if (write(fd, data, size) == -1)
		goto fail;

	rename(path2, path);
fail:
	free(data);
	close(fd);
	unlink(path2);
}

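/* Initialize all meta state: install the device allocator callbacks, load the
 * on-disk pipeline cache, then build each meta sub-state in turn. On failure
 * the already-initialized sub-states are torn down in reverse order before
 * returning the error.
 */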
VkResult
radv_device_init_meta(struct radv_device *device)
{
	VkResult result;

	device->meta_state.alloc = (VkAllocationCallbacks) {
		.pUserData = device,
		.pfnAllocation = meta_alloc,
		.pfnReallocation = meta_realloc,
		.pfnFree = meta_free,
	};

	device->meta_state.cache.alloc = device->meta_state.alloc;
	radv_pipeline_cache_init(&device->meta_state.cache, device);
	radv_load_meta_pipeline(device);

	result = radv_device_init_meta_clear_state(device);
	if (result != VK_SUCCESS)
		goto fail_clear;

	result = radv_device_init_meta_resolve_state(device);
	if (result != VK_SUCCESS)
		goto fail_resolve;

	result = radv_device_init_meta_blit_state(device);
	if (result != VK_SUCCESS)
		goto fail_blit;

	result = radv_device_init_meta_blit2d_state(device);
	if (result != VK_SUCCESS)
		goto fail_blit2d;

	result = radv_device_init_meta_bufimage_state(device);
	if (result != VK_SUCCESS)
		goto fail_bufimage;

	result = radv_device_init_meta_depth_decomp_state(device);
	if (result != VK_SUCCESS)
		goto fail_depth_decomp;

	result = radv_device_init_meta_buffer_state(device);
	if (result != VK_SUCCESS)
		goto fail_buffer;

	result = radv_device_init_meta_fast_clear_flush_state(device);
	if (result != VK_SUCCESS)
		goto fail_fast_clear;

	result = radv_device_init_meta_resolve_compute_state(device);
	if (result != VK_SUCCESS)
		goto fail_resolve_compute;
	return VK_SUCCESS;

fail_resolve_compute:
	radv_device_finish_meta_fast_clear_flush_state(device);
fail_fast_clear:
	radv_device_finish_meta_buffer_state(device);
fail_buffer:
	radv_device_finish_meta_depth_decomp_state(device);
fail_depth_decomp:
	radv_device_finish_meta_bufimage_state(device);
fail_bufimage:
	radv_device_finish_meta_blit2d_state(device);
fail_blit2d:
	radv_device_finish_meta_blit_state(device);
fail_blit:
	radv_device_finish_meta_resolve_state(device);
fail_resolve:
	radv_device_finish_meta_clear_state(device);
fail_clear:
	radv_pipeline_cache_finish(&device->meta_state.cache);
	return result;
}

void
radv_device_finish_meta(struct radv_device *device)
{
	radv_device_finish_meta_clear_state(device);
	radv_device_finish_meta_resolve_state(device);
	radv_device_finish_meta_blit_state(device);
	radv_device_finish_meta_blit2d_state(device);
	radv_device_finish_meta_bufimage_state(device);
	radv_device_finish_meta_depth_decomp_state(device);
	radv_device_finish_meta_buffer_state(device);
	radv_device_finish_meta_fast_clear_flush_state(device);
	radv_device_finish_meta_resolve_compute_state(device);

	radv_store_meta_pipeline(device);
	radv_pipeline_cache_finish(&device->meta_state.cache);
}

/*
 * The most common meta operations all want to have the viewport
 * reset and any scissors disabled. The rest of the dynamic state
 * should have no effect.
 */
void
radv_meta_save_graphics_reset_vport_scissor(struct radv_meta_saved_state *saved_state,
					    struct radv_cmd_buffer *cmd_buffer)
{
	uint32_t dirty_state = (1 << VK_DYNAMIC_STATE_VIEWPORT) | (1 << VK_DYNAMIC_STATE_SCISSOR);
	radv_meta_save(saved_state, cmd_buffer, dirty_state);
	cmd_buffer->state.dynamic.viewport.count = 0;
	cmd_buffer->state.dynamic.scissor.count = 0;
	cmd_buffer->state.dirty |= dirty_state;
}