/*
 * Copyright 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc2(&ma, &device->alloc, pAllocator,
                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }
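
   /* The -1 fill above doubles as an "unused" marker: later code tests
    * fields such as dynamic_offset_index < 0 (see anv_CreatePipelineLayout)
    * to skip indices that were never assigned.
    */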

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
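
/* Worked example (hypothetical layout, for illustration only): given
 *
 *    binding 0: COMBINED_IMAGE_SAMPLER, descriptorCount = 2, FRAGMENT
 *    binding 1: UNIFORM_BUFFER_DYNAMIC, descriptorCount = 1, VERTEX
 *
 * anv_CreateDescriptorSetLayout produces set_layout->size == 3 with
 * descriptor_index 0 and 2.  Binding 0 gets fragment-stage sampler_index 0
 * and surface_index 0 (consuming two of each); binding 1 gets vertex-stage
 * surface_index 0, buffer_index 0, and dynamic_offset_index 0.  Binding 0's
 * buffer_index and binding 1's sampler_index stay at -1.
 */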

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   size_t size = sizeof(*layout) +
                 sizeof(layout->binding[0]) * layout->binding_count;
   _mesa_sha1_update(ctx, layout, size);
}
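
/* Note that sha1_update_descriptor_set_layout hashes the raw in-memory
 * struct, including host pointers such as immutable_samplers, rather than
 * just the Vulkan-visible state.
 */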

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy
 * enough, and the free list lets us recycle blocks for case 2).
 */
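
/* The host-memory free list is stored as byte offsets into pool->data, with
 * EMPTY as the end-of-list sentinel.  0 cannot serve as the sentinel because
 * it is a valid offset (the first set allocated from the pool); 1 works
 * because set allocations keep real entries aligned, so an entry can never
 * land at an odd offset.
 */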

#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         /* fall through */
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

size_t
anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout *binding)
{
   if (!binding->immutable_samplers)
      return binding->array_size;

   uint32_t total_plane_count = 0;
   for (uint32_t i = 0; i < binding->array_size; i++)
      total_plane_count += binding->immutable_samplers[i]->n_planes;

   return total_plane_count;
}

struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};
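
/* Allocate the host-side storage for a descriptor set.  We first try to
 * bump-allocate from the never-used tail of the pool; failing that, we do a
 * first-fit scan of the free list.  Together these satisfy the two
 * must-succeed cases described in the pool comment above.
 */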

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
      }
   }

   set->size = size;
   set->layout = layout;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}
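
/* Note the fast path above: freeing the most recently allocated set just
 * rewinds pool->next, while anything else is threaded onto the free list so
 * a later allocation of the same size (e.g. the same layout) can reuse it.
 */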

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   /* If any allocation failed, unwind the sets that did succeed. */
   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}

void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->bo = buffer->bo;
      bview->offset = buffer->offset + offset;
      bview->range = anv_buffer_get_range(buffer, offset, range);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer.  Otherwise it
       * will be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->offset, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}
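
/* Dynamic uniform/storage buffers above keep the raw buffer, offset, and
 * range in the descriptor so the dynamic offset supplied at bind time can
 * still be applied later; every other buffer type gets its surface state
 * baked into a buffer view immediately.
 */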

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}

/*
 * Descriptor update templates.
 */
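
/* Each template entry records where the descriptor infos for one binding
 * live in the caller-supplied pData blob: element j of an entry is read
 * from data + entry->offset + j * entry->stride.  Applying a template is
 * therefore just a typed walk over that blob, with no VkWriteDescriptorSet
 * parsing.
 */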

void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   const struct anv_descriptor_set_layout *layout = set->layout;

   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &layout->binding[entry->binding];
      struct anv_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      desc += entry->array_element;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplateKHR(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplateKHR*              pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplateKHR(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplateKHR(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}