/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <string.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* One allocation covers the layout struct, the binding array, and the
    * immutable sampler pointers.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc2(&ma, &device->alloc, pAllocator,
                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;

      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

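   /* Second pass: walk the bindings in increasing binding number.  Gaps in
    * the binding numbering are allowed, so any slot whose stashed pointer is
    * still NULL is simply skipped.
    */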
   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      assert(binding->descriptorCount > 0);
#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through: buffer descriptors also need a surface slot */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

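/* Hashing the raw in-memory layout below is sound because
 * anv_CreateDescriptorSetLayout memsets the whole structure (and every
 * binding entry) before filling it in, so no uninitialized padding leaks
 * into the SHA-1.
 */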
static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   size_t size = sizeof(*layout) +
                 sizeof(layout->binding[0]) * layout->binding_count;
   _mesa_sha1_update(ctx, layout, size);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy
 * enough, and the free lists let us recycle blocks for case 2).
 */

#define EMPTY 1

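/* Pool memory layout (an informal sketch; the names match the code below):
 *
 *    pool->data                                 pool->data + pool->size
 *    |<-------- allocated sets -------->|<------ free tail ------>|
 *                                       ^
 *                                   pool->next
 *
 * Each set is a contiguous anv_descriptor_set header followed by its
 * anv_descriptor array and anv_buffer_view array (see
 * anv_descriptor_set_layout_size).  Freed sets become pool_free_list_entry
 * records in place, chained by byte offsets starting at pool->free_list,
 * with EMPTY terminating the chain.
 */
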
VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         /* fall through: buffer descriptors are descriptors too */
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_block_pool);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_block_pool);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

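/* Freed surface states are recycled by writing one of these entries into the
 * CPU mapping of the state itself (see anv_descriptor_set_destroy), so the
 * free list costs no extra host memory.
 */
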
VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      /* Fast path: carve the set out of the unused tail of the pool. */
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      /* Otherwise, search the free list for a block that is big enough. */
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
      }
   }

   set->size = size;
   set->layout = layout;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      /* If this was the most recent allocation, just rewind the bump
       * pointer instead of growing the free list.
       */
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

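/* Together, the bump-pointer rewind above and the free list cover the two
 * no-fragmentation cases the spec requires of us: a freshly reset pool
 * allocates linearly, and same-layout alloc/free cycles recycle
 * same-sized blocks.
 */
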
VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   /* On failure, free the sets allocated so far; at this point i is the
    * number of successful allocations.
    */
   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .image_view = image_view,
      .sampler = sampler,
      .aux_usage = image_view == NULL ? ISL_AUX_USAGE_NONE :
                   anv_layout_to_aux_usage(devinfo, image_view->image,
                                           image_view->aspect_mask,
                                           info->imageLayout),
   };
}

void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->bo = buffer->bo;
      bview->offset = buffer->offset + offset;
      bview->range = anv_buffer_get_range(buffer, offset, range);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer.  Otherwise it
       * will be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->offset, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}

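/* anv_UpdateDescriptorSets below always passes alloc_stream == NULL, so
 * regular descriptor sets reuse the surface state their pool allocated.  A
 * non-NULL stream is only expected from push-descriptor paths that write
 * through a command buffer.
 */
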
void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}

/*
 * Descriptor update templates.
 */

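/* A template records, per entry, where in the application's data blob the
 * descriptor infos live (a byte offset plus a per-element stride).  Applying
 * a template is then just a replay of the same write helpers used by
 * vkUpdateDescriptorSets, reading from data + offset + j * stride.
 */
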
void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   const struct anv_descriptor_set_layout *layout = set->layout;

   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &layout->binding[entry->binding];
      struct anv_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      desc += entry->array_element;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplateKHR(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplateKHR*              pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* The set number is only defined for push descriptor templates */
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplateKHR(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplateKHR(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplateKHR               descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}