
anv: Add a concept of a descriptor buffer
src/intel/vulkan/anv_cmd_buffer.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "anv_private.h"

#include "vk_format_info.h"

/** \file anv_cmd_buffer.c
 *
 * This file contains all of the stuff for emitting commands into a command
 * buffer.  This includes implementations of most of the vkCmd*
 * entrypoints.  This file is concerned entirely with state emission and
 * not with the command buffer data structure itself.  As far as this file
 * is concerned, most of anv_cmd_buffer is magic.
 */

/* TODO: These are taken from GLES.  We should check the Vulkan spec */
const struct anv_dynamic_state default_dynamic_state = {
   .viewport = {
      .count = 0,
   },
   .scissor = {
      .count = 0,
   },
   .line_width = 1.0f,
   .depth_bias = {
      .bias = 0.0f,
      .clamp = 0.0f,
      .slope = 0.0f,
   },
   .blend_constants = { 0.0f, 0.0f, 0.0f, 0.0f },
   .depth_bounds = {
      .min = 0.0f,
      .max = 1.0f,
   },
   .stencil_compare_mask = {
      .front = ~0u,
      .back = ~0u,
   },
   .stencil_write_mask = {
      .front = ~0u,
      .back = ~0u,
   },
   .stencil_reference = {
      .front = 0u,
      .back = 0u,
   },
};

void
anv_dynamic_state_copy(struct anv_dynamic_state *dest,
                       const struct anv_dynamic_state *src,
                       uint32_t copy_mask)
{
   if (copy_mask & (1 << VK_DYNAMIC_STATE_VIEWPORT)) {
      dest->viewport.count = src->viewport.count;
      typed_memcpy(dest->viewport.viewports, src->viewport.viewports,
                   src->viewport.count);
   }

   if (copy_mask & (1 << VK_DYNAMIC_STATE_SCISSOR)) {
      dest->scissor.count = src->scissor.count;
      typed_memcpy(dest->scissor.scissors, src->scissor.scissors,
                   src->scissor.count);
   }

   if (copy_mask & (1 << VK_DYNAMIC_STATE_LINE_WIDTH))
      dest->line_width = src->line_width;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_DEPTH_BIAS))
      dest->depth_bias = src->depth_bias;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_BLEND_CONSTANTS))
      typed_memcpy(dest->blend_constants, src->blend_constants, 4);

   if (copy_mask & (1 << VK_DYNAMIC_STATE_DEPTH_BOUNDS))
      dest->depth_bounds = src->depth_bounds;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK))
      dest->stencil_compare_mask = src->stencil_compare_mask;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_STENCIL_WRITE_MASK))
      dest->stencil_write_mask = src->stencil_write_mask;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_STENCIL_REFERENCE))
      dest->stencil_reference = src->stencil_reference;
}
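
/* Illustrative example (not part of the driver): copy_mask is built from
 * VK_DYNAMIC_STATE_* enum values, one bit per state, mirroring what
 * anv_CmdBindPipeline does below with pipeline->dynamic_state_mask.  A
 * caller copying only the viewport and line-width state would do:
 *
 *    uint32_t copy_mask = (1 << VK_DYNAMIC_STATE_VIEWPORT) |
 *                         (1 << VK_DYNAMIC_STATE_LINE_WIDTH);
 *    anv_dynamic_state_copy(&cmd_buffer->state.gfx.dynamic,
 *                           &pipeline->dynamic_state, copy_mask);
 */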

static void
anv_cmd_state_init(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_cmd_state *state = &cmd_buffer->state;

   memset(state, 0, sizeof(*state));

   state->current_pipeline = UINT32_MAX;
   state->restart_index = UINT32_MAX;
   state->gfx.dynamic = default_dynamic_state;
}

static void
anv_cmd_pipeline_state_finish(struct anv_cmd_buffer *cmd_buffer,
                              struct anv_cmd_pipeline_state *pipe_state)
{
   for (uint32_t i = 0; i < ARRAY_SIZE(pipe_state->push_descriptors); i++) {
      if (pipe_state->push_descriptors[i]) {
         anv_descriptor_set_layout_unref(cmd_buffer->device,
             pipe_state->push_descriptors[i]->set.layout);
         vk_free(&cmd_buffer->pool->alloc, pipe_state->push_descriptors[i]);
      }
   }
}

static void
anv_cmd_state_finish(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_cmd_state *state = &cmd_buffer->state;

   anv_cmd_pipeline_state_finish(cmd_buffer, &state->gfx.base);
   anv_cmd_pipeline_state_finish(cmd_buffer, &state->compute.base);

   for (uint32_t i = 0; i < MESA_SHADER_STAGES; i++)
      vk_free(&cmd_buffer->pool->alloc, state->push_constants[i]);

   vk_free(&cmd_buffer->pool->alloc, state->attachments);
}

static void
anv_cmd_state_reset(struct anv_cmd_buffer *cmd_buffer)
{
   anv_cmd_state_finish(cmd_buffer);
   anv_cmd_state_init(cmd_buffer);
}

/**
 * This function updates the size of the push constant buffer we need to emit.
 * This is called in various parts of the driver to ensure that different
 * pieces of push constant data get emitted as needed. However, it is important
 * that we never shrink the size of the buffer. For example, a compute shader
 * dispatch will always call this for the base group id, which has an
 * offset in the push constant buffer that is smaller than the offset for
 * storage image data. If the compute shader has storage images, we will call
 * this again with a larger size during binding table emission. However,
 * if we dispatch the compute shader again without dirtying our descriptors,
 * we would still call this function with a smaller size for the base group
 * id, and not for the images, which would incorrectly shrink the size of the
 * push constant data we emit with that dispatch, making us drop the image data.
 */
VkResult
anv_cmd_buffer_ensure_push_constants_size(struct anv_cmd_buffer *cmd_buffer,
                                          gl_shader_stage stage, uint32_t size)
{
   struct anv_push_constants **ptr = &cmd_buffer->state.push_constants[stage];

   if (*ptr == NULL) {
      *ptr = vk_alloc(&cmd_buffer->pool->alloc, size, 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (*ptr == NULL) {
         anv_batch_set_error(&cmd_buffer->batch, VK_ERROR_OUT_OF_HOST_MEMORY);
         return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      }
      (*ptr)->size = size;
   } else if ((*ptr)->size < size) {
      /* Keep the old allocation (and its size) valid if the realloc fails,
       * rather than overwriting the pointer and leaking it.
       */
      struct anv_push_constants *new_data =
         vk_realloc(&cmd_buffer->pool->alloc, *ptr, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (new_data == NULL) {
         anv_batch_set_error(&cmd_buffer->batch, VK_ERROR_OUT_OF_HOST_MEMORY);
         return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      }
      *ptr = new_data;
      (*ptr)->size = size;
   }

   return VK_SUCCESS;
}
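
/* Illustrative example (not part of the driver): successive calls for the
 * same stage only ever grow the allocation.  With hypothetical sizes:
 *
 *    anv_cmd_buffer_ensure_push_constants_size(cmd_buffer,
 *                                              MESA_SHADER_COMPUTE, 16);
 *    anv_cmd_buffer_ensure_push_constants_size(cmd_buffer,
 *                                              MESA_SHADER_COMPUTE, 256);
 *    anv_cmd_buffer_ensure_push_constants_size(cmd_buffer,
 *                                              MESA_SHADER_COMPUTE, 16);
 *
 * leaves state.push_constants[MESA_SHADER_COMPUTE]->size at 256, so data
 * written at the larger offsets (e.g. storage image parameters) survives a
 * later call that only covers the base group id.
 */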

static VkResult anv_create_cmd_buffer(
    struct anv_device *                         device,
    struct anv_cmd_pool *                       pool,
    VkCommandBufferLevel                        level,
    VkCommandBuffer*                            pCommandBuffer)
{
   struct anv_cmd_buffer *cmd_buffer;
   VkResult result;

   cmd_buffer = vk_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (cmd_buffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd_buffer->batch.status = VK_SUCCESS;

   cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
   cmd_buffer->device = device;
   cmd_buffer->pool = pool;
   cmd_buffer->level = level;

   result = anv_cmd_buffer_init_batch_bo_chain(cmd_buffer);
   if (result != VK_SUCCESS)
      goto fail;

   anv_state_stream_init(&cmd_buffer->surface_state_stream,
                         &device->surface_state_pool, 4096);
   anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
                         &device->dynamic_state_pool, 16384);

   anv_cmd_state_init(cmd_buffer);

   if (pool) {
      list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);
   } else {
      /* Init the pool_link so we can safely call list_del when we destroy
       * the command buffer
       */
      list_inithead(&cmd_buffer->pool_link);
   }

   *pCommandBuffer = anv_cmd_buffer_to_handle(cmd_buffer);

   return VK_SUCCESS;

 fail:
   vk_free(&cmd_buffer->pool->alloc, cmd_buffer);

   return result;
}

VkResult anv_AllocateCommandBuffers(
    VkDevice                                    _device,
    const VkCommandBufferAllocateInfo*          pAllocateInfo,
    VkCommandBuffer*                            pCommandBuffers)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_cmd_pool, pool, pAllocateInfo->commandPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      result = anv_create_cmd_buffer(device, pool, pAllocateInfo->level,
                                     &pCommandBuffers[i]);
      if (result != VK_SUCCESS)
         break;
   }

   if (result != VK_SUCCESS) {
      anv_FreeCommandBuffers(_device, pAllocateInfo->commandPool,
                             i, pCommandBuffers);
      for (i = 0; i < pAllocateInfo->commandBufferCount; i++)
         pCommandBuffers[i] = VK_NULL_HANDLE;
   }

   return result;
}

static void
anv_cmd_buffer_destroy(struct anv_cmd_buffer *cmd_buffer)
{
   list_del(&cmd_buffer->pool_link);

   anv_cmd_buffer_fini_batch_bo_chain(cmd_buffer);

   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);

   anv_cmd_state_finish(cmd_buffer);

   vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}

void anv_FreeCommandBuffers(
    VkDevice                                    device,
    VkCommandPool                               commandPool,
    uint32_t                                    commandBufferCount,
    const VkCommandBuffer*                      pCommandBuffers)
{
   for (uint32_t i = 0; i < commandBufferCount; i++) {
      ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, pCommandBuffers[i]);

      if (!cmd_buffer)
         continue;

      anv_cmd_buffer_destroy(cmd_buffer);
   }
}

VkResult
anv_cmd_buffer_reset(struct anv_cmd_buffer *cmd_buffer)
{
   cmd_buffer->usage_flags = 0;
   anv_cmd_buffer_reset_batch_bo_chain(cmd_buffer);
   anv_cmd_state_reset(cmd_buffer);

   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_init(&cmd_buffer->surface_state_stream,
                         &cmd_buffer->device->surface_state_pool, 4096);

   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
   anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
                         &cmd_buffer->device->dynamic_state_pool, 16384);
   return VK_SUCCESS;
}

VkResult anv_ResetCommandBuffer(
    VkCommandBuffer                             commandBuffer,
    VkCommandBufferResetFlags                   flags)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   return anv_cmd_buffer_reset(cmd_buffer);
}

#define anv_genX_call(devinfo, func, ...)          \
   switch ((devinfo)->gen) {                       \
   case 7:                                         \
      if ((devinfo)->is_haswell) {                 \
         gen75_##func(__VA_ARGS__);                \
      } else {                                     \
         gen7_##func(__VA_ARGS__);                 \
      }                                            \
      break;                                       \
   case 8:                                         \
      gen8_##func(__VA_ARGS__);                    \
      break;                                       \
   case 9:                                         \
      gen9_##func(__VA_ARGS__);                    \
      break;                                       \
   case 10:                                        \
      gen10_##func(__VA_ARGS__);                   \
      break;                                       \
   case 11:                                        \
      gen11_##func(__VA_ARGS__);                   \
      break;                                       \
   default:                                        \
      assert(!"Unknown hardware generation");      \
   }

void
anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer)
{
   anv_genX_call(&cmd_buffer->device->info,
                 cmd_buffer_emit_state_base_address,
                 cmd_buffer);
}

void
anv_cmd_buffer_mark_image_written(struct anv_cmd_buffer *cmd_buffer,
                                  const struct anv_image *image,
                                  VkImageAspectFlagBits aspect,
                                  enum isl_aux_usage aux_usage,
                                  uint32_t level,
                                  uint32_t base_layer,
                                  uint32_t layer_count)
{
   anv_genX_call(&cmd_buffer->device->info,
                 cmd_buffer_mark_image_written,
                 cmd_buffer, image, aspect, aux_usage,
                 level, base_layer, layer_count);
}

void
anv_cmd_emit_conditional_render_predicate(struct anv_cmd_buffer *cmd_buffer)
{
   anv_genX_call(&cmd_buffer->device->info,
                 cmd_emit_conditional_render_predicate,
                 cmd_buffer);
}

void anv_CmdBindPipeline(
    VkCommandBuffer                             commandBuffer,
    VkPipelineBindPoint                         pipelineBindPoint,
    VkPipeline                                  _pipeline)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   ANV_FROM_HANDLE(anv_pipeline, pipeline, _pipeline);

   switch (pipelineBindPoint) {
   case VK_PIPELINE_BIND_POINT_COMPUTE:
      cmd_buffer->state.compute.base.pipeline = pipeline;
      cmd_buffer->state.compute.pipeline_dirty = true;
      cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
      cmd_buffer->state.descriptors_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
      break;

   case VK_PIPELINE_BIND_POINT_GRAPHICS:
      cmd_buffer->state.gfx.base.pipeline = pipeline;
      cmd_buffer->state.gfx.vb_dirty |= pipeline->vb_used;
      cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_PIPELINE;
      cmd_buffer->state.push_constants_dirty |= pipeline->active_stages;
      cmd_buffer->state.descriptors_dirty |= pipeline->active_stages;

      /* Apply the dynamic state from the pipeline */
      cmd_buffer->state.gfx.dirty |= pipeline->dynamic_state_mask;
      anv_dynamic_state_copy(&cmd_buffer->state.gfx.dynamic,
                             &pipeline->dynamic_state,
                             pipeline->dynamic_state_mask);
      break;

   default:
      assert(!"invalid bind point");
      break;
   }
}

void anv_CmdSetViewport(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstViewport,
    uint32_t                                    viewportCount,
    const VkViewport*                           pViewports)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   const uint32_t total_count = firstViewport + viewportCount;
   if (cmd_buffer->state.gfx.dynamic.viewport.count < total_count)
      cmd_buffer->state.gfx.dynamic.viewport.count = total_count;

   memcpy(cmd_buffer->state.gfx.dynamic.viewport.viewports + firstViewport,
          pViewports, viewportCount * sizeof(*pViewports));

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_VIEWPORT;
}

void anv_CmdSetScissor(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstScissor,
    uint32_t                                    scissorCount,
    const VkRect2D*                             pScissors)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   const uint32_t total_count = firstScissor + scissorCount;
   if (cmd_buffer->state.gfx.dynamic.scissor.count < total_count)
      cmd_buffer->state.gfx.dynamic.scissor.count = total_count;

   memcpy(cmd_buffer->state.gfx.dynamic.scissor.scissors + firstScissor,
          pScissors, scissorCount * sizeof(*pScissors));

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_SCISSOR;
}

void anv_CmdSetLineWidth(
    VkCommandBuffer                             commandBuffer,
    float                                       lineWidth)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->state.gfx.dynamic.line_width = lineWidth;
   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH;
}

void anv_CmdSetDepthBias(
    VkCommandBuffer                             commandBuffer,
    float                                       depthBiasConstantFactor,
    float                                       depthBiasClamp,
    float                                       depthBiasSlopeFactor)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->state.gfx.dynamic.depth_bias.bias = depthBiasConstantFactor;
   cmd_buffer->state.gfx.dynamic.depth_bias.clamp = depthBiasClamp;
   cmd_buffer->state.gfx.dynamic.depth_bias.slope = depthBiasSlopeFactor;

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS;
}

void anv_CmdSetBlendConstants(
    VkCommandBuffer                             commandBuffer,
    const float                                 blendConstants[4])
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   memcpy(cmd_buffer->state.gfx.dynamic.blend_constants,
          blendConstants, sizeof(float) * 4);

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS;
}

void anv_CmdSetDepthBounds(
    VkCommandBuffer                             commandBuffer,
    float                                       minDepthBounds,
    float                                       maxDepthBounds)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->state.gfx.dynamic.depth_bounds.min = minDepthBounds;
   cmd_buffer->state.gfx.dynamic.depth_bounds.max = maxDepthBounds;

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS;
}

void anv_CmdSetStencilCompareMask(
    VkCommandBuffer                             commandBuffer,
    VkStencilFaceFlags                          faceMask,
    uint32_t                                    compareMask)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmd_buffer->state.gfx.dynamic.stencil_compare_mask.front = compareMask;
   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmd_buffer->state.gfx.dynamic.stencil_compare_mask.back = compareMask;

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK;
}

void anv_CmdSetStencilWriteMask(
    VkCommandBuffer                             commandBuffer,
    VkStencilFaceFlags                          faceMask,
    uint32_t                                    writeMask)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmd_buffer->state.gfx.dynamic.stencil_write_mask.front = writeMask;
   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmd_buffer->state.gfx.dynamic.stencil_write_mask.back = writeMask;

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK;
}

void anv_CmdSetStencilReference(
    VkCommandBuffer                             commandBuffer,
    VkStencilFaceFlags                          faceMask,
    uint32_t                                    reference)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmd_buffer->state.gfx.dynamic.stencil_reference.front = reference;
   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmd_buffer->state.gfx.dynamic.stencil_reference.back = reference;

   cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE;
}

static void
anv_cmd_buffer_bind_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
                                   VkPipelineBindPoint bind_point,
                                   struct anv_pipeline_layout *layout,
                                   uint32_t set_index,
                                   struct anv_descriptor_set *set,
                                   uint32_t *dynamic_offset_count,
                                   const uint32_t **dynamic_offsets)
{
   struct anv_descriptor_set_layout *set_layout =
      layout->set[set_index].layout;

   struct anv_cmd_pipeline_state *pipe_state;
   if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
      pipe_state = &cmd_buffer->state.compute.base;
   } else {
      assert(bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS);
      pipe_state = &cmd_buffer->state.gfx.base;
   }
   pipe_state->descriptors[set_index] = set;

   if (dynamic_offsets) {
      if (set_layout->dynamic_offset_count > 0) {
         uint32_t dynamic_offset_start =
            layout->set[set_index].dynamic_offset_start;

         /* Assert that everything is in range */
         assert(set_layout->dynamic_offset_count <= *dynamic_offset_count);
         assert(dynamic_offset_start + set_layout->dynamic_offset_count <=
                ARRAY_SIZE(pipe_state->dynamic_offsets));

         typed_memcpy(&pipe_state->dynamic_offsets[dynamic_offset_start],
                      *dynamic_offsets, set_layout->dynamic_offset_count);

         *dynamic_offsets += set_layout->dynamic_offset_count;
         *dynamic_offset_count -= set_layout->dynamic_offset_count;
      }
   }

   if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
      cmd_buffer->state.descriptors_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
   } else {
      assert(bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS);
      cmd_buffer->state.descriptors_dirty |=
         set_layout->shader_stages & VK_SHADER_STAGE_ALL_GRAPHICS;
   }

   /* Pipeline layout objects are required to live at least while any command
    * buffers that use them are in recording state. We need to grab a reference
    * to the pipeline layout being bound here so we can compute correct dynamic
    * offsets for VK_DESCRIPTOR_TYPE_*_DYNAMIC in dynamic_offset_for_binding()
    * when we record draw commands that come after this.
    */
   pipe_state->layout = layout;
}
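
/* Illustrative example (not part of the driver): the dynamic offset array
 * supplied to vkCmdBindDescriptorSets is consumed in set order.  If set 0
 * contains two dynamic buffers and set 1 contains one, three offsets are
 * passed in and each call advances the pointer/count pair past its share:
 *
 *    uint32_t offsets[3] = { 0, 256, 512 };    // hypothetical offsets
 *    const uint32_t *p = offsets;
 *    uint32_t n = 3;
 *    anv_cmd_buffer_bind_descriptor_set(cmd_buffer, bind_point, layout,
 *                                       0, set0, &n, &p);   // n == 1 after
 *    anv_cmd_buffer_bind_descriptor_set(cmd_buffer, bind_point, layout,
 *                                       1, set1, &n, &p);   // n == 0 after
 */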

void anv_CmdBindDescriptorSets(
    VkCommandBuffer                             commandBuffer,
    VkPipelineBindPoint                         pipelineBindPoint,
    VkPipelineLayout                            _layout,
    uint32_t                                    firstSet,
    uint32_t                                    descriptorSetCount,
    const VkDescriptorSet*                      pDescriptorSets,
    uint32_t                                    dynamicOffsetCount,
    const uint32_t*                             pDynamicOffsets)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   ANV_FROM_HANDLE(anv_pipeline_layout, layout, _layout);

   assert(firstSet + descriptorSetCount <= MAX_SETS);

   for (uint32_t i = 0; i < descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
      anv_cmd_buffer_bind_descriptor_set(cmd_buffer, pipelineBindPoint,
                                         layout, firstSet + i, set,
                                         &dynamicOffsetCount,
                                         &pDynamicOffsets);
   }
}

void anv_CmdBindVertexBuffers(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstBinding,
    uint32_t                                    bindingCount,
    const VkBuffer*                             pBuffers,
    const VkDeviceSize*                         pOffsets)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   struct anv_vertex_binding *vb = cmd_buffer->state.vertex_bindings;

   /* We have to defer setting up the vertex buffers since we need the
    * buffer stride from the pipeline.
    */

   assert(firstBinding + bindingCount <= MAX_VBS);
   for (uint32_t i = 0; i < bindingCount; i++) {
      vb[firstBinding + i].buffer = anv_buffer_from_handle(pBuffers[i]);
      vb[firstBinding + i].offset = pOffsets[i];
      cmd_buffer->state.gfx.vb_dirty |= 1 << (firstBinding + i);
   }
}

void anv_CmdBindTransformFeedbackBuffersEXT(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstBinding,
    uint32_t                                    bindingCount,
    const VkBuffer*                             pBuffers,
    const VkDeviceSize*                         pOffsets,
    const VkDeviceSize*                         pSizes)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   struct anv_xfb_binding *xfb = cmd_buffer->state.xfb_bindings;

   /* We have to defer setting up the transform feedback buffers since we
    * need the buffer stride from the pipeline.
    */

   assert(firstBinding + bindingCount <= MAX_XFB_BUFFERS);
   for (uint32_t i = 0; i < bindingCount; i++) {
      if (pBuffers[i] == VK_NULL_HANDLE) {
         xfb[firstBinding + i].buffer = NULL;
      } else {
         ANV_FROM_HANDLE(anv_buffer, buffer, pBuffers[i]);
         xfb[firstBinding + i].buffer = buffer;
         xfb[firstBinding + i].offset = pOffsets[i];
         xfb[firstBinding + i].size =
            anv_buffer_get_range(buffer, pOffsets[i],
                                 pSizes ? pSizes[i] : VK_WHOLE_SIZE);
      }
   }
}
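
/* Illustrative example (not part of the driver): anv_buffer_get_range
 * resolves VK_WHOLE_SIZE against the buffer's actual size, so passing
 * pSizes == NULL binds from the offset to the end of the buffer.  With a
 * hypothetical 4096-byte buffer:
 *
 *    anv_buffer_get_range(buffer, 1024, VK_WHOLE_SIZE) == 3072
 *    anv_buffer_get_range(buffer, 1024, 512)           == 512
 */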

enum isl_format
anv_isl_format_for_descriptor_type(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      return ISL_FORMAT_R32G32B32A32_FLOAT;

   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return ISL_FORMAT_RAW;

   default:
      unreachable("Invalid descriptor type");
   }
}

struct anv_state
anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
                            const void *data, uint32_t size, uint32_t alignment)
{
   struct anv_state state;

   state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, size, alignment);
   memcpy(state.map, data, size);

   VG(VALGRIND_CHECK_MEM_IS_DEFINED(state.map, size));

   return state;
}

struct anv_state
anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
                             uint32_t *a, uint32_t *b,
                             uint32_t dwords, uint32_t alignment)
{
   struct anv_state state;
   uint32_t *p;

   state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                              dwords * 4, alignment);
   p = state.map;
   for (uint32_t i = 0; i < dwords; i++)
      p[i] = a[i] | b[i];

   VG(VALGRIND_CHECK_MEM_IS_DEFINED(p, dwords * 4));

   return state;
}
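
/* Illustrative sketch (not part of the driver): callers OR a dword stream
 * baked into the pipeline at creation time with one packed from dynamic
 * state at record time.  Assuming both arrays hold the same 4-dword packet:
 *
 *    uint32_t dyn[4];      // packed from cmd_buffer->state.gfx.dynamic
 *    uint32_t baked[4];    // packed when the pipeline was created
 *    struct anv_state state =
 *       anv_cmd_buffer_merge_dynamic(cmd_buffer, dyn, baked, 4, 64);
 *
 * state.map[i] then holds dyn[i] | baked[i]; this is the pattern the gen7
 * code uses to combine 3DSTATE_SF fields from the pipeline with the
 * dynamic line width.
 */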

static uint32_t
anv_push_constant_value(struct anv_push_constants *data, uint32_t param)
{
   if (BRW_PARAM_IS_BUILTIN(param)) {
      switch (param) {
      case BRW_PARAM_BUILTIN_ZERO:
         return 0;
      case BRW_PARAM_BUILTIN_BASE_WORK_GROUP_ID_X:
         return data->base_work_group_id[0];
      case BRW_PARAM_BUILTIN_BASE_WORK_GROUP_ID_Y:
         return data->base_work_group_id[1];
      case BRW_PARAM_BUILTIN_BASE_WORK_GROUP_ID_Z:
         return data->base_work_group_id[2];
      default:
         unreachable("Invalid param builtin");
      }
   } else {
      uint32_t offset = ANV_PARAM_PUSH_OFFSET(param);
      assert(offset % sizeof(uint32_t) == 0);
      if (offset < data->size)
         return *(uint32_t *)((uint8_t *)data + offset);
      else
         return 0;
   }
}
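
/* Illustrative example (not part of the driver): a param either names a
 * builtin or encodes a byte offset into the push constant data, assuming
 * the ANV_PARAM_PUSH() encoding helper that pairs with
 * ANV_PARAM_PUSH_OFFSET() above.  With data->base_work_group_id = {1, 2, 3}:
 *
 *    anv_push_constant_value(data, BRW_PARAM_BUILTIN_BASE_WORK_GROUP_ID_Y)
 *       returns 2, and
 *    anv_push_constant_value(data, ANV_PARAM_PUSH(8))
 *       returns the uint32_t stored 8 bytes into *data (or 0 if that
 *       offset is past data->size).
 */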

struct anv_state
anv_cmd_buffer_push_constants(struct anv_cmd_buffer *cmd_buffer,
                              gl_shader_stage stage)
{
   struct anv_pipeline *pipeline = cmd_buffer->state.gfx.base.pipeline;

   /* If we don't have this stage, bail. */
   if (!anv_pipeline_has_stage(pipeline, stage))
      return (struct anv_state) { .offset = 0 };

   struct anv_push_constants *data =
      cmd_buffer->state.push_constants[stage];
   const struct brw_stage_prog_data *prog_data =
      pipeline->shaders[stage]->prog_data;

   /* If we don't actually have any push constants, bail. */
   if (data == NULL || prog_data == NULL || prog_data->nr_params == 0)
      return (struct anv_state) { .offset = 0 };

   struct anv_state state =
      anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                         prog_data->nr_params * sizeof(float),
                                         32 /* bottom 5 bits MBZ */);

   /* Walk through the param array and fill the buffer with data */
   uint32_t *u32_map = state.map;
   for (unsigned i = 0; i < prog_data->nr_params; i++)
      u32_map[i] = anv_push_constant_value(data, prog_data->param[i]);

   return state;
}

struct anv_state
anv_cmd_buffer_cs_push_constants(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_push_constants *data =
      cmd_buffer->state.push_constants[MESA_SHADER_COMPUTE];
   struct anv_pipeline *pipeline = cmd_buffer->state.compute.base.pipeline;
   const struct brw_cs_prog_data *cs_prog_data = get_cs_prog_data(pipeline);
   const struct brw_stage_prog_data *prog_data = &cs_prog_data->base;

   /* If we don't actually have any push constants, bail. */
   if (cs_prog_data->push.total.size == 0)
      return (struct anv_state) { .offset = 0 };

   const unsigned push_constant_alignment =
      cmd_buffer->device->info.gen < 8 ? 32 : 64;
   const unsigned aligned_total_push_constants_size =
      ALIGN(cs_prog_data->push.total.size, push_constant_alignment);
   struct anv_state state =
      anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                         aligned_total_push_constants_size,
                                         push_constant_alignment);

   /* Walk through the param array and fill the buffer with data */
   uint32_t *u32_map = state.map;

   if (cs_prog_data->push.cross_thread.size > 0) {
      for (unsigned i = 0;
           i < cs_prog_data->push.cross_thread.dwords;
           i++) {
         assert(prog_data->param[i] != BRW_PARAM_BUILTIN_SUBGROUP_ID);
         u32_map[i] = anv_push_constant_value(data, prog_data->param[i]);
      }
   }

   if (cs_prog_data->push.per_thread.size > 0) {
      for (unsigned t = 0; t < cs_prog_data->threads; t++) {
         unsigned dst =
            8 * (cs_prog_data->push.per_thread.regs * t +
                 cs_prog_data->push.cross_thread.regs);
         unsigned src = cs_prog_data->push.cross_thread.dwords;
         for ( ; src < prog_data->nr_params; src++, dst++) {
            if (prog_data->param[src] == BRW_PARAM_BUILTIN_SUBGROUP_ID) {
               u32_map[dst] = t;
            } else {
               u32_map[dst] =
                  anv_push_constant_value(data, prog_data->param[src]);
            }
         }
      }
   }

   return state;
}
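
/* Worked example (illustrative): with cross_thread.regs == 1 (holding 2
 * dwords of real data), per_thread.regs == 1 and threads == 2, the loops
 * above lay the buffer out as:
 *
 *    dwords  0..1    cross-thread params, read by every thread
 *    dwords  2..7    padding up to the full cross-thread register
 *    dwords  8..15   per-thread params for thread 0 (SUBGROUP_ID == 0)
 *    dwords 16..23   per-thread params for thread 1 (SUBGROUP_ID == 1)
 *
 * matching dst = 8 * (per_thread.regs * t + cross_thread.regs).
 */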

void anv_CmdPushConstants(
    VkCommandBuffer                             commandBuffer,
    VkPipelineLayout                            layout,
    VkShaderStageFlags                          stageFlags,
    uint32_t                                    offset,
    uint32_t                                    size,
    const void*                                 pValues)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   anv_foreach_stage(stage, stageFlags) {
      VkResult result =
         anv_cmd_buffer_ensure_push_constant_field(cmd_buffer,
                                                   stage, client_data);
      if (result != VK_SUCCESS)
         return;

      memcpy(cmd_buffer->state.push_constants[stage]->client_data + offset,
             pValues, size);
   }

   cmd_buffer->state.push_constants_dirty |= stageFlags;
}

VkResult anv_CreateCommandPool(
    VkDevice                                    _device,
    const VkCommandPoolCreateInfo*              pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkCommandPool*                              pCmdPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_cmd_pool *pool;

   pool = vk_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (pool == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   if (pAllocator)
      pool->alloc = *pAllocator;
   else
      pool->alloc = device->alloc;

   list_inithead(&pool->cmd_buffers);

   *pCmdPool = anv_cmd_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyCommandPool(
    VkDevice                                    _device,
    VkCommandPool                               commandPool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_cmd_pool, pool, commandPool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct anv_cmd_buffer, cmd_buffer,
                            &pool->cmd_buffers, pool_link) {
      anv_cmd_buffer_destroy(cmd_buffer);
   }

   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetCommandPool(
    VkDevice                                    device,
    VkCommandPool                               commandPool,
    VkCommandPoolResetFlags                     flags)
{
   ANV_FROM_HANDLE(anv_cmd_pool, pool, commandPool);

   list_for_each_entry(struct anv_cmd_buffer, cmd_buffer,
                       &pool->cmd_buffers, pool_link) {
      anv_cmd_buffer_reset(cmd_buffer);
   }

   return VK_SUCCESS;
}

void anv_TrimCommandPool(
    VkDevice                                    device,
    VkCommandPool                               commandPool,
    VkCommandPoolTrimFlags                      flags)
{
   /* Nothing for us to do here.  Our pools stay pretty tidy. */
}

/**
 * Return NULL if the current subpass has no depth/stencil attachment.
 */
const struct anv_image_view *
anv_cmd_buffer_get_depth_stencil_view(const struct anv_cmd_buffer *cmd_buffer)
{
   const struct anv_subpass *subpass = cmd_buffer->state.subpass;
   const struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;

   if (subpass->depth_stencil_attachment == NULL)
      return NULL;

   const struct anv_image_view *iview =
      fb->attachments[subpass->depth_stencil_attachment->attachment];

   assert(iview->aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT |
                                VK_IMAGE_ASPECT_STENCIL_BIT));

   return iview;
}

static struct anv_descriptor_set *
anv_cmd_buffer_push_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
                                   VkPipelineBindPoint bind_point,
                                   struct anv_descriptor_set_layout *layout,
                                   uint32_t _set)
{
   struct anv_cmd_pipeline_state *pipe_state;
   if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
      pipe_state = &cmd_buffer->state.compute.base;
   } else {
      assert(bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS);
      pipe_state = &cmd_buffer->state.gfx.base;
   }

   struct anv_push_descriptor_set **push_set =
      &pipe_state->push_descriptors[_set];

   if (*push_set == NULL) {
      *push_set = vk_zalloc(&cmd_buffer->pool->alloc,
                            sizeof(struct anv_push_descriptor_set), 8,
                            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (*push_set == NULL) {
         anv_batch_set_error(&cmd_buffer->batch, VK_ERROR_OUT_OF_HOST_MEMORY);
         return NULL;
      }
   }

   struct anv_descriptor_set *set = &(*push_set)->set;

   if (set->layout != layout) {
      if (set->layout)
         anv_descriptor_set_layout_unref(cmd_buffer->device, set->layout);
      anv_descriptor_set_layout_ref(layout);
      set->layout = layout;
   }
   set->size = anv_descriptor_set_layout_size(layout);
   set->buffer_view_count = layout->buffer_view_count;
   set->buffer_views = (*push_set)->buffer_views;

   if (layout->descriptor_buffer_size &&
       ((*push_set)->set_used_on_gpu ||
        set->desc_mem.alloc_size < layout->descriptor_buffer_size)) {
      /* The previous buffer is either actively used by some GPU command (so
       * we can't modify it) or is too small.  Allocate a new one.
       */
      struct anv_state desc_mem =
         anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
                                layout->descriptor_buffer_size, 32);
      if (set->desc_mem.alloc_size) {
         /* TODO: Do we really need to copy all the time? */
         memcpy(desc_mem.map, set->desc_mem.map,
                MIN2(desc_mem.alloc_size, set->desc_mem.alloc_size));
      }
      set->desc_mem = desc_mem;

      struct anv_address addr = {
         .bo = cmd_buffer->dynamic_state_stream.state_pool->block_pool.bo,
         .offset = set->desc_mem.offset,
      };

      const struct isl_device *isl_dev = &cmd_buffer->device->isl_dev;
      set->desc_surface_state =
         anv_state_stream_alloc(&cmd_buffer->surface_state_stream,
                                isl_dev->ss.size, isl_dev->ss.align);
      anv_fill_buffer_surface_state(cmd_buffer->device,
                                    set->desc_surface_state,
                                    ISL_FORMAT_R32G32B32A32_FLOAT,
                                    addr, layout->descriptor_buffer_size, 1);
   }

   return set;
}
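
/* Illustrative example (not part of the driver): the reallocation rule
 * above is what makes back-to-back pushes in one command buffer safe.
 * "layout", "write_a" and "write_b" are hypothetical:
 *
 *    vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
 *                              layout, 0, 1, &write_a);
 *    vkCmdDraw(cmd, 3, 1, 0, 0);   // GPU may read the descriptor buffer,
 *                                  // so the driver flags set_used_on_gpu
 *    vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
 *                              layout, 0, 1, &write_b);
 *    vkCmdDraw(cmd, 3, 1, 0, 0);
 *
 * The second push gets a fresh desc_mem (with the old contents copied in)
 * rather than overwriting memory the first draw may still be reading.
 */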

void anv_CmdPushDescriptorSetKHR(
    VkCommandBuffer commandBuffer,
    VkPipelineBindPoint pipelineBindPoint,
    VkPipelineLayout _layout,
    uint32_t _set,
    uint32_t descriptorWriteCount,
    const VkWriteDescriptorSet* pDescriptorWrites)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   ANV_FROM_HANDLE(anv_pipeline_layout, layout, _layout);

   assert(_set < MAX_SETS);

   struct anv_descriptor_set_layout *set_layout = layout->set[_set].layout;

   struct anv_descriptor_set *set =
      anv_cmd_buffer_push_descriptor_set(cmd_buffer, pipelineBindPoint,
                                         set_layout, _set);
   if (!set)
      return;

   /* Go through the user supplied descriptors. */
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(cmd_buffer->device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(cmd_buffer->device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(cmd_buffer->device, set,
                                            &cmd_buffer->surface_state_stream,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   anv_cmd_buffer_bind_descriptor_set(cmd_buffer, pipelineBindPoint,
                                      layout, _set, set, NULL, NULL);
}
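
/* Illustrative example (not part of the driver): a minimal client-side
 * call into this entrypoint, pushing one sampled image to binding 0 of
 * set 0.  "cmd", "view" and "layout" are assumed to exist.  Note that
 * dstSet is ignored for push descriptors; the set index comes from the
 * _set parameter instead:
 *
 *    VkDescriptorImageInfo info = {
 *       .imageView = view,
 *       .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
 *       .pImageInfo = &info,
 *    };
 *    vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
 *                              layout, 0, 1, &write);
 */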

void anv_CmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer                             commandBuffer,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    VkPipelineLayout                            _layout,
    uint32_t                                    _set,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);
   ANV_FROM_HANDLE(anv_pipeline_layout, layout, _layout);

   assert(_set < MAX_PUSH_DESCRIPTORS);

   struct anv_descriptor_set_layout *set_layout = layout->set[_set].layout;

   struct anv_descriptor_set *set =
      anv_cmd_buffer_push_descriptor_set(cmd_buffer, template->bind_point,
                                         set_layout, _set);
   if (!set)
      return;

   anv_descriptor_set_write_template(cmd_buffer->device, set,
                                     &cmd_buffer->surface_state_stream,
                                     template,
                                     pData);

   anv_cmd_buffer_bind_descriptor_set(cmd_buffer, template->bind_point,
                                      layout, _set, set, NULL, NULL);
}

void anv_CmdSetDeviceMask(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    deviceMask)
{
   /* No-op */
}