2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
25 #include "nir/nir_builder.h"
/* One blit region: a source box and a destination box in texel coordinates.
 * NOTE(review): the enclosing "struct blit_region {" header and closing
 * brace are not visible in this chunk (original lines appear dropped) —
 * confirm against the full file. */
28 VkOffset3D src_offset;
29 VkExtent3D src_extent;
30 VkOffset3D dest_offset;
31 VkExtent3D dest_extent;
/* Build the NIR vertex shader for the meta blit path.  It is a pure
 * pass-through: position (generic attrib 0) is copied to gl_Position and
 * the texture coordinate (generic attrib 1) to VARYING_SLOT_VAR0 for the
 * fragment shader to interpolate.
 * NOTE(review): the return type, nir_builder declaration, and trailing
 * return of b.shader are not visible here (dropped lines) — confirm. */
35 build_nir_vertex_shader(void)
/* All inputs/outputs below are vec4s. */
37 const struct glsl_type *vec4 = glsl_vec4_type();
40 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_VERTEX, NULL);
/* Name the shader so it is identifiable in dumps/debuggers. */
41 b.shader->info.name = ralloc_strdup(b.shader, "meta_blit_vs");
/* Pass-through position: VERT_ATTRIB_GENERIC0 -> VARYING_SLOT_POS. */
43 nir_variable *pos_in = nir_variable_create(b.shader, nir_var_shader_in,
45 pos_in->data.location = VERT_ATTRIB_GENERIC0;
46 nir_variable *pos_out = nir_variable_create(b.shader, nir_var_shader_out,
48 pos_out->data.location = VARYING_SLOT_POS;
49 nir_copy_var(&b, pos_out, pos_in);
/* Pass-through texture coordinate: VERT_ATTRIB_GENERIC1 ->
 * VARYING_SLOT_VAR0, smoothly interpolated. */
51 nir_variable *tex_pos_in = nir_variable_create(b.shader, nir_var_shader_in,
53 tex_pos_in->data.location = VERT_ATTRIB_GENERIC1;
54 nir_variable *tex_pos_out = nir_variable_create(b.shader, nir_var_shader_out,
56 tex_pos_out->data.location = VARYING_SLOT_VAR0;
57 tex_pos_out->data.interpolation = INTERP_QUALIFIER_SMOOTH;
58 nir_copy_var(&b, tex_pos_out, tex_pos_in);
/* Build the NIR fragment shader for the meta blit path: sample the source
 * image at the interpolated texture coordinate and write the texel to
 * color output 0.  One variant is built per sampler dimensionality
 * (1D / 2D / 3D).
 * NOTE(review): the return type, nir_builder declaration, and trailing
 * return statement are not visible in this chunk — confirm. */
64 build_nir_copy_fragment_shader(enum glsl_sampler_dim tex_dim)
66 const struct glsl_type *vec4 = glsl_vec4_type();
69 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
70 b.shader->info.name = ralloc_strdup(b.shader, "meta_blit_fs");
/* Interpolated tex coord from the vertex shader (VARYING_SLOT_VAR0). */
72 nir_variable *tex_pos_in = nir_variable_create(b.shader, nir_var_shader_in,
74 tex_pos_in->data.location = VARYING_SLOT_VAR0;
76 /* Swizzle the array index which comes in as Z coordinate into the right
/* For 1D the layer moves into component 1 and only 2 components are used;
 * otherwise keep xyz as-is (3 components). */
79 unsigned swz[] = { 0, (tex_dim == GLSL_SAMPLER_DIM_1D ? 2 : 1), 2 };
80 nir_ssa_def *const tex_pos =
81 nir_swizzle(&b, nir_load_var(&b, tex_pos_in), swz,
82 (tex_dim == GLSL_SAMPLER_DIM_1D ? 2 : 3), false);
/* Sampler uniform at set 0, binding 0.  1D/2D sources are sampled as
 * arrayed images; 3D is not. */
84 const struct glsl_type *sampler_type =
85 glsl_sampler_type(tex_dim, false, tex_dim != GLSL_SAMPLER_DIM_3D,
86 glsl_get_base_type(vec4));
87 nir_variable *sampler = nir_variable_create(b.shader, nir_var_uniform,
88 sampler_type, "s_tex");
89 sampler->data.descriptor_set = 0;
90 sampler->data.binding = 0;
/* Emit a plain textured fetch (nir_texop_tex) at tex_pos. */
92 nir_tex_instr *tex = nir_tex_instr_create(b.shader, 1);
93 tex->sampler_dim = tex_dim;
94 tex->op = nir_texop_tex;
95 tex->src[0].src_type = nir_tex_src_coord;
96 tex->src[0].src = nir_src_for_ssa(tex_pos);
97 tex->dest_type = nir_type_float; /* TODO */
98 tex->is_array = glsl_sampler_type_is_array(sampler_type);
99 tex->coord_components = tex_pos->num_components;
/* Texture and sampler share the same combined-image-sampler variable. */
100 tex->texture = nir_deref_var_create(tex, sampler);
101 tex->sampler = nir_deref_var_create(tex, sampler);
103 nir_ssa_dest_init(&tex->instr, &tex->dest, 4, "tex");
104 nir_builder_instr_insert(&b, &tex->instr);
/* Write the sampled texel to FRAG_RESULT_DATA0 (full xyzw writemask). */
106 nir_variable *color_out = nir_variable_create(b.shader, nir_var_shader_out,
108 color_out->data.location = FRAG_RESULT_DATA0;
109 nir_store_var(&b, color_out, &tex->dest.ssa, 4);
/* Save the command buffer state that the blit meta-op will clobber
 * (including the dynamic viewport) so meta_finish_blit() can restore it. */
115 meta_prepare_blit(struct anv_cmd_buffer *cmd_buffer,
116 struct anv_meta_saved_state *saved_state)
118 anv_meta_save(saved_state, cmd_buffer,
119 (1 << VK_DYNAMIC_STATE_VIEWPORT));
/* Emit one blit: draw a rectangle into dest_iview sampling from src_iview.
 * Builds a small vertex buffer (VUE header + 3 vertices), creates
 * throw-away sampler/descriptor/framebuffer objects, runs a single
 * render pass with the dimensionality-matched blit pipeline, and
 * destroys the temporaries.  Caller must have run meta_prepare_blit().
 * NOTE(review): line numbering in this chunk is non-contiguous; several
 * statements (e.g. struct fields, switch 'break's, designators) are not
 * visible — comments below describe only what is shown. */
123 anv_meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
124 struct anv_image *src_image,
125 struct anv_image_view *src_iview,
126 VkOffset3D src_offset,
127 VkExtent3D src_extent,
128 struct anv_image *dest_image,
129 struct anv_image_view *dest_iview,
130 VkOffset3D dest_offset,
131 VkExtent3D dest_extent,
132 VkFilter blit_filter)
134 struct anv_device *device = cmd_buffer->device;
/* Per-vertex payload: dest position + source texture coordinate.
 * NOTE(review): the member declarations of blit_vb_data are not visible
 * in this chunk — presumably vec2 pos + vec3 tex_coord; confirm. */
136 struct blit_vb_data {
/* Multisampled blits are not supported by this path. */
141 assert(src_image->samples == dest_image->samples);
/* VB layout: one anv_vue_header followed by 3 vertices (a single
 * rect-covering triangle). */
143 unsigned vb_size = sizeof(struct anv_vue_header) + 3 * sizeof(*vb_data);
145 struct anv_state vb_state =
146 anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, vb_size, 16);
/* Zero the VUE header; vertex data starts right after it. */
147 memset(vb_state.map, 0, sizeof(struct anv_vue_header));
148 vb_data = vb_state.map + sizeof(struct anv_vue_header);
/* Vertex 0: bottom-right of the dest rect; tex coord at the source
 * rect's (x1, y1), normalized by the source view extent. */
150 vb_data[0] = (struct blit_vb_data) {
152 dest_offset.x + dest_extent.width,
153 dest_offset.y + dest_extent.height,
156 (float)(src_offset.x + src_extent.width)
157 / (float)src_iview->extent.width,
158 (float)(src_offset.y + src_extent.height)
159 / (float)src_iview->extent.height,
160 (float)src_offset.z / (float)src_iview->extent.depth,
/* Vertex 1: bottom-left of the dest rect; tex coord at (x0, y1). */
164 vb_data[1] = (struct blit_vb_data) {
167 dest_offset.y + dest_extent.height,
170 (float)src_offset.x / (float)src_iview->extent.width,
171 (float)(src_offset.y + src_extent.height) /
172 (float)src_iview->extent.height,
173 (float)src_offset.z / (float)src_iview->extent.depth,
/* Vertex 2: top-left of the dest rect; tex coord at (x0, y0). */
177 vb_data[2] = (struct blit_vb_data) {
183 (float)src_offset.x / (float)src_iview->extent.width,
184 (float)src_offset.y / (float)src_iview->extent.height,
185 (float)src_offset.z / (float)src_iview->extent.depth,
/* Flush the CPU-written vertex data out of the cache so the GPU sees it. */
189 anv_state_clflush(vb_state);
/* Wrap the dynamic-state allocation in a stack anv_buffer so it can be
 * bound through the normal vertex-buffer path. */
191 struct anv_buffer vertex_buffer = {
194 .bo = &device->dynamic_state_block_pool.bo,
195 .offset = vb_state.offset,
/* Bind the same buffer twice: binding 0 carries the VUE header,
 * binding 1 the per-vertex data (offset past the header below). */
198 anv_CmdBindVertexBuffers(anv_cmd_buffer_to_handle(cmd_buffer), 0, 2,
200 anv_buffer_to_handle(&vertex_buffer),
201 anv_buffer_to_handle(&vertex_buffer)
205 sizeof(struct anv_vue_header),
/* Temporary sampler using the requested blit filter. */
209 ANV_CALL(CreateSampler)(anv_device_to_handle(device),
210 &(VkSamplerCreateInfo) {
211 .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
212 .magFilter = blit_filter,
213 .minFilter = blit_filter,
214 }, &cmd_buffer->pool->alloc, &sampler);
/* One-shot descriptor pool + set holding a single
 * combined-image-sampler pointing at the source view. */
216 VkDescriptorPool desc_pool;
217 anv_CreateDescriptorPool(anv_device_to_handle(device),
218 &(const VkDescriptorPoolCreateInfo) {
219 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
224 .pPoolSizes = (VkDescriptorPoolSize[]) {
226 .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
230 }, &cmd_buffer->pool->alloc, &desc_pool);
233 anv_AllocateDescriptorSets(anv_device_to_handle(device),
234 &(VkDescriptorSetAllocateInfo) {
235 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
236 .descriptorPool = desc_pool,
237 .descriptorSetCount = 1,
238 .pSetLayouts = &device->meta_state.blit.ds_layout
241 anv_UpdateDescriptorSets(anv_device_to_handle(device),
243 (VkWriteDescriptorSet[]) {
245 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
248 .dstArrayElement = 0,
249 .descriptorCount = 1,
250 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
251 .pImageInfo = (VkDescriptorImageInfo[]) {
254 .imageView = anv_image_view_to_handle(src_iview),
255 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
/* Temporary framebuffer wrapping the destination view. */
262 anv_CreateFramebuffer(anv_device_to_handle(device),
263 &(VkFramebufferCreateInfo) {
264 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
265 .attachmentCount = 1,
266 .pAttachments = (VkImageView[]) {
267 anv_image_view_to_handle(dest_iview),
269 .width = dest_iview->extent.width,
270 .height = dest_iview->extent.height,
272 }, &cmd_buffer->pool->alloc, &fb);
/* Render pass restricted to the destination rectangle; nothing is
 * cleared since the attachment is loaded (LOAD_OP_LOAD). */
274 ANV_CALL(CmdBeginRenderPass)(anv_cmd_buffer_to_handle(cmd_buffer),
275 &(VkRenderPassBeginInfo) {
276 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
277 .renderPass = device->meta_state.blit.render_pass,
280 .offset = { dest_offset.x, dest_offset.y },
281 .extent = { dest_extent.width, dest_extent.height },
283 .clearValueCount = 0,
284 .pClearValues = NULL,
285 }, VK_SUBPASS_CONTENTS_INLINE);
/* Select the pipeline variant matching the source dimensionality.
 * NOTE(review): the 'break;' after each case is not visible in this
 * chunk — presumably dropped by extraction; confirm in the full file. */
289 switch (src_image->type) {
290 case VK_IMAGE_TYPE_1D:
291 pipeline = device->meta_state.blit.pipeline_1d_src;
293 case VK_IMAGE_TYPE_2D:
294 pipeline = device->meta_state.blit.pipeline_2d_src;
296 case VK_IMAGE_TYPE_3D:
297 pipeline = device->meta_state.blit.pipeline_3d_src;
300 unreachable(!"bad VkImageType");
/* Avoid a redundant pipeline bind if it is already current. */
303 if (cmd_buffer->state.pipeline != anv_pipeline_from_handle(pipeline)) {
304 anv_CmdBindPipeline(anv_cmd_buffer_to_handle(cmd_buffer),
305 VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
/* Viewport covering the whole destination view. */
308 anv_CmdSetViewport(anv_cmd_buffer_to_handle(cmd_buffer), 0, 1,
312 .width = dest_iview->extent.width,
313 .height = dest_iview->extent.height,
318 anv_CmdBindDescriptorSets(anv_cmd_buffer_to_handle(cmd_buffer),
319 VK_PIPELINE_BIND_POINT_GRAPHICS,
320 device->meta_state.blit.pipeline_layout, 0, 1,
/* Single 3-vertex draw performs the blit. */
323 ANV_CALL(CmdDraw)(anv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
325 ANV_CALL(CmdEndRenderPass)(anv_cmd_buffer_to_handle(cmd_buffer));
327 /* At the point where we emit the draw call, all data from the
328 * descriptor sets, etc. has been used. We are free to delete it.
330 anv_DestroyDescriptorPool(anv_device_to_handle(device),
331 desc_pool, &cmd_buffer->pool->alloc);
332 anv_DestroySampler(anv_device_to_handle(device), sampler,
333 &cmd_buffer->pool->alloc);
334 anv_DestroyFramebuffer(anv_device_to_handle(device), fb,
335 &cmd_buffer->pool->alloc);
/* Restore the command-buffer state saved by meta_prepare_blit(). */
339 meta_finish_blit(struct anv_cmd_buffer *cmd_buffer,
340 const struct anv_meta_saved_state *saved_state)
342 anv_meta_restore(saved_state, cmd_buffer);
/* vkCmdBlitImage entry point: for each region, build temporary source and
 * destination image views and emit one meta blit draw.  Flipped blits,
 * multi-layer copies, multi-depth copies, and the VkFilter parameter are
 * not yet implemented (anv_finishme below). */
345 void anv_CmdBlitImage(
346 VkCommandBuffer commandBuffer,
348 VkImageLayout srcImageLayout,
350 VkImageLayout destImageLayout,
351 uint32_t regionCount,
352 const VkImageBlit* pRegions,
356 ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
357 ANV_FROM_HANDLE(anv_image, src_image, srcImage);
358 ANV_FROM_HANDLE(anv_image, dest_image, destImage);
359 struct anv_meta_saved_state saved_state;
361 /* From the Vulkan 1.0 spec:
363 * vkCmdBlitImage must not be used for multisampled source or
364 * destination images. Use vkCmdResolveImage for this purpose.
366 assert(src_image->samples == 1);
367 assert(dest_image->samples == 1);
/* The blit_filter argument is currently ignored by the pipeline setup. */
369 anv_finishme("respect VkFilter");
371 meta_prepare_blit(cmd_buffer, &saved_state);
373 for (unsigned r = 0; r < regionCount; r++) {
/* Source view over the region's subresource, usable for sampling. */
374 struct anv_image_view src_iview;
375 anv_image_view_init(&src_iview, cmd_buffer->device,
376 &(VkImageViewCreateInfo) {
377 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
379 .viewType = anv_meta_get_view_type(src_image),
380 .format = src_image->vk_format,
381 .subresourceRange = {
382 .aspectMask = pRegions[r].srcSubresource.aspectMask,
383 .baseMipLevel = pRegions[r].srcSubresource.mipLevel,
385 .baseArrayLayer = pRegions[r].srcSubresource.baseArrayLayer,
389 cmd_buffer, 0, VK_IMAGE_USAGE_SAMPLED_BIT);
/* Destination origin is the region's first dst offset. */
391 const VkOffset3D dest_offset = {
392 .x = pRegions[r].dstOffsets[0].x,
393 .y = pRegions[r].dstOffsets[0].y,
/* Negative extents would mean a flipped blit, which this path cannot
 * do yet — flag it rather than emit garbage. */
397 if (pRegions[r].dstOffsets[1].x < pRegions[r].dstOffsets[0].x ||
398 pRegions[r].dstOffsets[1].y < pRegions[r].dstOffsets[0].y ||
399 pRegions[r].srcOffsets[1].x < pRegions[r].srcOffsets[0].x ||
400 pRegions[r].srcOffsets[1].y < pRegions[r].srcOffsets[0].y)
401 anv_finishme("FINISHME: Allow flipping in blits");
/* Extents are derived from the offset pairs (offsets[1] - offsets[0]). */
403 const VkExtent3D dest_extent = {
404 .width = pRegions[r].dstOffsets[1].x - pRegions[r].dstOffsets[0].x,
405 .height = pRegions[r].dstOffsets[1].y - pRegions[r].dstOffsets[0].y,
408 const VkExtent3D src_extent = {
409 .width = pRegions[r].srcOffsets[1].x - pRegions[r].srcOffsets[0].x,
410 .height = pRegions[r].srcOffsets[1].y - pRegions[r].srcOffsets[0].y,
/* Resolve the dest array slice from the subresource + z offset. */
413 const uint32_t dest_array_slice =
414 anv_meta_get_iview_layer(dest_image, &pRegions[r].dstSubresource,
415 &pRegions[r].dstOffsets[0]);
/* Only a single layer / single depth slice is handled per region. */
417 if (pRegions[r].srcSubresource.layerCount > 1)
418 anv_finishme("FINISHME: copy multiple array layers");
420 if (pRegions[r].srcOffsets[0].z + 1 != pRegions[r].srcOffsets[1].z ||
421 pRegions[r].dstOffsets[0].z + 1 != pRegions[r].dstOffsets[1].z)
422 anv_finishme("FINISHME: copy multiple depth layers");
/* Destination view, usable as a color attachment. */
424 struct anv_image_view dest_iview;
425 anv_image_view_init(&dest_iview, cmd_buffer->device,
426 &(VkImageViewCreateInfo) {
427 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
429 .viewType = anv_meta_get_view_type(dest_image),
430 .format = dest_image->vk_format,
431 .subresourceRange = {
432 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
433 .baseMipLevel = pRegions[r].dstSubresource.mipLevel,
435 .baseArrayLayer = dest_array_slice,
439 cmd_buffer, 0, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
441 anv_meta_emit_blit(cmd_buffer,
442 src_image, &src_iview,
443 pRegions[r].srcOffsets[0], src_extent,
444 dest_image, &dest_iview,
445 dest_offset, dest_extent,
449 meta_finish_blit(cmd_buffer, &saved_state);
/* Destroy all device-level blit meta state created by
 * anv_device_init_meta_blit_state(): the render pass, the three
 * per-dimensionality pipelines, the pipeline layout, and the descriptor
 * set layout. */
453 anv_device_finish_meta_blit_state(struct anv_device *device)
455 anv_DestroyRenderPass(anv_device_to_handle(device),
456 device->meta_state.blit.render_pass,
457 &device->meta_state.alloc);
458 anv_DestroyPipeline(anv_device_to_handle(device),
459 device->meta_state.blit.pipeline_1d_src,
460 &device->meta_state.alloc);
461 anv_DestroyPipeline(anv_device_to_handle(device),
462 device->meta_state.blit.pipeline_2d_src,
463 &device->meta_state.alloc);
464 anv_DestroyPipeline(anv_device_to_handle(device),
465 device->meta_state.blit.pipeline_3d_src,
466 &device->meta_state.alloc);
467 anv_DestroyPipelineLayout(anv_device_to_handle(device),
468 device->meta_state.blit.pipeline_layout,
469 &device->meta_state.alloc);
470 anv_DestroyDescriptorSetLayout(anv_device_to_handle(device),
471 device->meta_state.blit.ds_layout,
472 &device->meta_state.alloc);
/* One-time creation of the device-level blit meta state: a render pass,
 * descriptor set layout, pipeline layout, and three graphics pipelines
 * (1D/2D/3D source).  On failure, already-created objects are unwound via
 * the goto chain at the bottom.  Paired with
 * anv_device_finish_meta_blit_state().
 * NOTE(review): numbering here is non-contiguous — some lines (several
 * designators, a few goto labels and return statements) are not visible
 * in this chunk. */
476 anv_device_init_meta_blit_state(struct anv_device *device)
/* Render pass: one color attachment with format left UNDEFINED (the
 * shaders don't care), loaded and stored, always in GENERAL layout. */
480 result = anv_CreateRenderPass(anv_device_to_handle(device),
481 &(VkRenderPassCreateInfo) {
482 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
483 .attachmentCount = 1,
484 .pAttachments = &(VkAttachmentDescription) {
485 .format = VK_FORMAT_UNDEFINED, /* Our shaders don't care */
486 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
487 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
488 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
489 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
/* Single subpass: one color attachment, no depth/stencil in use. */
492 .pSubpasses = &(VkSubpassDescription) {
493 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
494 .inputAttachmentCount = 0,
495 .colorAttachmentCount = 1,
496 .pColorAttachments = &(VkAttachmentReference) {
498 .layout = VK_IMAGE_LAYOUT_GENERAL,
500 .pResolveAttachments = NULL,
501 .pDepthStencilAttachment = &(VkAttachmentReference) {
502 .attachment = VK_ATTACHMENT_UNUSED,
503 .layout = VK_IMAGE_LAYOUT_GENERAL,
505 .preserveAttachmentCount = 1,
506 .pPreserveAttachments = (uint32_t[]) { 0 },
508 .dependencyCount = 0,
509 }, &device->meta_state.alloc, &device->meta_state.blit.render_pass);
510 if (result != VK_SUCCESS)
513 /* We don't use a vertex shader for blitting, but instead build and pass
514 * the VUEs directly to the rasterization backend. However, we do need
515 * to provide GLSL source for the vertex shader so that the compiler
516 * does not dead-code our inputs.
/* Build the NIR shader modules: one VS plus one FS per sampler dim. */
518 struct anv_shader_module vs = {
519 .nir = build_nir_vertex_shader(),
522 struct anv_shader_module fs_1d = {
523 .nir = build_nir_copy_fragment_shader(GLSL_SAMPLER_DIM_1D),
526 struct anv_shader_module fs_2d = {
527 .nir = build_nir_copy_fragment_shader(GLSL_SAMPLER_DIM_2D),
530 struct anv_shader_module fs_3d = {
531 .nir = build_nir_copy_fragment_shader(GLSL_SAMPLER_DIM_3D),
/* Vertex input: binding 0 carries the VUE header, binding 1 the
 * per-vertex data (pos vec2 + tex coord vec3 = 5 floats). */
534 VkPipelineVertexInputStateCreateInfo vi_create_info = {
535 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
536 .vertexBindingDescriptionCount = 2,
537 .pVertexBindingDescriptions = (VkVertexInputBindingDescription[]) {
541 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
545 .stride = 5 * sizeof(float),
546 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
549 .vertexAttributeDescriptionCount = 3,
550 .pVertexAttributeDescriptions = (VkVertexInputAttributeDescription[]) {
/* Attribute 0: VUE header (RGBA32_UINT from binding 0). */
555 .format = VK_FORMAT_R32G32B32A32_UINT,
/* Attribute 1: position (RG32_SFLOAT). */
562 .format = VK_FORMAT_R32G32_SFLOAT,
566 /* Texture Coordinate */
569 .format = VK_FORMAT_R32G32B32_SFLOAT,
/* Descriptor set layout: one combined image sampler visible to the
 * fragment stage (matches set 0 / binding 0 in the FS). */
575 VkDescriptorSetLayoutCreateInfo ds_layout_info = {
576 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
578 .pBindings = (VkDescriptorSetLayoutBinding[]) {
581 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
582 .descriptorCount = 1,
583 .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
584 .pImmutableSamplers = NULL
588 result = anv_CreateDescriptorSetLayout(anv_device_to_handle(device),
590 &device->meta_state.alloc,
591 &device->meta_state.blit.ds_layout);
592 if (result != VK_SUCCESS)
593 goto fail_render_pass;
595 result = anv_CreatePipelineLayout(anv_device_to_handle(device),
596 &(VkPipelineLayoutCreateInfo) {
597 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
599 .pSetLayouts = &device->meta_state.blit.ds_layout,
601 &device->meta_state.alloc, &device->meta_state.blit.pipeline_layout);
602 if (result != VK_SUCCESS)
603 goto fail_descriptor_set_layout;
/* Shared stage array; the FS module slot is patched per pipeline below. */
605 VkPipelineShaderStageCreateInfo pipeline_shader_stages[] = {
607 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
608 .stage = VK_SHADER_STAGE_VERTEX_BIT,
609 .module = anv_shader_module_to_handle(&vs),
611 .pSpecializationInfo = NULL
613 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
614 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
615 .module = VK_NULL_HANDLE, /* TEMPLATE VALUE! FILL ME IN! */
617 .pSpecializationInfo = NULL
/* Pipeline template shared by all three variants. */
621 const VkGraphicsPipelineCreateInfo vk_pipeline_info = {
622 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
623 .stageCount = ARRAY_SIZE(pipeline_shader_stages),
624 .pStages = pipeline_shader_stages,
625 .pVertexInputState = &vi_create_info,
626 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
627 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
628 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
629 .primitiveRestartEnable = false,
631 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
632 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
636 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
637 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
638 .rasterizerDiscardEnable = false,
639 .polygonMode = VK_POLYGON_MODE_FILL,
640 .cullMode = VK_CULL_MODE_NONE,
641 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE
643 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
644 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
645 .rasterizationSamples = 1,
646 .sampleShadingEnable = false,
647 .pSampleMask = (VkSampleMask[]) { UINT32_MAX },
/* No blending: plain RGBA write of the sampled texel. */
649 .pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
650 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
651 .attachmentCount = 1,
652 .pAttachments = (VkPipelineColorBlendAttachmentState []) {
654 VK_COLOR_COMPONENT_A_BIT |
655 VK_COLOR_COMPONENT_R_BIT |
656 VK_COLOR_COMPONENT_G_BIT |
657 VK_COLOR_COMPONENT_B_BIT },
/* Everything dynamic so the meta path can set state per blit. */
660 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
661 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
662 .dynamicStateCount = 9,
663 .pDynamicStates = (VkDynamicState[]) {
664 VK_DYNAMIC_STATE_VIEWPORT,
665 VK_DYNAMIC_STATE_SCISSOR,
666 VK_DYNAMIC_STATE_LINE_WIDTH,
667 VK_DYNAMIC_STATE_DEPTH_BIAS,
668 VK_DYNAMIC_STATE_BLEND_CONSTANTS,
669 VK_DYNAMIC_STATE_DEPTH_BOUNDS,
670 VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
671 VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
672 VK_DYNAMIC_STATE_STENCIL_REFERENCE,
676 .layout = device->meta_state.blit.pipeline_layout,
677 .renderPass = device->meta_state.blit.render_pass,
/* anv-specific pipeline flags: viewport/scissor handled by the meta op. */
681 const struct anv_graphics_pipeline_create_info anv_pipeline_info = {
682 .color_attachment_count = -1,
683 .use_repclear = false,
684 .disable_viewport = true,
685 .disable_scissor = true,
/* Create the three pipelines, patching in the matching FS each time. */
690 pipeline_shader_stages[1].module = anv_shader_module_to_handle(&fs_1d);
691 result = anv_graphics_pipeline_create(anv_device_to_handle(device),
693 &vk_pipeline_info, &anv_pipeline_info,
694 &device->meta_state.alloc, &device->meta_state.blit.pipeline_1d_src);
695 if (result != VK_SUCCESS)
696 goto fail_pipeline_layout;
698 pipeline_shader_stages[1].module = anv_shader_module_to_handle(&fs_2d);
699 result = anv_graphics_pipeline_create(anv_device_to_handle(device),
701 &vk_pipeline_info, &anv_pipeline_info,
702 &device->meta_state.alloc, &device->meta_state.blit.pipeline_2d_src);
703 if (result != VK_SUCCESS)
704 goto fail_pipeline_1d;
706 pipeline_shader_stages[1].module = anv_shader_module_to_handle(&fs_3d);
707 result = anv_graphics_pipeline_create(anv_device_to_handle(device),
709 &vk_pipeline_info, &anv_pipeline_info,
710 &device->meta_state.alloc, &device->meta_state.blit.pipeline_3d_src);
711 if (result != VK_SUCCESS)
712 goto fail_pipeline_2d;
/* Success path: NIR is owned by the device-side pipelines now; free
 * the temporary shader modules' IR. */
715 ralloc_free(fs_1d.nir);
716 ralloc_free(fs_2d.nir);
717 ralloc_free(fs_3d.nir);
/* Failure unwind: destroy in reverse creation order.
 * NOTE(review): the fail_pipeline_2d / fail_pipeline_1d / fail_render_pass
 * label lines are not visible in this chunk — presumably dropped by
 * extraction; confirm the full goto chain in the original file. */
722 anv_DestroyPipeline(anv_device_to_handle(device),
723 device->meta_state.blit.pipeline_2d_src,
724 &device->meta_state.alloc);
727 anv_DestroyPipeline(anv_device_to_handle(device),
728 device->meta_state.blit.pipeline_1d_src,
729 &device->meta_state.alloc);
731 fail_pipeline_layout:
732 anv_DestroyPipelineLayout(anv_device_to_handle(device),
733 device->meta_state.blit.pipeline_layout,
734 &device->meta_state.alloc);
735 fail_descriptor_set_layout:
736 anv_DestroyDescriptorSetLayout(anv_device_to_handle(device),
737 device->meta_state.blit.ds_layout,
738 &device->meta_state.alloc);
740 anv_DestroyRenderPass(anv_device_to_handle(device),
741 device->meta_state.blit.render_pass,
742 &device->meta_state.alloc);
/* NIR is also freed on the failure path. */
745 ralloc_free(fs_1d.nir);
746 ralloc_free(fs_2d.nir);
747 ralloc_free(fs_3d.nir);