2 * Copyright © 2016 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
28 #include "anv_private.h"
29 #include "glsl/nir/nir_builder.h"
32 * Vertex attributes used by all pipelines.
/* NOTE(review): the "struct vertex_attrs {" opener and closing "};" were
 * dropped by the extraction, and the leading numerals on each line are
 * fused-in original line numbers, not C tokens. The field offsets below are
 * referenced via offsetof() in create_pipeline()'s vertex attribute
 * descriptions. */
35 struct anv_vue_header vue_header;
36 float position[2]; /**< 3DPRIM_RECTLIST */
37 float tex_position[2];
41 meta_resolve_save(struct anv_meta_saved_state *saved_state,
42 struct anv_cmd_buffer *cmd_buffer)
44 anv_meta_save(saved_state, cmd_buffer,
45 (1 << VK_DYNAMIC_STATE_VIEWPORT) |
46 (1 << VK_DYNAMIC_STATE_SCISSOR));
48 cmd_buffer->state.dynamic.viewport.count = 0;
49 cmd_buffer->state.dynamic.scissor.count = 0;
/* Restore the dynamic state saved by meta_resolve_save().
 *
 * (Reconstructed: the extraction dropped the "static void" header and the
 * braces, and fused line numbers into the text.)
 */
static void
meta_resolve_restore(struct anv_meta_saved_state *saved_state,
                     struct anv_cmd_buffer *cmd_buffer)
{
   anv_meta_restore(saved_state, cmd_buffer);
}
62 const struct glsl_type *vec4 = glsl_vec4_type();
65 nir_variable *a_position;
66 nir_variable *v_position;
67 nir_variable *a_tex_position;
68 nir_variable *v_tex_position;
70 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_VERTEX, NULL);
71 b.shader->info.name = ralloc_strdup(b.shader, "meta_resolve_vs");
73 a_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
75 a_position->data.location = VERT_ATTRIB_GENERIC0;
77 v_position = nir_variable_create(b.shader, nir_var_shader_out, vec4,
79 v_position->data.location = VARYING_SLOT_POS;
81 a_tex_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
83 a_tex_position->data.location = VERT_ATTRIB_GENERIC1;
85 v_tex_position = nir_variable_create(b.shader, nir_var_shader_out, vec4,
87 v_tex_position->data.location = VARYING_SLOT_VAR0;
89 nir_copy_var(&b, v_position, a_position);
90 nir_copy_var(&b, v_tex_position, a_tex_position);
96 build_nir_fs(uint32_t num_samples)
98 const struct glsl_type *vec4 = glsl_vec4_type();
100 const struct glsl_type *sampler2DMS =
101 glsl_sampler_type(GLSL_SAMPLER_DIM_MS,
107 nir_variable *u_tex; /* uniform sampler */
108 nir_variable *v_position; /* vec4, varying fragment position */
109 nir_variable *v_tex_position; /* vec4, varying texture coordinate */
110 nir_variable *f_color; /* vec4, fragment output color */
111 nir_ssa_def *accum; /* vec4, accumulation of sample values */
113 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
114 b.shader->info.name = ralloc_asprintf(b.shader,
115 "meta_resolve_fs_samples%02d",
118 u_tex = nir_variable_create(b.shader, nir_var_uniform, sampler2DMS,
120 u_tex->data.descriptor_set = 0;
121 u_tex->data.binding = 0;
123 v_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
125 v_position->data.location = VARYING_SLOT_POS;
127 v_tex_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
129 v_tex_position->data.location = VARYING_SLOT_VAR0;
131 f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
133 f_color->data.location = FRAG_RESULT_DATA0;
135 accum = nir_imm_vec4(&b, 0, 0, 0, 0);
137 nir_ssa_def *tex_position_ivec =
138 nir_f2i(&b, nir_load_var(&b, v_tex_position));
140 for (uint32_t i = 0; i < num_samples; ++i) {
143 tex = nir_tex_instr_create(b.shader, /*num_srcs*/ 2);
144 tex->sampler = nir_deref_var_create(tex, u_tex);
145 tex->sampler_dim = GLSL_SAMPLER_DIM_MS;
146 tex->op = nir_texop_txf_ms;
147 tex->src[0].src = nir_src_for_ssa(tex_position_ivec);
148 tex->src[0].src_type = nir_tex_src_coord;
149 tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, i));
150 tex->src[1].src_type = nir_tex_src_ms_index;
151 tex->dest_type = nir_type_float;
152 tex->is_array = false;
153 tex->coord_components = 3;
154 nir_ssa_dest_init(&tex->instr, &tex->dest, /*num_components*/ 4, "tex");
155 nir_builder_instr_insert(&b, &tex->instr);
157 accum = nir_fadd(&b, accum, &tex->dest.ssa);
160 accum = nir_fdiv(&b, accum, nir_imm_float(&b, num_samples));
161 nir_store_var(&b, f_color, accum, /*writemask*/ 4);
167 create_pass(struct anv_device *device)
170 VkDevice device_h = anv_device_to_handle(device);
171 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
173 result = anv_CreateRenderPass(device_h,
174 &(VkRenderPassCreateInfo) {
175 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
176 .attachmentCount = 1,
177 .pAttachments = &(VkAttachmentDescription) {
178 .format = VK_FORMAT_UNDEFINED, /* Our shaders don't care */
180 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
181 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
182 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
183 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
186 .pSubpasses = &(VkSubpassDescription) {
187 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
188 .inputAttachmentCount = 0,
189 .colorAttachmentCount = 1,
190 .pColorAttachments = &(VkAttachmentReference) {
192 .layout = VK_IMAGE_LAYOUT_GENERAL,
194 .pResolveAttachments = NULL,
195 .pDepthStencilAttachment = &(VkAttachmentReference) {
196 .attachment = VK_ATTACHMENT_UNUSED,
198 .preserveAttachmentCount = 0,
199 .pPreserveAttachments = NULL,
201 .dependencyCount = 0,
204 &device->meta_state.resolve.pass);
/* Build the meta-resolve graphics pipeline for one sample count. The
 * fragment shader is generated per sample count by build_nir_fs() and the
 * resulting pipeline is stored at resolve.pipelines[log2(num_samples)].
 *
 * NOTE(review): this span is a mangled extraction — the leading numerals on
 * each line are fused-in original line numbers (not C tokens), and several
 * lines were dropped (the "static VkResult" header, braces, "VkResult
 * result;", ".pName"/".stageCount", attribute ".location"/".binding" pairs,
 * and the goto-based cleanup). Restore from the pristine file before
 * compiling; comments only were added here.
 */
210 create_pipeline(struct anv_device *device,
211 uint32_t num_samples,
212 VkShaderModule vs_module_h)
215 VkDevice device_h = anv_device_to_handle(device);
216 uint32_t samples_log2 = ffs(num_samples) - 1;
/* The FS is regenerated for each sample count; its nir is freed below once
 * the pipeline owns the compiled code. */
218 struct anv_shader_module fs_module = {
219 .nir = build_nir_fs(num_samples),
222 if (!fs_module.nir) {
223 /* XXX: Need more accurate error */
224 result = VK_ERROR_OUT_OF_HOST_MEMORY;
228 result = anv_graphics_pipeline_create(device_h,
230 &(VkGraphicsPipelineCreateInfo) {
231 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
233 .pStages = (VkPipelineShaderStageCreateInfo[]) {
235 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
236 .stage = VK_SHADER_STAGE_VERTEX_BIT,
237 .module = vs_module_h,
241 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
242 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
243 .module = anv_shader_module_to_handle(&fs_module),
247 .pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
248 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
249 .vertexBindingDescriptionCount = 1,
250 .pVertexBindingDescriptions = (VkVertexInputBindingDescription[]) {
253 .stride = sizeof(struct vertex_attrs),
254 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
/* Three attributes matching struct vertex_attrs: VUE header, position,
 * texture coordinate. */
257 .vertexAttributeDescriptionCount = 3,
258 .pVertexAttributeDescriptions = (VkVertexInputAttributeDescription[]) {
263 .format = VK_FORMAT_R32G32B32A32_UINT,
264 .offset = offsetof(struct vertex_attrs, vue_header),
270 .format = VK_FORMAT_R32G32_SFLOAT,
271 .offset = offsetof(struct vertex_attrs, position),
274 /* Texture Coordinate */
277 .format = VK_FORMAT_R32G32_SFLOAT,
278 .offset = offsetof(struct vertex_attrs, tex_position),
282 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
283 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
284 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
285 .primitiveRestartEnable = false,
287 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
288 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
292 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
293 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
294 .depthClampEnable = false,
295 .rasterizerDiscardEnable = false,
296 .polygonMode = VK_POLYGON_MODE_FILL,
297 .cullMode = VK_CULL_MODE_NONE,
298 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
/* The resolve draw is single-sampled; only the source texture read by the
 * FS is multisampled. */
300 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
301 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
302 .rasterizationSamples = 1,
303 .sampleShadingEnable = false,
304 .pSampleMask = (VkSampleMask[]) { 0x1 },
305 .alphaToCoverageEnable = false,
306 .alphaToOneEnable = false,
308 .pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
309 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
310 .logicOpEnable = false,
311 .attachmentCount = 1,
312 .pAttachments = (VkPipelineColorBlendAttachmentState []) {
314 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
315 VK_COLOR_COMPONENT_G_BIT |
316 VK_COLOR_COMPONENT_B_BIT |
317 VK_COLOR_COMPONENT_A_BIT,
/* Viewport and scissor are dynamic; emit_resolve() sets them per draw. */
321 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
322 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
323 .dynamicStateCount = 2,
324 .pDynamicStates = (VkDynamicState[]) {
325 VK_DYNAMIC_STATE_VIEWPORT,
326 VK_DYNAMIC_STATE_SCISSOR,
329 .layout = device->meta_state.resolve.pipeline_layout,
330 .renderPass = device->meta_state.resolve.pass,
333 &(struct anv_graphics_pipeline_create_info) {
334 .color_attachment_count = -1,
335 .use_repclear = false,
336 .disable_viewport = true,
337 .disable_scissor = true,
341 &device->meta_state.alloc,
342 &device->meta_state.resolve.pipelines[samples_log2]);
343 if (result != VK_SUCCESS)
/* The pipeline keeps its own copy of the compiled shader; the nir is no
 * longer needed. */
349 ralloc_free(fs_module.nir);
354 anv_device_finish_meta_resolve_state(struct anv_device *device)
356 struct anv_meta_state *state = &device->meta_state;
357 VkDevice device_h = anv_device_to_handle(device);
358 VkRenderPass pass_h = device->meta_state.resolve.pass;
359 VkPipelineLayout pipeline_layout_h = device->meta_state.resolve.pipeline_layout;
360 VkDescriptorSetLayout ds_layout_h = device->meta_state.resolve.ds_layout;
361 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
364 ANV_CALL(DestroyRenderPass)(device_h, pass_h,
365 &device->meta_state.alloc);
367 if (pipeline_layout_h)
368 ANV_CALL(DestroyPipelineLayout)(device_h, pipeline_layout_h, alloc);
371 ANV_CALL(DestroyDescriptorSetLayout)(device_h, ds_layout_h, alloc);
373 for (uint32_t i = 0; i < ARRAY_SIZE(state->resolve.pipelines); ++i) {
374 VkPipeline pipeline_h = state->resolve.pipelines[i];
377 ANV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
/* Create all device-level objects for the resolve meta-op: a descriptor set
 * layout with one combined image sampler, a pipeline layout over it, the
 * resolve render pass, and one pipeline per hardware-supported sample count
 * (queried via isl_device_get_sample_counts). The generated VS nir is shared
 * by every pipeline and freed at the end; anv_device_finish_meta_resolve_state
 * tears down partial state — presumably on the failure path (the "goto fail"
 * lines were dropped from this extraction; TODO confirm).
 *
 * NOTE(review): mangled extraction — leading numerals are fused-in original
 * line numbers, and lines are missing (return type, braces, goto targets,
 * the pipeline for-loop header at original line 438). Comments only added.
 */
383 anv_device_init_meta_resolve_state(struct anv_device *device)
385 VkResult res = VK_SUCCESS;
386 VkDevice device_h = anv_device_to_handle(device);
387 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
/* Bitmask of sample counts the hardware supports; pipelines are only built
 * for counts present in this mask. */
389 const isl_sample_count_mask_t sample_count_mask =
390 isl_device_get_sample_counts(&device->isl_dev);
392 zero(device->meta_state.resolve);
/* One vertex shader shared by all sample-count pipelines. */
394 struct anv_shader_module vs_module = { .nir = build_nir_vs() };
395 if (!vs_module.nir) {
396 /* XXX: Need more accurate error */
397 res = VK_ERROR_OUT_OF_HOST_MEMORY;
401 VkShaderModule vs_module_h = anv_shader_module_to_handle(&vs_module);
403 res = anv_CreateDescriptorSetLayout(device_h,
404 &(VkDescriptorSetLayoutCreateInfo) {
405 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
407 .pBindings = (VkDescriptorSetLayoutBinding[]) {
410 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
411 .descriptorCount = 1,
412 .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
417 &device->meta_state.resolve.ds_layout);
418 if (res != VK_SUCCESS)
421 res = anv_CreatePipelineLayout(device_h,
422 &(VkPipelineLayoutCreateInfo) {
423 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
425 .pSetLayouts = (VkDescriptorSetLayout[]) {
426 device->meta_state.resolve.ds_layout,
430 &device->meta_state.resolve.pipeline_layout);
431 if (res != VK_SUCCESS)
434 res = create_pass(device);
435 if (res != VK_SUCCESS)
/* One pipeline per power-of-two sample count, skipping counts the hardware
 * cannot produce. */
439 i < ARRAY_SIZE(device->meta_state.resolve.pipelines); ++i) {
440 uint32_t sample_count = 1 << i;
442 if (!(sample_count_mask & sample_count))
445 res = create_pipeline(device, sample_count, vs_module_h);
446 if (res != VK_SUCCESS)
453 anv_device_finish_meta_resolve_state(device);
/* The VS nir has been compiled into every pipeline; free the IR. */
456 ralloc_free(vs_module.nir);
/* Record one resolve draw: upload three vertices (a rect covering the
 * destination region, with texture coordinates covering the source region —
 * see the 3DPRIM_RECTLIST note on struct vertex_attrs), create a transient
 * nearest-filter sampler and descriptor set pointing at the source view,
 * set viewport/scissor to the framebuffer extent, bind the pipeline for the
 * source image's sample count, and draw.
 *
 * NOTE(review): mangled extraction — leading numerals are fused-in original
 * line numbers; dropped lines include the "static void" header, braces, the
 * full vertex_data initializers, sampler LOD fields, viewport x/y/width and
 * the descriptor binding index. Comments only added.
 */
462 emit_resolve(struct anv_cmd_buffer *cmd_buffer,
463 struct anv_image_view *src_iview,
464 const VkOffset2D *src_offset,
465 struct anv_image_view *dest_iview,
466 const VkOffset2D *dest_offset,
467 const VkExtent2D *resolve_extent)
469 struct anv_device *device = cmd_buffer->device;
470 VkDevice device_h = anv_device_to_handle(device);
471 VkCommandBuffer cmd_buffer_h = anv_cmd_buffer_to_handle(cmd_buffer);
472 const struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;
473 const struct anv_image *src_image = src_iview->image;
/* Fake pool handle: the set is allocated through it but freed manually via
 * anv_descriptor_set_destroy() at the end of this function. */
474 VkDescriptorPool dummy_desc_pool_h = (VkDescriptorPool) 1;
475 uint32_t samples_log2 = ffs(src_image->samples) - 1;
477 const struct vertex_attrs vertex_data[3] = {
481 dest_offset->x + resolve_extent->width,
482 dest_offset->y + resolve_extent->height,
485 src_offset->x + resolve_extent->width,
486 src_offset->y + resolve_extent->height,
493 dest_offset->y + resolve_extent->height,
497 src_offset->y + resolve_extent->height,
/* Vertices live in the command buffer's dynamic-state stream, so they are
 * freed with the command buffer. */
513 struct anv_state vertex_mem =
514 anv_cmd_buffer_emit_dynamic(cmd_buffer, vertex_data,
515 sizeof(vertex_data), 16);
517 struct anv_buffer vertex_buffer = {
519 .size = sizeof(vertex_data),
520 .bo = &cmd_buffer->dynamic_state_stream.block_pool->bo,
521 .offset = vertex_mem.offset,
524 VkBuffer vertex_buffer_h = anv_buffer_to_handle(&vertex_buffer);
526 anv_CmdBindVertexBuffers(cmd_buffer_h,
529 (VkBuffer[]) { vertex_buffer_h },
530 (VkDeviceSize[]) { 0 });
/* Transient nearest-filter sampler; destroyed after the draw is recorded. */
533 ANV_CALL(CreateSampler)(device_h,
534 &(VkSamplerCreateInfo) {
535 .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
536 .magFilter = VK_FILTER_NEAREST,
537 .minFilter = VK_FILTER_NEAREST,
538 .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
539 .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
540 .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
541 .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
543 .anisotropyEnable = false,
544 .compareEnable = false,
547 .unnormalizedCoordinates = false,
549 &cmd_buffer->pool->alloc,
552 VkDescriptorSet desc_set_h;
/* NOTE(review): the set is allocated with blit.ds_layout but bound against
 * resolve.pipeline_layout (built from resolve.ds_layout) below — the two
 * layouts would have to be compatible; confirm this is intentional. */
553 anv_AllocateDescriptorSets(device_h,
554 &(VkDescriptorSetAllocateInfo) {
555 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
556 .descriptorPool = dummy_desc_pool_h,
557 .descriptorSetCount = 1,
558 .pSetLayouts = (VkDescriptorSetLayout[]) {
559 device->meta_state.blit.ds_layout,
564 ANV_FROM_HANDLE(anv_descriptor_set, desc_set, desc_set_h);
566 anv_UpdateDescriptorSets(device_h,
568 (VkWriteDescriptorSet[]) {
570 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
571 .dstSet = desc_set_h,
573 .dstArrayElement = 0,
574 .descriptorCount = 1,
575 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
576 .pImageInfo = (VkDescriptorImageInfo[]) {
578 .sampler = sampler_h,
579 .imageView = anv_image_view_to_handle(src_iview),
580 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
/* Viewport/scissor are dynamic pipeline state; cover the whole fb. */
588 ANV_CALL(CmdSetViewport)(cmd_buffer_h,
596 .height = fb->height,
602 ANV_CALL(CmdSetScissor)(cmd_buffer_h,
608 .extent = (VkExtent2D) { fb->width, fb->height },
612 VkPipeline pipeline_h = device->meta_state.resolve.pipelines[samples_log2];
613 ANV_FROM_HANDLE(anv_pipeline, pipeline, pipeline_h);
/* Skip the bind if the right pipeline is already current. */
615 if (cmd_buffer->state.pipeline != pipeline) {
616 anv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
620 anv_CmdBindDescriptorSets(cmd_buffer_h,
621 VK_PIPELINE_BIND_POINT_GRAPHICS,
622 device->meta_state.resolve.pipeline_layout,
625 (VkDescriptorSet[]) {
631 ANV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
633 /* All objects below are consumed by the draw call. We may safely destroy
636 anv_descriptor_set_destroy(device, desc_set);
637 anv_DestroySampler(device_h, sampler_h,
638 &cmd_buffer->pool->alloc);
/* Implements vkCmdResolveImage: for each VkImageResolve region and each
 * layer in it, create single-mip/single-layer views of the source and
 * destination, wrap the destination view in a temporary framebuffer, begin
 * the resolve render pass over the region, and record the averaging draw
 * with emit_resolve(). Dynamic state saved at entry is restored at the end.
 *
 * NOTE(review): mangled extraction — leading numerals are fused-in original
 * line numbers; braces, closing parens and some initializer fields were
 * dropped. Also "®ions"/"®ion" below are mojibake for "&regions"/"&region"
 * (a decoded "&reg;" HTML entity) — fix the encoding when restoring.
 */
641 void anv_CmdResolveImage(
642 VkCommandBuffer cmd_buffer_h,
644 VkImageLayout src_image_layout,
645 VkImage dest_image_h,
646 VkImageLayout dest_image_layout,
647 uint32_t region_count,
648 const VkImageResolve* regions)
650 ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmd_buffer_h);
651 ANV_FROM_HANDLE(anv_image, src_image, src_image_h);
652 ANV_FROM_HANDLE(anv_image, dest_image, dest_image_h);
653 struct anv_device *device = cmd_buffer->device;
654 struct anv_meta_saved_state state;
655 VkDevice device_h = anv_device_to_handle(device);
657 meta_resolve_save(&state, cmd_buffer);
/* Resolves only make sense MS source -> single-sampled destination. */
659 assert(src_image->samples > 1);
660 assert(dest_image->samples == 1);
662 if (src_image->samples >= 16) {
663 /* See commit aa3f9aaf31e9056a255f9e0472ebdfdaa60abe54 for the
664 * glBlitFramebuffer workaround for samples >= 16.
666 anv_finishme("vkCmdResolveImage: need interpolation workaround when "
670 if (src_image->array_size > 1)
671 anv_finishme("vkCmdResolveImage: multisample array images");
673 for (uint32_t r = 0; r < region_count; ++r) {
/* NOTE(review): mojibake — should read "&regions[r]". */
674 const VkImageResolve *region = ®ions[r];
676 /* From the Vulkan 1.0 spec:
678 * - The aspectMask member of srcSubresource and dstSubresource must
679 * only contain VK_IMAGE_ASPECT_COLOR_BIT
681 * - The layerCount member of srcSubresource and dstSubresource must
684 assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
685 assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
686 assert(region->srcSubresource.layerCount ==
687 region->dstSubresource.layerCount);
/* NOTE(review): the two calls below are mojibake for "&region->...". */
689 const uint32_t src_base_layer =
690 anv_meta_get_iview_layer(src_image, ®ion->srcSubresource,
693 const uint32_t dest_base_layer =
694 anv_meta_get_iview_layer(dest_image, ®ion->dstSubresource,
697 for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
/* One single-layer, single-mip view of each image per iteration. */
700 struct anv_image_view src_iview;
701 anv_image_view_init(&src_iview, cmd_buffer->device,
702 &(VkImageViewCreateInfo) {
703 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
704 .image = src_image_h,
705 .viewType = anv_meta_get_view_type(src_image),
706 .format = src_image->format->vk_format,
707 .subresourceRange = {
708 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
709 .baseMipLevel = region->srcSubresource.mipLevel,
711 .baseArrayLayer = src_base_layer + layer,
717 struct anv_image_view dest_iview;
718 anv_image_view_init(&dest_iview, cmd_buffer->device,
719 &(VkImageViewCreateInfo) {
720 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
721 .image = dest_image_h,
722 .viewType = anv_meta_get_view_type(dest_image),
723 .format = dest_image->format->vk_format,
724 .subresourceRange = {
725 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
726 .baseMipLevel = region->dstSubresource.mipLevel,
728 .baseArrayLayer = dest_base_layer + layer,
/* Temporary framebuffer sized to the destination mip level; destroyed
 * after the render pass below. */
735 anv_CreateFramebuffer(device_h,
736 &(VkFramebufferCreateInfo) {
737 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
738 .attachmentCount = 1,
739 .pAttachments = (VkImageView[]) {
740 anv_image_view_to_handle(&dest_iview),
742 .width = anv_minify(dest_image->extent.width,
743 region->dstSubresource.mipLevel),
744 .height = anv_minify(dest_image->extent.height,
745 region->dstSubresource.mipLevel),
748 &cmd_buffer->pool->alloc,
751 ANV_CALL(CmdBeginRenderPass)(cmd_buffer_h,
752 &(VkRenderPassBeginInfo) {
753 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
754 .renderPass = device->meta_state.resolve.pass,
762 region->extent.width,
763 region->extent.height,
766 .clearValueCount = 0,
767 .pClearValues = NULL,
769 VK_SUBPASS_CONTENTS_INLINE);
771 emit_resolve(cmd_buffer,
774 .x = region->srcOffset.x,
775 .y = region->srcOffset.y,
779 .x = region->dstOffset.x,
780 .y = region->dstOffset.y,
783 .width = region->extent.width,
784 .height = region->extent.height,
787 ANV_CALL(CmdEndRenderPass)(cmd_buffer_h);
789 anv_DestroyFramebuffer(device_h, fb_h,
790 &cmd_buffer->pool->alloc);
794 meta_resolve_restore(&state, cmd_buffer);