2 * Copyright © 2016 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
28 #include "anv_private.h"
29 #include "glsl/nir/nir_builder.h"
/*
 * Per-vertex layout shared by the meta-resolve pipelines; referenced below
 * via offsetof(struct vertex_attrs, ...) in create_pipeline().
 * NOTE(review): this view of the file is incomplete — the leading numerals on
 * each line are stale line-number artifacts, and the "struct vertex_attrs {"
 * opener and closing "};" lines are not visible here.
 */
32 * Vertex attributes used by all pipelines.
35 struct anv_vue_header vue_header;
/* Screen-space corner position; consumed as a RECTLIST primitive. */
36 float position[2]; /**< 3DPRIM_RECTLIST */
/* Source texel coordinate, passed through to the resolve fragment shader. */
37 float tex_position[2];
/*
 * Body of the meta-resolve vertex-shader builder ("meta_resolve_vs"):
 * a pure pass-through that copies two generic vertex attributes to the
 * position and tex-coord varyings, with no transformation.
 * NOTE(review): extraction artifact — the function signature, the
 * nir_builder declaration, the variable-name arguments to
 * nir_variable_create(), and the return statement are missing from this view.
 */
43 const struct glsl_type *vec4 = glsl_vec4_type();
46 nir_variable *a_position;
47 nir_variable *v_position;
48 nir_variable *a_tex_position;
49 nir_variable *v_tex_position;
51 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_VERTEX, NULL);
52 b.shader->info.name = ralloc_strdup(b.shader, "meta_resolve_vs");
/* Input attribute 0: vertex position. */
54 a_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
56 a_position->data.location = VERT_ATTRIB_GENERIC0;
/* Output: rasterizer position. */
58 v_position = nir_variable_create(b.shader, nir_var_shader_out, vec4,
60 v_position->data.location = VARYING_SLOT_POS;
/* Input attribute 1: texture coordinate. */
62 a_tex_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
64 a_tex_position->data.location = VERT_ATTRIB_GENERIC1;
/* Output varying consumed by build_nir_fs() at VARYING_SLOT_VAR0. */
66 v_tex_position = nir_variable_create(b.shader, nir_var_shader_out, vec4,
68 v_tex_position->data.location = VARYING_SLOT_VAR0;
/* Straight copies: no matrix transform is applied. */
70 nir_copy_var(&b, v_position, a_position);
71 nir_copy_var(&b, v_tex_position, a_tex_position);
/*
 * Build the resolve fragment shader ("meta_resolve_fs_samplesNN"): fetches
 * every sample of a 2D multisample texture at the interpolated texel
 * coordinate with txf_ms, sums them, divides by num_samples (box filter),
 * and writes the average to color output 0.
 *
 * num_samples: sample count of the source image; one txf_ms per sample is
 * emitted, so the loop is fully unrolled at shader-build time.
 * NOTE(review): extraction artifact — the return type line, some argument
 * lines, closing braces and the return statement are missing from this view.
 */
77 build_nir_fs(uint32_t num_samples)
79 const struct glsl_type *vec4 = glsl_vec4_type();
81 const struct glsl_type *sampler2DMS =
82 glsl_sampler_type(GLSL_SAMPLER_DIM_MS,
88 nir_variable *u_tex; /* uniform sampler */
89 nir_variable *v_position; /* vec4, varying fragment position */
90 nir_variable *v_tex_position; /* vec4, varying texture coordinate */
91 nir_variable *f_color; /* vec4, fragment output color */
92 nir_ssa_def *accum; /* vec4, accumulation of sample values */
94 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
/* Encode the sample count in the shader name for debugging. */
95 b.shader->info.name = ralloc_asprintf(b.shader,
96 "meta_resolve_fs_samples%02d",
/* Combined image/sampler at set 0, binding 0 (matches the ds layout
 * created in anv_device_init_meta_resolve_state). */
99 u_tex = nir_variable_create(b.shader, nir_var_uniform, sampler2DMS,
101 u_tex->data.descriptor_set = 0;
102 u_tex->data.binding = 0;
104 v_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
106 v_position->data.location = VARYING_SLOT_POS;
108 v_tex_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
110 v_tex_position->data.location = VARYING_SLOT_VAR0;
112 f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
114 f_color->data.location = FRAG_RESULT_DATA0;
/* Running sum of sample colors, starting at zero. */
116 accum = nir_imm_vec4(&b, 0, 0, 0, 0);
/* txf_ms takes integer texel coordinates; truncate the float varying. */
118 nir_ssa_def *tex_position_ivec =
119 nir_f2i(&b, nir_load_var(&b, v_tex_position));
/* One texel-fetch per sample; same coordinate, varying ms_index. */
121 for (uint32_t i = 0; i < num_samples; ++i) {
124 tex = nir_tex_instr_create(b.shader, /*num_srcs*/ 2);
125 tex->sampler = nir_deref_var_create(tex, u_tex);
126 tex->sampler_dim = GLSL_SAMPLER_DIM_MS;
127 tex->op = nir_texop_txf_ms;
128 tex->src[0].src = nir_src_for_ssa(tex_position_ivec);
129 tex->src[0].src_type = nir_tex_src_coord;
130 tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, i));
131 tex->src[1].src_type = nir_tex_src_ms_index;
132 tex->dest_type = nir_type_float;
133 tex->is_array = false;
134 tex->coord_components = 3;
135 nir_ssa_dest_init(&tex->instr, &tex->dest, /*num_components*/ 4, "tex");
136 nir_builder_instr_insert(&b, &tex->instr);
138 accum = nir_fadd(&b, accum, &tex->dest.ssa);
/* Average: divide the sum by the sample count. */
141 accum = nir_fdiv(&b, accum, nir_imm_float(&b, num_samples));
/* NOTE(review): writemask 4 == 0b0100 would store only the .z component;
 * 0xf would store all four — verify against upstream intent. */
142 nir_store_var(&b, f_color, accum, /*writemask*/ 4);
/*
 * Create the single-subpass render pass used by all resolve pipelines:
 * one color attachment (format VK_FORMAT_UNDEFINED — the shaders ignore it),
 * LOAD/STORE ops, GENERAL layout throughout, no depth/stencil attachment
 * (VK_ATTACHMENT_UNUSED) and no dependencies.
 *
 * pass_h: out-parameter receiving the created VkRenderPass handle.
 * NOTE(review): extraction artifact — the return type, `VkResult result`
 * declaration, several struct-literal lines, closing braces and the return
 * statement are missing from this view.
 */
148 create_pass(struct anv_device *device, VkRenderPass *pass_h)
151 VkDevice device_h = anv_device_to_handle(device);
152 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
154 result = anv_CreateRenderPass(device_h,
155 &(VkRenderPassCreateInfo) {
156 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
157 .attachmentCount = 1,
158 .pAttachments = &(VkAttachmentDescription) {
159 .format = VK_FORMAT_UNDEFINED, /* Our shaders don't care */
/* Preserve any existing destination contents outside the resolve rect. */
161 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
162 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
163 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
164 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
167 .pSubpasses = &(VkSubpassDescription) {
168 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
169 .inputAttachmentCount = 0,
170 .colorAttachmentCount = 1,
171 .pColorAttachments = &(VkAttachmentReference) {
173 .layout = VK_IMAGE_LAYOUT_GENERAL,
175 .pResolveAttachments = NULL,
/* No depth/stencil: the reference is present but explicitly UNUSED. */
176 .pDepthStencilAttachment = &(VkAttachmentReference) {
177 .attachment = VK_ATTACHMENT_UNUSED,
179 .preserveAttachmentCount = 0,
180 .pPreserveAttachments = NULL,
182 .dependencyCount = 0,
/*
 * Create one resolve graphics pipeline for a given source sample count.
 * Builds the matching fragment shader with build_nir_fs(num_samples), pairs
 * it with the shared vertex shader module, and creates a pipeline with:
 * one vertex binding of struct vertex_attrs (3 attributes: VUE header,
 * position, tex coordinate), triangle-strip topology, no culling,
 * single-sample rasterization, full RGBA color writes, and dynamic
 * viewport/scissor. The fragment shader NIR is freed before returning.
 *
 * num_samples: source image sample count baked into the fragment shader.
 * vs_module_h: shared pass-through vertex shader module.
 * pipeline_h:  out-parameter receiving the created pipeline.
 * NOTE(review): extraction artifact — the return type, `VkResult result`
 * declaration, the render-pass parameter line, several struct-literal lines,
 * closing braces and the error-path labels are missing from this view.
 */
191 create_pipeline(struct anv_device *device,
192 uint32_t num_samples,
193 VkShaderModule vs_module_h,
195 VkPipeline *pipeline_h)
198 VkDevice device_h = anv_device_to_handle(device);
/* Wrap the freshly-built NIR in a stack-local shader module; its nir is
 * released via ralloc_free() at the end of this function. */
200 struct anv_shader_module fs_module = {
201 .nir = build_nir_fs(num_samples),
204 if (!fs_module.nir) {
205 /* XXX: Need more accurate error */
206 result = VK_ERROR_OUT_OF_HOST_MEMORY;
210 result = anv_graphics_pipeline_create(device_h,
212 &(VkGraphicsPipelineCreateInfo) {
213 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
215 .pStages = (VkPipelineShaderStageCreateInfo[]) {
217 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
218 .stage = VK_SHADER_STAGE_VERTEX_BIT,
219 .module = vs_module_h,
223 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
224 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
225 .module = anv_shader_module_to_handle(&fs_module),
/* Single interleaved vertex buffer laid out as struct vertex_attrs. */
229 .pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
230 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
231 .vertexBindingDescriptionCount = 1,
232 .pVertexBindingDescriptions = (VkVertexInputBindingDescription[]) {
235 .stride = sizeof(struct vertex_attrs),
236 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
239 .vertexAttributeDescriptionCount = 3,
240 .pVertexAttributeDescriptions = (VkVertexInputAttributeDescription[]) {
/* Attribute 0: VUE header (raw uints). */
245 .format = VK_FORMAT_R32G32B32A32_UINT,
246 .offset = offsetof(struct vertex_attrs, vue_header),
/* Attribute 1: 2D position. */
252 .format = VK_FORMAT_R32G32_SFLOAT,
253 .offset = offsetof(struct vertex_attrs, position),
256 /* Texture Coordinate */
259 .format = VK_FORMAT_R32G32_SFLOAT,
260 .offset = offsetof(struct vertex_attrs, tex_position),
264 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
265 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
266 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
267 .primitiveRestartEnable = false,
269 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
270 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
274 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
275 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
276 .depthClampEnable = false,
277 .rasterizerDiscardEnable = false,
278 .polygonMode = VK_POLYGON_MODE_FILL,
279 .cullMode = VK_CULL_MODE_NONE,
280 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
/* The destination is single-sampled regardless of the source count. */
282 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
283 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
284 .rasterizationSamples = 1,
285 .sampleShadingEnable = false,
286 .pSampleMask = (VkSampleMask[]) { 0x1 },
287 .alphaToCoverageEnable = false,
288 .alphaToOneEnable = false,
290 .pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
291 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
292 .logicOpEnable = false,
293 .attachmentCount = 1,
294 .pAttachments = (VkPipelineColorBlendAttachmentState []) {
296 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
297 VK_COLOR_COMPONENT_G_BIT |
298 VK_COLOR_COMPONENT_B_BIT |
299 VK_COLOR_COMPONENT_A_BIT,
/* Viewport and scissor are set per-draw in emit_resolve(). */
303 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
304 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
305 .dynamicStateCount = 2,
306 .pDynamicStates = (VkDynamicState[]) {
307 VK_DYNAMIC_STATE_VIEWPORT,
308 VK_DYNAMIC_STATE_SCISSOR,
311 .layout = device->meta_state.resolve.pipeline_layout,
312 .renderPass = pass_h,
/* anv-specific extra create info for meta pipelines. */
315 &(struct anv_graphics_pipeline_create_info) {
316 .color_attachment_count = -1,
317 .use_repclear = false,
318 .disable_viewport = true,
319 .disable_scissor = true,
323 &device->meta_state.alloc,
325 if (result != VK_SUCCESS)
331 *pipeline_h = VK_NULL_HANDLE;
/* The pipeline owns compiled code now; the NIR is no longer needed. */
334 ralloc_free(fs_module.nir);
/*
 * Tear down all meta-resolve state: render pass, pipeline layout,
 * descriptor-set layout, and every per-sample-count pipeline. Also used as
 * the error-unwind path of anv_device_init_meta_resolve_state(), so the
 * handles may be NULL (the layout destroy is guarded; pipelines are checked
 * per-slot).
 * NOTE(review): extraction artifact — the return type line, some NULL-handle
 * guard lines and closing braces are missing from this view.
 */
339 anv_device_finish_meta_resolve_state(struct anv_device *device)
341 struct anv_meta_state *state = &device->meta_state;
342 VkDevice device_h = anv_device_to_handle(device);
343 VkRenderPass pass_h = device->meta_state.resolve.pass;
344 VkPipelineLayout pipeline_layout_h = device->meta_state.resolve.pipeline_layout;
345 VkDescriptorSetLayout ds_layout_h = device->meta_state.resolve.ds_layout;
346 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
349 ANV_CALL(DestroyRenderPass)(device_h, pass_h,
350 &device->meta_state.alloc);
352 if (pipeline_layout_h)
353 ANV_CALL(DestroyPipelineLayout)(device_h, pipeline_layout_h, alloc);
356 ANV_CALL(DestroyDescriptorSetLayout)(device_h, ds_layout_h, alloc);
/* One pipeline slot per supported log2 sample count. */
358 for (uint32_t i = 0; i < ARRAY_SIZE(state->resolve.pipelines); ++i) {
359 VkPipeline pipeline_h = state->resolve.pipelines[i];
362 ANV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
/*
 * One-time device init for the meta resolve path. Creates, in order:
 * the shared vertex shader NIR, the descriptor-set layout (one
 * combined-image-sampler visible to the fragment stage), the pipeline
 * layout, the render pass, and one pipeline per sample count supported by
 * the hardware (queried from isl). On any failure the finish function above
 * unwinds whatever was created; the VS NIR is always freed at the end.
 * NOTE(review): extraction artifact — the return type, goto labels/`fail`
 * jumps, some struct-literal lines, the `for` loop header's first line and
 * closing braces are missing from this view.
 */
368 anv_device_init_meta_resolve_state(struct anv_device *device)
370 VkResult res = VK_SUCCESS;
371 VkDevice device_h = anv_device_to_handle(device);
372 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
/* Bitmask of sample counts the hardware supports; used to skip pipelines
 * for unsupported counts in the loop below. */
374 const isl_sample_count_mask_t sample_count_mask =
375 isl_device_get_sample_counts(&device->isl_dev);
377 zero(device->meta_state.resolve);
379 struct anv_shader_module vs_module = { .nir = build_nir_vs() };
380 if (!vs_module.nir) {
381 /* XXX: Need more accurate error */
382 res = VK_ERROR_OUT_OF_HOST_MEMORY;
386 VkShaderModule vs_module_h = anv_shader_module_to_handle(&vs_module);
/* Set 0, binding 0: the multisample source as combined image/sampler,
 * matching u_tex in build_nir_fs(). */
388 res = anv_CreateDescriptorSetLayout(device_h,
389 &(VkDescriptorSetLayoutCreateInfo) {
390 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
392 .pBindings = (VkDescriptorSetLayoutBinding[]) {
395 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
396 .descriptorCount = 1,
397 .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
402 &device->meta_state.resolve.ds_layout);
403 if (res != VK_SUCCESS)
406 res = anv_CreatePipelineLayout(device_h,
407 &(VkPipelineLayoutCreateInfo) {
408 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
410 .pSetLayouts = (VkDescriptorSetLayout[]) {
411 device->meta_state.resolve.ds_layout,
415 &device->meta_state.resolve.pipeline_layout);
416 if (res != VK_SUCCESS)
420 res = create_pass(device, &device->meta_state.resolve.pass);
421 if (res != VK_SUCCESS)
/* Slot i holds the pipeline for sample count 2^i; unsupported counts are
 * skipped and their slots stay zeroed. */
426 i < ARRAY_SIZE(device->meta_state.resolve.pipelines); ++i) {
427 uint32_t sample_count = 1 << i;
429 if (!(sample_count_mask & sample_count))
432 res = create_pipeline(device, sample_count, vs_module_h,
433 device->meta_state.resolve.pass,
434 &device->meta_state.resolve.pipelines[i]);
435 if (res != VK_SUCCESS)
/* Error path: destroy anything partially created. */
442 anv_device_finish_meta_resolve_state(device);
/* The pipelines own compiled code; the VS NIR can go either way. */
445 ralloc_free(vs_module.nir);
/*
 * Record the draw that performs one resolve rect inside an already-begun
 * render pass: uploads three vertices (RECTLIST-style rect from
 * dest_offset/resolve_extent with matching src tex coords), binds them from
 * the dynamic-state stream, creates a transient nearest sampler, allocates
 * a descriptor set from a dummy pool, points it at src_iview, sets
 * viewport/scissor to the full framebuffer, binds the pipeline matching the
 * source sample count, and draws 3 vertices. The sampler and descriptor set
 * are destroyed immediately after the draw is recorded.
 * NOTE(review): extraction artifact — the return type, several struct-literal
 * lines (including two of the three vertices' leading fields), closing braces
 * and some call arguments are missing from this view.
 */
451 emit_resolve(struct anv_cmd_buffer *cmd_buffer,
452 struct anv_image_view *src_iview,
453 const VkOffset2D *src_offset,
454 struct anv_image_view *dest_iview,
455 const VkOffset2D *dest_offset,
456 const VkExtent2D *resolve_extent)
458 struct anv_device *device = cmd_buffer->device;
459 VkDevice device_h = anv_device_to_handle(device);
460 VkCommandBuffer cmd_buffer_h = anv_cmd_buffer_to_handle(cmd_buffer);
461 const struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;
462 const struct anv_image *src_image = src_iview->image;
/* Descriptor sets here are allocated directly, so any non-null pool
 * handle satisfies the API; it is never dereferenced as a real pool. */
463 VkDescriptorPool dummy_desc_pool_h = (VkDescriptorPool) 1;
/* Index into meta_state.resolve.pipelines[]: log2 of the sample count. */
464 uint32_t samples_log2 = ffs(src_image->samples) - 1;
466 const struct vertex_attrs vertex_data[3] = {
470 dest_offset->x + resolve_extent->width,
471 dest_offset->y + resolve_extent->height,
474 src_offset->x + resolve_extent->width,
475 src_offset->y + resolve_extent->height,
482 dest_offset->y + resolve_extent->height,
486 src_offset->y + resolve_extent->height,
/* Copy the vertices into the command buffer's dynamic-state stream. */
502 struct anv_state vertex_mem =
503 anv_cmd_buffer_emit_dynamic(cmd_buffer, vertex_data,
504 sizeof(vertex_data), 16);
/* Stack-local wrapper so the dynamic-state allocation can be bound
 * through the normal vertex-buffer path. */
506 struct anv_buffer vertex_buffer = {
508 .size = sizeof(vertex_data),
509 .bo = &cmd_buffer->dynamic_state_stream.block_pool->bo,
510 .offset = vertex_mem.offset,
513 VkBuffer vertex_buffer_h = anv_buffer_to_handle(&vertex_buffer);
515 anv_CmdBindVertexBuffers(cmd_buffer_h,
518 (VkBuffer[]) { vertex_buffer_h },
519 (VkDeviceSize[]) { 0 });
/* Transient nearest/clamp sampler; destroyed after the draw is recorded. */
522 ANV_CALL(CreateSampler)(device_h,
523 &(VkSamplerCreateInfo) {
524 .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
525 .magFilter = VK_FILTER_NEAREST,
526 .minFilter = VK_FILTER_NEAREST,
527 .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
528 .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
529 .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
530 .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
532 .anisotropyEnable = false,
533 .compareEnable = false,
536 .unnormalizedCoordinates = false,
538 &cmd_buffer->pool->alloc,
541 VkDescriptorSet desc_set_h;
542 anv_AllocateDescriptorSets(device_h,
543 &(VkDescriptorSetAllocateInfo) {
544 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
545 .descriptorPool = dummy_desc_pool_h,
546 .descriptorSetCount = 1,
/* NOTE(review): this allocates with the *blit* ds_layout while the bind
 * below uses the *resolve* pipeline layout — the two layouts may be
 * identical (one combined-image-sampler), but verify against upstream;
 * resolve.ds_layout looks like the intended value. */
547 .pSetLayouts = (VkDescriptorSetLayout[]) {
548 device->meta_state.blit.ds_layout,
553 ANV_FROM_HANDLE(anv_descriptor_set, desc_set, desc_set_h);
/* Point set 0 / binding 0 at the multisample source view. */
555 anv_UpdateDescriptorSets(device_h,
557 (VkWriteDescriptorSet[]) {
559 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
560 .dstSet = desc_set_h,
562 .dstArrayElement = 0,
563 .descriptorCount = 1,
564 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
565 .pImageInfo = (VkDescriptorImageInfo[]) {
567 .sampler = sampler_h,
568 .imageView = anv_image_view_to_handle(src_iview),
569 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
/* Full-framebuffer viewport/scissor; the rect itself bounds the draw. */
577 ANV_CALL(CmdSetViewport)(cmd_buffer_h,
585 .height = fb->height,
591 ANV_CALL(CmdSetScissor)(cmd_buffer_h,
597 .extent = (VkExtent2D) { fb->width, fb->height },
601 VkPipeline pipeline_h = device->meta_state.resolve.pipelines[samples_log2];
602 ANV_FROM_HANDLE(anv_pipeline, pipeline, pipeline_h);
/* Skip the redundant bind if this pipeline is already current. */
604 if (cmd_buffer->state.pipeline != pipeline) {
605 anv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
609 anv_CmdBindDescriptorSets(cmd_buffer_h,
610 VK_PIPELINE_BIND_POINT_GRAPHICS,
611 device->meta_state.resolve.pipeline_layout,
614 (VkDescriptorSet[]) {
620 ANV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
622 /* All objects below are consumed by the draw call. We may safely destroy
625 anv_descriptor_set_destroy(device, desc_set);
626 anv_DestroySampler(device_h, sampler_h,
627 &cmd_buffer->pool->alloc);
/*
 * vkCmdResolveImage entry point: resolve each multisample region of
 * src_image into the single-sample dest_image by rendering. Saves the
 * command buffer's meta state, then for every region and every layer
 * creates a src/dest image view, a one-attachment framebuffer sized to the
 * dest mip level, begins the shared resolve render pass, records the draw
 * via emit_resolve(), ends the pass, and destroys the framebuffer. Meta
 * state is restored at the end. Two unimplemented cases are flagged with
 * anv_finishme(): samples >= 16 (interpolation workaround needed) and
 * multisample array images.
 * NOTE(review): extraction artifact — many lines are missing from this view
 * (e.g. `VkImage src_image_h` parameter, `continue`/closing braces, the
 * framebuffer handle declaration, renderArea fields). Also, the `®` glyphs
 * below are mis-encoded `&reg…` sequences: `®ions[r]` should read
 * `&regions[r]` and `®ion->…` should read `&region->…`.
 */
630 void anv_CmdResolveImage(
631 VkCommandBuffer cmd_buffer_h,
633 VkImageLayout src_image_layout,
634 VkImage dest_image_h,
635 VkImageLayout dest_image_layout,
636 uint32_t region_count,
637 const VkImageResolve* regions)
639 ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmd_buffer_h);
640 ANV_FROM_HANDLE(anv_image, src_image, src_image_h);
641 ANV_FROM_HANDLE(anv_image, dest_image, dest_image_h);
642 struct anv_device *device = cmd_buffer->device;
643 struct anv_meta_saved_state state;
644 VkDevice device_h = anv_device_to_handle(device);
/* Save dynamic/bind state so the app's state survives the meta draws. */
646 anv_meta_save(&state, cmd_buffer, 0);
/* Spec guarantees: multisample source, single-sample destination. */
648 assert(src_image->samples > 1);
649 assert(dest_image->samples == 1);
651 if (src_image->samples >= 16) {
652 /* See commit aa3f9aaf31e9056a255f9e0472ebdfdaa60abe54 for the
653 * glBlitFramebuffer workaround for samples >= 16.
655 anv_finishme("vkCmdResolveImage: need interpolation workaround when "
659 if (src_image->array_size > 1)
660 anv_finishme("vkCmdResolveImage: multisample array images");
662 for (uint32_t r = 0; r < region_count; ++r) {
663 const VkImageResolve *region = ®ions[r];
665 /* From the Vulkan 1.0 spec:
667 * - The aspectMask member of srcSubresource and dstSubresource must
668 * only contain VK_IMAGE_ASPECT_COLOR_BIT
670 * - The layerCount member of srcSubresource and dstSubresource must
673 assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
674 assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
675 assert(region->srcSubresource.layerCount ==
676 region->dstSubresource.layerCount);
678 const uint32_t src_base_layer =
679 anv_meta_get_iview_layer(src_image, ®ion->srcSubresource,
682 const uint32_t dest_base_layer =
683 anv_meta_get_iview_layer(dest_image, ®ion->dstSubresource,
/* One render pass + draw per layer of the region. */
686 for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
689 struct anv_image_view src_iview;
690 anv_image_view_init(&src_iview, cmd_buffer->device,
691 &(VkImageViewCreateInfo) {
692 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
693 .image = src_image_h,
694 .viewType = anv_meta_get_view_type(src_image),
695 .format = src_image->format->vk_format,
696 .subresourceRange = {
697 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
698 .baseMipLevel = region->srcSubresource.mipLevel,
700 .baseArrayLayer = src_base_layer + layer,
706 struct anv_image_view dest_iview;
707 anv_image_view_init(&dest_iview, cmd_buffer->device,
708 &(VkImageViewCreateInfo) {
709 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
710 .image = dest_image_h,
711 .viewType = anv_meta_get_view_type(dest_image),
712 .format = dest_image->format->vk_format,
713 .subresourceRange = {
714 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
715 .baseMipLevel = region->dstSubresource.mipLevel,
717 .baseArrayLayer = dest_base_layer + layer,
/* Framebuffer sized to the destination mip level. */
724 anv_CreateFramebuffer(device_h,
725 &(VkFramebufferCreateInfo) {
726 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
727 .attachmentCount = 1,
728 .pAttachments = (VkImageView[]) {
729 anv_image_view_to_handle(&dest_iview),
731 .width = anv_minify(dest_image->extent.width,
732 region->dstSubresource.mipLevel),
733 .height = anv_minify(dest_image->extent.height,
734 region->dstSubresource.mipLevel),
737 &cmd_buffer->pool->alloc,
740 ANV_CALL(CmdBeginRenderPass)(cmd_buffer_h,
741 &(VkRenderPassBeginInfo) {
742 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
743 .renderPass = device->meta_state.resolve.pass,
751 region->extent.width,
752 region->extent.height,
/* LOAD_OP_LOAD pass: no clears. */
755 .clearValueCount = 0,
756 .pClearValues = NULL,
758 VK_SUBPASS_CONTENTS_INLINE);
760 emit_resolve(cmd_buffer,
763 .x = region->srcOffset.x,
764 .y = region->srcOffset.y,
768 .x = region->dstOffset.x,
769 .y = region->dstOffset.y,
772 .width = region->extent.width,
773 .height = region->extent.height,
776 ANV_CALL(CmdEndRenderPass)(cmd_buffer_h);
778 anv_DestroyFramebuffer(device_h, fb_h,
779 &cmd_buffer->pool->alloc);
783 anv_meta_restore(&state, cmd_buffer);