2 * Copyright © 2016 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
27 #include "radv_meta.h"
28 #include "radv_private.h"
29 #include "vk_format.h"
30 #include "nir/nir_builder.h"
37 const struct glsl_type
*vec4
= glsl_vec4_type();
39 nir_variable
*f_color
; /* vec4, fragment output color */
41 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_FRAGMENT
, NULL
);
42 b
.shader
->info
.name
= ralloc_asprintf(b
.shader
,
45 f_color
= nir_variable_create(b
.shader
, nir_var_shader_out
, vec4
,
47 f_color
->data
.location
= FRAG_RESULT_DATA0
;
48 nir_store_var(&b
, f_color
, nir_imm_vec4(&b
, 0.0, 0.0, 0.0, 1.0), 0xf);
54 create_pass(struct radv_device
*device
, VkFormat vk_format
, VkRenderPass
*pass
)
57 VkDevice device_h
= radv_device_to_handle(device
);
58 const VkAllocationCallbacks
*alloc
= &device
->meta_state
.alloc
;
59 VkAttachmentDescription attachments
[2];
62 for (i
= 0; i
< 2; i
++) {
63 attachments
[i
].format
= vk_format
;
64 attachments
[i
].samples
= 1;
65 attachments
[i
].loadOp
= VK_ATTACHMENT_LOAD_OP_LOAD
;
66 attachments
[i
].storeOp
= VK_ATTACHMENT_STORE_OP_STORE
;
68 attachments
[0].initialLayout
= VK_IMAGE_LAYOUT_GENERAL
;
69 attachments
[0].finalLayout
= VK_IMAGE_LAYOUT_GENERAL
;
70 attachments
[1].initialLayout
= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
;
71 attachments
[1].finalLayout
= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
;
73 result
= radv_CreateRenderPass(device_h
,
74 &(VkRenderPassCreateInfo
) {
75 .sType
= VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
,
77 .pAttachments
= attachments
,
79 .pSubpasses
= &(VkSubpassDescription
) {
80 .pipelineBindPoint
= VK_PIPELINE_BIND_POINT_GRAPHICS
,
81 .inputAttachmentCount
= 0,
82 .colorAttachmentCount
= 2,
83 .pColorAttachments
= (VkAttachmentReference
[]) {
86 .layout
= VK_IMAGE_LAYOUT_GENERAL
,
90 .layout
= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
,
93 .pResolveAttachments
= NULL
,
94 .pDepthStencilAttachment
= &(VkAttachmentReference
) {
95 .attachment
= VK_ATTACHMENT_UNUSED
,
97 .preserveAttachmentCount
= 0,
98 .pPreserveAttachments
= NULL
,
100 .dependencyCount
= 0,
109 create_pipeline(struct radv_device
*device
,
110 VkShaderModule vs_module_h
,
111 VkPipeline
*pipeline
,
115 VkDevice device_h
= radv_device_to_handle(device
);
117 struct radv_shader_module fs_module
= {
118 .nir
= build_nir_fs(),
121 if (!fs_module
.nir
) {
122 /* XXX: Need more accurate error */
123 result
= VK_ERROR_OUT_OF_HOST_MEMORY
;
127 VkPipelineLayoutCreateInfo pl_create_info
= {
128 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
131 .pushConstantRangeCount
= 0,
132 .pPushConstantRanges
= NULL
,
135 if (!device
->meta_state
.resolve
.p_layout
) {
136 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
138 &device
->meta_state
.alloc
,
139 &device
->meta_state
.resolve
.p_layout
);
140 if (result
!= VK_SUCCESS
)
144 result
= radv_graphics_pipeline_create(device_h
,
145 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
146 &(VkGraphicsPipelineCreateInfo
) {
147 .sType
= VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO
,
149 .pStages
= (VkPipelineShaderStageCreateInfo
[]) {
151 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
152 .stage
= VK_SHADER_STAGE_VERTEX_BIT
,
153 .module
= vs_module_h
,
157 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
158 .stage
= VK_SHADER_STAGE_FRAGMENT_BIT
,
159 .module
= radv_shader_module_to_handle(&fs_module
),
163 .pVertexInputState
= &(VkPipelineVertexInputStateCreateInfo
) {
164 .sType
= VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO
,
165 .vertexBindingDescriptionCount
= 0,
166 .vertexAttributeDescriptionCount
= 0,
168 .pInputAssemblyState
= &(VkPipelineInputAssemblyStateCreateInfo
) {
169 .sType
= VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO
,
170 .topology
= VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
,
171 .primitiveRestartEnable
= false,
173 .pViewportState
= &(VkPipelineViewportStateCreateInfo
) {
174 .sType
= VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO
,
178 .pRasterizationState
= &(VkPipelineRasterizationStateCreateInfo
) {
179 .sType
= VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO
,
180 .depthClampEnable
= false,
181 .rasterizerDiscardEnable
= false,
182 .polygonMode
= VK_POLYGON_MODE_FILL
,
183 .cullMode
= VK_CULL_MODE_NONE
,
184 .frontFace
= VK_FRONT_FACE_COUNTER_CLOCKWISE
,
186 .pMultisampleState
= &(VkPipelineMultisampleStateCreateInfo
) {
187 .sType
= VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO
,
188 .rasterizationSamples
= 1,
189 .sampleShadingEnable
= false,
191 .alphaToCoverageEnable
= false,
192 .alphaToOneEnable
= false,
194 .pColorBlendState
= &(VkPipelineColorBlendStateCreateInfo
) {
195 .sType
= VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO
,
196 .logicOpEnable
= false,
197 .attachmentCount
= 2,
198 .pAttachments
= (VkPipelineColorBlendAttachmentState
[]) {
200 .colorWriteMask
= VK_COLOR_COMPONENT_R_BIT
|
201 VK_COLOR_COMPONENT_G_BIT
|
202 VK_COLOR_COMPONENT_B_BIT
|
203 VK_COLOR_COMPONENT_A_BIT
,
211 .pDynamicState
= &(VkPipelineDynamicStateCreateInfo
) {
212 .sType
= VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO
,
213 .dynamicStateCount
= 2,
214 .pDynamicStates
= (VkDynamicState
[]) {
215 VK_DYNAMIC_STATE_VIEWPORT
,
216 VK_DYNAMIC_STATE_SCISSOR
,
219 .layout
= device
->meta_state
.resolve
.p_layout
,
223 &(struct radv_graphics_pipeline_create_info
) {
224 .use_rectlist
= true,
225 .custom_blend_mode
= V_028808_CB_RESOLVE
,
227 &device
->meta_state
.alloc
, pipeline
);
228 if (result
!= VK_SUCCESS
)
234 ralloc_free(fs_module
.nir
);
239 radv_device_finish_meta_resolve_state(struct radv_device
*device
)
241 struct radv_meta_state
*state
= &device
->meta_state
;
243 for (uint32_t j
= 0; j
< NUM_META_FS_KEYS
; j
++) {
244 radv_DestroyRenderPass(radv_device_to_handle(device
),
245 state
->resolve
.pass
[j
], &state
->alloc
);
246 radv_DestroyPipeline(radv_device_to_handle(device
),
247 state
->resolve
.pipeline
[j
], &state
->alloc
);
249 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
250 state
->resolve
.p_layout
, &state
->alloc
);
254 static VkFormat pipeline_formats
[] = {
255 VK_FORMAT_R8G8B8A8_UNORM
,
256 VK_FORMAT_R8G8B8A8_UINT
,
257 VK_FORMAT_R8G8B8A8_SINT
,
258 VK_FORMAT_A2R10G10B10_UINT_PACK32
,
259 VK_FORMAT_A2R10G10B10_SINT_PACK32
,
260 VK_FORMAT_R16G16B16A16_UNORM
,
261 VK_FORMAT_R16G16B16A16_SNORM
,
262 VK_FORMAT_R16G16B16A16_UINT
,
263 VK_FORMAT_R16G16B16A16_SINT
,
264 VK_FORMAT_R32_SFLOAT
,
265 VK_FORMAT_R32G32_SFLOAT
,
266 VK_FORMAT_R32G32B32A32_SFLOAT
270 radv_device_init_meta_resolve_state(struct radv_device
*device
)
272 VkResult res
= VK_SUCCESS
;
273 struct radv_meta_state
*state
= &device
->meta_state
;
274 struct radv_shader_module vs_module
= { .nir
= radv_meta_build_nir_vs_generate_vertices() };
275 if (!vs_module
.nir
) {
276 /* XXX: Need more accurate error */
277 res
= VK_ERROR_OUT_OF_HOST_MEMORY
;
281 for (uint32_t i
= 0; i
< ARRAY_SIZE(pipeline_formats
); ++i
) {
282 VkFormat format
= pipeline_formats
[i
];
283 unsigned fs_key
= radv_format_meta_fs_key(format
);
284 res
= create_pass(device
, format
, &state
->resolve
.pass
[fs_key
]);
285 if (res
!= VK_SUCCESS
)
288 VkShaderModule vs_module_h
= radv_shader_module_to_handle(&vs_module
);
289 res
= create_pipeline(device
, vs_module_h
,
290 &state
->resolve
.pipeline
[fs_key
], state
->resolve
.pass
[fs_key
]);
291 if (res
!= VK_SUCCESS
)
298 radv_device_finish_meta_resolve_state(device
);
301 ralloc_free(vs_module
.nir
);
307 emit_resolve(struct radv_cmd_buffer
*cmd_buffer
,
309 const VkOffset2D
*dest_offset
,
310 const VkExtent2D
*resolve_extent
)
312 struct radv_device
*device
= cmd_buffer
->device
;
313 VkCommandBuffer cmd_buffer_h
= radv_cmd_buffer_to_handle(cmd_buffer
);
314 unsigned fs_key
= radv_format_meta_fs_key(vk_format
);
316 cmd_buffer
->state
.flush_bits
|= RADV_CMD_FLAG_FLUSH_AND_INV_CB
;
318 radv_CmdBindPipeline(cmd_buffer_h
, VK_PIPELINE_BIND_POINT_GRAPHICS
,
319 device
->meta_state
.resolve
.pipeline
[fs_key
]);
321 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer
), 0, 1, &(VkViewport
) {
324 .width
= resolve_extent
->width
,
325 .height
= resolve_extent
->height
,
330 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer
), 0, 1, &(VkRect2D
) {
331 .offset
= *dest_offset
,
332 .extent
= *resolve_extent
,
335 radv_CmdDraw(cmd_buffer_h
, 3, 1, 0, 0);
336 cmd_buffer
->state
.flush_bits
|= RADV_CMD_FLAG_FLUSH_AND_INV_CB
;
/* How a multisample resolve will be performed. All three enumerators are
 * used by the method-selection and dispatch code below. */
enum radv_resolve_method {
	RESOLVE_HW,       /* CB hardware resolve via the meta render pass */
	RESOLVE_COMPUTE,  /* compute-shader fallback */
	RESOLVE_FRAGMENT, /* fragment-shader fallback */
};
345 static void radv_pick_resolve_method_images(struct radv_image
*src_image
,
346 struct radv_image
*dest_image
,
347 VkImageLayout dest_image_layout
,
348 struct radv_cmd_buffer
*cmd_buffer
,
349 enum radv_resolve_method
*method
)
352 uint32_t queue_mask
= radv_image_queue_family_mask(dest_image
,
353 cmd_buffer
->queue_family_index
,
354 cmd_buffer
->queue_family_index
);
356 if (src_image
->vk_format
== VK_FORMAT_R16G16_UNORM
||
357 src_image
->vk_format
== VK_FORMAT_R16G16_SNORM
)
358 *method
= RESOLVE_COMPUTE
;
359 else if (vk_format_is_int(src_image
->vk_format
))
360 *method
= RESOLVE_COMPUTE
;
361 else if (src_image
->info
.array_size
> 1)
362 *method
= RESOLVE_COMPUTE
;
364 if (radv_layout_dcc_compressed(dest_image
, dest_image_layout
, queue_mask
)) {
365 *method
= RESOLVE_FRAGMENT
;
366 } else if (dest_image
->surface
.micro_tile_mode
!= src_image
->surface
.micro_tile_mode
) {
367 *method
= RESOLVE_COMPUTE
;
371 void radv_CmdResolveImage(
372 VkCommandBuffer cmd_buffer_h
,
374 VkImageLayout src_image_layout
,
375 VkImage dest_image_h
,
376 VkImageLayout dest_image_layout
,
377 uint32_t region_count
,
378 const VkImageResolve
* regions
)
380 RADV_FROM_HANDLE(radv_cmd_buffer
, cmd_buffer
, cmd_buffer_h
);
381 RADV_FROM_HANDLE(radv_image
, src_image
, src_image_h
);
382 RADV_FROM_HANDLE(radv_image
, dest_image
, dest_image_h
);
383 struct radv_device
*device
= cmd_buffer
->device
;
384 struct radv_meta_saved_state saved_state
;
385 VkDevice device_h
= radv_device_to_handle(device
);
386 enum radv_resolve_method resolve_method
= RESOLVE_HW
;
387 /* we can use the hw resolve only for single full resolves */
388 if (region_count
== 1) {
389 if (regions
[0].srcOffset
.x
||
390 regions
[0].srcOffset
.y
||
391 regions
[0].srcOffset
.z
)
392 resolve_method
= RESOLVE_COMPUTE
;
393 if (regions
[0].dstOffset
.x
||
394 regions
[0].dstOffset
.y
||
395 regions
[0].dstOffset
.z
)
396 resolve_method
= RESOLVE_COMPUTE
;
398 if (regions
[0].extent
.width
!= src_image
->info
.width
||
399 regions
[0].extent
.height
!= src_image
->info
.height
||
400 regions
[0].extent
.depth
!= src_image
->info
.depth
)
401 resolve_method
= RESOLVE_COMPUTE
;
403 resolve_method
= RESOLVE_COMPUTE
;
405 radv_pick_resolve_method_images(src_image
, dest_image
,
406 dest_image_layout
, cmd_buffer
,
409 if (resolve_method
== RESOLVE_FRAGMENT
) {
410 radv_meta_resolve_fragment_image(cmd_buffer
,
415 region_count
, regions
);
419 if (resolve_method
== RESOLVE_COMPUTE
) {
420 radv_meta_resolve_compute_image(cmd_buffer
,
425 region_count
, regions
);
429 radv_meta_save(&saved_state
, cmd_buffer
,
430 RADV_META_SAVE_GRAPHICS_PIPELINE
);
432 assert(src_image
->info
.samples
> 1);
433 if (src_image
->info
.samples
<= 1) {
434 /* this causes GPU hangs if we get past here */
435 fprintf(stderr
, "radv: Illegal resolve operation (src not multisampled), will hang GPU.");
438 assert(dest_image
->info
.samples
== 1);
440 if (src_image
->info
.samples
>= 16) {
441 /* See commit aa3f9aaf31e9056a255f9e0472ebdfdaa60abe54 for the
442 * glBlitFramebuffer workaround for samples >= 16.
444 radv_finishme("vkCmdResolveImage: need interpolation workaround when "
448 if (src_image
->info
.array_size
> 1)
449 radv_finishme("vkCmdResolveImage: multisample array images");
451 if (radv_image_has_dcc(dest_image
)) {
452 radv_initialize_dcc(cmd_buffer
, dest_image
, 0xffffffff);
454 unsigned fs_key
= radv_format_meta_fs_key(dest_image
->vk_format
);
455 for (uint32_t r
= 0; r
< region_count
; ++r
) {
456 const VkImageResolve
*region
= ®ions
[r
];
458 /* From the Vulkan 1.0 spec:
460 * - The aspectMask member of srcSubresource and dstSubresource must
461 * only contain VK_IMAGE_ASPECT_COLOR_BIT
463 * - The layerCount member of srcSubresource and dstSubresource must
466 assert(region
->srcSubresource
.aspectMask
== VK_IMAGE_ASPECT_COLOR_BIT
);
467 assert(region
->dstSubresource
.aspectMask
== VK_IMAGE_ASPECT_COLOR_BIT
);
468 assert(region
->srcSubresource
.layerCount
==
469 region
->dstSubresource
.layerCount
);
471 const uint32_t src_base_layer
=
472 radv_meta_get_iview_layer(src_image
, ®ion
->srcSubresource
,
475 const uint32_t dest_base_layer
=
476 radv_meta_get_iview_layer(dest_image
, ®ion
->dstSubresource
,
480 * From Vulkan 1.0.6 spec: 18.6 Resolving Multisample Images
482 * extent is the size in texels of the source image to resolve in width,
483 * height and depth. 1D images use only x and width. 2D images use x, y,
484 * width and height. 3D images use x, y, z, width, height and depth.
486 * srcOffset and dstOffset select the initial x, y, and z offsets in
487 * texels of the sub-regions of the source and destination image data.
488 * extent is the size in texels of the source image to resolve in width,
489 * height and depth. 1D images use only x and width. 2D images use x, y,
490 * width and height. 3D images use x, y, z, width, height and depth.
492 const struct VkExtent3D extent
=
493 radv_sanitize_image_extent(src_image
->type
, region
->extent
);
494 const struct VkOffset3D dstOffset
=
495 radv_sanitize_image_offset(dest_image
->type
, region
->dstOffset
);
498 for (uint32_t layer
= 0; layer
< region
->srcSubresource
.layerCount
;
501 struct radv_image_view src_iview
;
502 radv_image_view_init(&src_iview
, cmd_buffer
->device
,
503 &(VkImageViewCreateInfo
) {
504 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
505 .image
= src_image_h
,
506 .viewType
= radv_meta_get_view_type(src_image
),
507 .format
= src_image
->vk_format
,
508 .subresourceRange
= {
509 .aspectMask
= VK_IMAGE_ASPECT_COLOR_BIT
,
510 .baseMipLevel
= region
->srcSubresource
.mipLevel
,
512 .baseArrayLayer
= src_base_layer
+ layer
,
517 struct radv_image_view dest_iview
;
518 radv_image_view_init(&dest_iview
, cmd_buffer
->device
,
519 &(VkImageViewCreateInfo
) {
520 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
521 .image
= dest_image_h
,
522 .viewType
= radv_meta_get_view_type(dest_image
),
523 .format
= dest_image
->vk_format
,
524 .subresourceRange
= {
525 .aspectMask
= VK_IMAGE_ASPECT_COLOR_BIT
,
526 .baseMipLevel
= region
->dstSubresource
.mipLevel
,
528 .baseArrayLayer
= dest_base_layer
+ layer
,
534 radv_CreateFramebuffer(device_h
,
535 &(VkFramebufferCreateInfo
) {
536 .sType
= VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
,
537 .attachmentCount
= 2,
538 .pAttachments
= (VkImageView
[]) {
539 radv_image_view_to_handle(&src_iview
),
540 radv_image_view_to_handle(&dest_iview
),
542 .width
= radv_minify(dest_image
->info
.width
,
543 region
->dstSubresource
.mipLevel
),
544 .height
= radv_minify(dest_image
->info
.height
,
545 region
->dstSubresource
.mipLevel
),
548 &cmd_buffer
->pool
->alloc
,
551 radv_CmdBeginRenderPass(cmd_buffer_h
,
552 &(VkRenderPassBeginInfo
) {
553 .sType
= VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
,
554 .renderPass
= device
->meta_state
.resolve
.pass
[fs_key
],
566 .clearValueCount
= 0,
567 .pClearValues
= NULL
,
569 VK_SUBPASS_CONTENTS_INLINE
);
571 emit_resolve(cmd_buffer
,
572 dest_iview
.vk_format
,
578 .width
= extent
.width
,
579 .height
= extent
.height
,
582 radv_CmdEndRenderPass(cmd_buffer_h
);
584 radv_DestroyFramebuffer(device_h
, fb_h
,
585 &cmd_buffer
->pool
->alloc
);
589 radv_meta_restore(&saved_state
, cmd_buffer
);
593 * Emit any needed resolves for the current subpass.
596 radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer
*cmd_buffer
)
598 struct radv_framebuffer
*fb
= cmd_buffer
->state
.framebuffer
;
599 const struct radv_subpass
*subpass
= cmd_buffer
->state
.subpass
;
600 struct radv_meta_saved_state saved_state
;
601 enum radv_resolve_method resolve_method
= RESOLVE_HW
;
603 /* FINISHME(perf): Skip clears for resolve attachments.
605 * From the Vulkan 1.0 spec:
607 * If the first use of an attachment in a render pass is as a resolve
608 * attachment, then the loadOp is effectively ignored as the resolve is
609 * guaranteed to overwrite all pixels in the render area.
612 if (!subpass
->has_resolve
)
615 for (uint32_t i
= 0; i
< subpass
->color_count
; ++i
) {
616 VkAttachmentReference src_att
= subpass
->color_attachments
[i
];
617 VkAttachmentReference dest_att
= subpass
->resolve_attachments
[i
];
619 if (src_att
.attachment
== VK_ATTACHMENT_UNUSED
||
620 dest_att
.attachment
== VK_ATTACHMENT_UNUSED
)
623 struct radv_image
*dst_img
= cmd_buffer
->state
.framebuffer
->attachments
[dest_att
.attachment
].attachment
->image
;
624 struct radv_image
*src_img
= cmd_buffer
->state
.framebuffer
->attachments
[src_att
.attachment
].attachment
->image
;
626 radv_pick_resolve_method_images(src_img
, dst_img
, dest_att
.layout
, cmd_buffer
, &resolve_method
);
627 if (resolve_method
== RESOLVE_FRAGMENT
) {
632 if (resolve_method
== RESOLVE_COMPUTE
) {
633 radv_cmd_buffer_resolve_subpass_cs(cmd_buffer
);
635 } else if (resolve_method
== RESOLVE_FRAGMENT
) {
636 radv_cmd_buffer_resolve_subpass_fs(cmd_buffer
);
640 radv_meta_save(&saved_state
, cmd_buffer
,
641 RADV_META_SAVE_GRAPHICS_PIPELINE
);
643 for (uint32_t i
= 0; i
< subpass
->color_count
; ++i
) {
644 VkAttachmentReference src_att
= subpass
->color_attachments
[i
];
645 VkAttachmentReference dest_att
= subpass
->resolve_attachments
[i
];
647 if (src_att
.attachment
== VK_ATTACHMENT_UNUSED
||
648 dest_att
.attachment
== VK_ATTACHMENT_UNUSED
)
651 struct radv_image
*dst_img
= cmd_buffer
->state
.framebuffer
->attachments
[dest_att
.attachment
].attachment
->image
;
653 if (radv_image_has_dcc(dst_img
)) {
654 radv_initialize_dcc(cmd_buffer
, dst_img
, 0xffffffff);
655 cmd_buffer
->state
.attachments
[dest_att
.attachment
].current_layout
= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
;
658 struct radv_subpass resolve_subpass
= {
660 .color_attachments
= (VkAttachmentReference
[]) { src_att
, dest_att
},
661 .depth_stencil_attachment
= { .attachment
= VK_ATTACHMENT_UNUSED
},
664 radv_cmd_buffer_set_subpass(cmd_buffer
, &resolve_subpass
, false);
666 emit_resolve(cmd_buffer
,
668 &(VkOffset2D
) { 0, 0 },
669 &(VkExtent2D
) { fb
->width
, fb
->height
});
672 cmd_buffer
->state
.subpass
= subpass
;
673 radv_meta_restore(&saved_state
, cmd_buffer
);
677 * Decompress CMask/FMask before resolving a multisampled source image inside a
681 radv_decompress_resolve_subpass_src(struct radv_cmd_buffer
*cmd_buffer
)
683 const struct radv_subpass
*subpass
= cmd_buffer
->state
.subpass
;
684 struct radv_framebuffer
*fb
= cmd_buffer
->state
.framebuffer
;
686 for (uint32_t i
= 0; i
< subpass
->color_count
; ++i
) {
687 VkAttachmentReference src_att
= subpass
->color_attachments
[i
];
688 VkAttachmentReference dest_att
= subpass
->resolve_attachments
[i
];
690 if (src_att
.attachment
== VK_ATTACHMENT_UNUSED
||
691 dest_att
.attachment
== VK_ATTACHMENT_UNUSED
)
694 struct radv_image
*src_image
=
695 fb
->attachments
[src_att
.attachment
].attachment
->image
;
697 VkImageResolve region
= {};
698 region
.srcSubresource
.baseArrayLayer
= 0;
699 region
.srcSubresource
.mipLevel
= 0;
700 region
.srcSubresource
.layerCount
= src_image
->info
.array_size
;
702 radv_decompress_resolve_src(cmd_buffer
, src_image
,
703 src_att
.layout
, 1, ®ion
);
708 * Decompress CMask/FMask before resolving a multisampled source image.
711 radv_decompress_resolve_src(struct radv_cmd_buffer
*cmd_buffer
,
712 struct radv_image
*src_image
,
713 VkImageLayout src_image_layout
,
714 uint32_t region_count
,
715 const VkImageResolve
*regions
)
717 for (uint32_t r
= 0; r
< region_count
; ++r
) {
718 const VkImageResolve
*region
= ®ions
[r
];
719 const uint32_t src_base_layer
=
720 radv_meta_get_iview_layer(src_image
, ®ion
->srcSubresource
,
722 VkImageSubresourceRange range
;
723 range
.aspectMask
= VK_IMAGE_ASPECT_COLOR_BIT
;
724 range
.baseMipLevel
= region
->srcSubresource
.mipLevel
;
725 range
.levelCount
= 1;
726 range
.baseArrayLayer
= src_base_layer
;
727 range
.layerCount
= region
->srcSubresource
.layerCount
;
729 uint32_t queue_mask
=
730 radv_image_queue_family_mask(src_image
,
731 cmd_buffer
->queue_family_index
,
732 cmd_buffer
->queue_family_index
);
734 if (radv_layout_dcc_compressed(src_image
, src_image_layout
,
736 radv_decompress_dcc(cmd_buffer
, src_image
, &range
);
738 radv_fast_clear_flush_image_inplace(cmd_buffer
,