nir: Embed the shader_info in the nir_shader again
[mesa.git] / src / amd / vulkan / radv_meta_resolve.c
1 /*
2 * Copyright © 2016 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <assert.h>
25 #include <stdbool.h>
26
27 #include "radv_meta.h"
28 #include "radv_private.h"
29 #include "nir/nir_builder.h"
30 #include "sid.h"
31
32 /* emit 0, 0, 0, 1 */
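/* The value written here is essentially a placeholder: the resolve pipeline
 * is created with the V_028808_CB_RESOLVE custom blend mode, so the color
 * hardware reads the multisampled attachment 0 and writes the resolved
 * result into attachment 1 during the rectlist draw.
 */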
33 static nir_shader *
34 build_nir_fs(void)
35 {
36 const struct glsl_type *vec4 = glsl_vec4_type();
37 nir_builder b;
38 nir_variable *f_color; /* vec4, fragment output color */
39
40 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
41 b.shader->info.name = ralloc_asprintf(b.shader,
42 "meta_resolve_fs");
43
44 f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
45 "f_color");
46 f_color->data.location = FRAG_RESULT_DATA0;
47 nir_store_var(&b, f_color, nir_imm_vec4(&b, 0.0, 0.0, 0.0, 1.0), 0xf);
48
49 return b.shader;
50 }
51
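/* Render pass for the hardware resolve path: the multisampled source
 * (attachment 0, kept in VK_IMAGE_LAYOUT_GENERAL) and the single-sampled
 * destination (attachment 1, COLOR_ATTACHMENT_OPTIMAL) are both bound as
 * color attachments of a single subpass, matching what the CB resolve
 * expects.
 */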
52 static VkResult
53 create_pass(struct radv_device *device)
54 {
55 VkResult result;
56 VkDevice device_h = radv_device_to_handle(device);
57 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
58 VkAttachmentDescription attachments[2];
59 int i;
60
61 for (i = 0; i < 2; i++) {
62 attachments[i].format = VK_FORMAT_UNDEFINED;
63 attachments[i].samples = 1;
64 attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
65 attachments[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
66 }
67 attachments[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
68 attachments[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
69 attachments[1].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
70 attachments[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
71
72 result = radv_CreateRenderPass(device_h,
73 &(VkRenderPassCreateInfo) {
74 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
75 .attachmentCount = 2,
76 .pAttachments = attachments,
77 .subpassCount = 1,
78 .pSubpasses = &(VkSubpassDescription) {
79 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
80 .inputAttachmentCount = 0,
81 .colorAttachmentCount = 2,
82 .pColorAttachments = (VkAttachmentReference[]) {
83 {
84 .attachment = 0,
85 .layout = VK_IMAGE_LAYOUT_GENERAL,
86 },
87 {
88 .attachment = 1,
89 .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
90 },
91 },
92 .pResolveAttachments = NULL,
93 .pDepthStencilAttachment = &(VkAttachmentReference) {
94 .attachment = VK_ATTACHMENT_UNUSED,
95 },
96 .preserveAttachmentCount = 0,
97 .pPreserveAttachments = NULL,
98 },
99 .dependencyCount = 0,
100 },
101 alloc,
102 &device->meta_state.resolve.pass);
103
104 return result;
105 }
106
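/* Pipeline for the hardware resolve path: no vertex inputs (the vertex
 * shader generates the positions itself), dynamic viewport/scissor, a
 * rectlist draw via use_rectlist, and the V_028808_CB_RESOLVE custom blend
 * mode.  Attachment 1 gets a colorWriteMask of 0 so that ordinary fragment
 * output never lands in the destination; only the resolve writes it.
 */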
107 static VkResult
108 create_pipeline(struct radv_device *device,
109 VkShaderModule vs_module_h)
110 {
111 VkResult result;
112 VkDevice device_h = radv_device_to_handle(device);
113
114 struct radv_shader_module fs_module = {
115 .nir = build_nir_fs(),
116 };
117
118 if (!fs_module.nir) {
119 /* XXX: Need more accurate error */
120 result = VK_ERROR_OUT_OF_HOST_MEMORY;
121 goto cleanup;
122 }
123
124 result = radv_graphics_pipeline_create(device_h,
125 radv_pipeline_cache_to_handle(&device->meta_state.cache),
126 &(VkGraphicsPipelineCreateInfo) {
127 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
128 .stageCount = 2,
129 .pStages = (VkPipelineShaderStageCreateInfo[]) {
130 {
131 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
132 .stage = VK_SHADER_STAGE_VERTEX_BIT,
133 .module = vs_module_h,
134 .pName = "main",
135 },
136 {
137 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
138 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
139 .module = radv_shader_module_to_handle(&fs_module),
140 .pName = "main",
141 },
142 },
143 .pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
144 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
145 .vertexBindingDescriptionCount = 0,
146 .vertexAttributeDescriptionCount = 0,
147 },
148 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
149 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
150 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
151 .primitiveRestartEnable = false,
152 },
153 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
154 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
155 .viewportCount = 1,
156 .scissorCount = 1,
157 },
158 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
159 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
160 .depthClampEnable = false,
161 .rasterizerDiscardEnable = false,
162 .polygonMode = VK_POLYGON_MODE_FILL,
163 .cullMode = VK_CULL_MODE_NONE,
164 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
165 },
166 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
167 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
168 .rasterizationSamples = 1,
169 .sampleShadingEnable = false,
170 .pSampleMask = NULL,
171 .alphaToCoverageEnable = false,
172 .alphaToOneEnable = false,
173 },
174 .pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
175 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
176 .logicOpEnable = false,
177 .attachmentCount = 2,
178 .pAttachments = (VkPipelineColorBlendAttachmentState []) {
179 {
180 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
181 VK_COLOR_COMPONENT_G_BIT |
182 VK_COLOR_COMPONENT_B_BIT |
183 VK_COLOR_COMPONENT_A_BIT,
184 },
185 {
186 .colorWriteMask = 0,
187
188 }
189 },
190 },
191 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
192 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
193 .dynamicStateCount = 2,
194 .pDynamicStates = (VkDynamicState[]) {
195 VK_DYNAMIC_STATE_VIEWPORT,
196 VK_DYNAMIC_STATE_SCISSOR,
197 },
198 },
199 .renderPass = device->meta_state.resolve.pass,
200 .subpass = 0,
201 },
202 &(struct radv_graphics_pipeline_create_info) {
203 .use_rectlist = true,
204 .custom_blend_mode = V_028808_CB_RESOLVE,
205 },
206 &device->meta_state.alloc,
207 &device->meta_state.resolve.pipeline);
208 if (result != VK_SUCCESS)
209 goto cleanup;
210
211 /* on success, fall through and free the temporary fs NIR as well */
212
213 cleanup:
214 ralloc_free(fs_module.nir);
215 return result;
216 }
217
218 void
219 radv_device_finish_meta_resolve_state(struct radv_device *device)
220 {
221 struct radv_meta_state *state = &device->meta_state;
222 VkDevice device_h = radv_device_to_handle(device);
223 VkRenderPass pass_h = device->meta_state.resolve.pass;
224 const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
225
226 if (pass_h)
227 radv_DestroyRenderPass(device_h, pass_h,
228 &device->meta_state.alloc);
229
230 VkPipeline pipeline_h = state->resolve.pipeline;
231 if (pipeline_h) {
232 radv_DestroyPipeline(device_h, pipeline_h, alloc);
233 }
234 }
235
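/* Built once at device creation: the shared vertex shader, the resolve
 * render pass and the resolve pipeline all live in device->meta_state and
 * are torn down by radv_device_finish_meta_resolve_state() above.
 */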
236 VkResult
237 radv_device_init_meta_resolve_state(struct radv_device *device)
238 {
239 VkResult res = VK_SUCCESS;
240
241 zero(device->meta_state.resolve);
242
243 struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };
244 if (!vs_module.nir) {
245 /* XXX: Need more accurate error */
246 res = VK_ERROR_OUT_OF_HOST_MEMORY;
247 goto fail;
248 }
249
250 res = create_pass(device);
251 if (res != VK_SUCCESS)
252 goto fail;
253
254 VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);
255 res = create_pipeline(device, vs_module_h);
256 if (res != VK_SUCCESS)
257 goto fail;
258
259 goto cleanup;
260
261 fail:
262 radv_device_finish_meta_resolve_state(device);
263
264 cleanup:
265 ralloc_free(vs_module.nir);
266
267 return res;
268 }
269
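/* Binds the resolve pipeline (if not already bound), points the dynamic
 * viewport/scissor at the destination region and emits the 3-vertex
 * rectlist draw.  The FLUSH_AND_INV_CB bits before and after the draw
 * presumably keep the color caches coherent around the resolve.
 */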
270 static void
271 emit_resolve(struct radv_cmd_buffer *cmd_buffer,
272 const VkOffset2D *dest_offset,
273 const VkExtent2D *resolve_extent)
274 {
275 struct radv_device *device = cmd_buffer->device;
276 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
277
278 cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
279
280 VkPipeline pipeline_h = device->meta_state.resolve.pipeline;
281 RADV_FROM_HANDLE(radv_pipeline, pipeline, pipeline_h);
282
283 if (cmd_buffer->state.pipeline != pipeline) {
284 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
285 pipeline_h);
286 }
287
288 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
289 .x = dest_offset->x,
290 .y = dest_offset->y,
291 .width = resolve_extent->width,
292 .height = resolve_extent->height,
293 .minDepth = 0.0f,
294 .maxDepth = 1.0f
295 });
296
297 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkRect2D) {
298 .offset = *dest_offset,
299 .extent = *resolve_extent,
300 });
301
302 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
303 cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
304 }
305
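/* Three resolve implementations are available: the CB hardware resolve in
 * this file (restricted to single, whole-subresource resolves between
 * compatible surfaces), radv_meta_resolve_compute_image() as the general
 * fallback, and radv_meta_resolve_fragment_image() for destinations that
 * should keep being written through the CB (e.g. DCC-compressed ones).
 */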
306 enum radv_resolve_method {
307 RESOLVE_HW,
308 RESOLVE_COMPUTE,
309 RESOLVE_FRAGMENT,
310 };
311
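/* Downgrade from the CB resolve when source and destination are not tiled
 * the same way; in that case prefer the fragment path if the destination
 * is DCC compressed, otherwise fall back to compute.
 */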
312 static void radv_pick_resolve_method_images(struct radv_image *src_image,
313 struct radv_image *dest_image,
314 enum radv_resolve_method *method)
315
316 {
317 if (dest_image->surface.micro_tile_mode != src_image->surface.micro_tile_mode) {
318 if (dest_image->surface.level[0].dcc_enabled)
319 *method = RESOLVE_FRAGMENT;
320 else
321 *method = RESOLVE_COMPUTE;
322 }
323 }
324
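/* vkCmdResolveImage: pick a resolve method from the regions and the two
 * images, hand off to the fragment or compute implementation if needed,
 * otherwise do the CB resolve inline below.
 */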
325 void radv_CmdResolveImage(
326 VkCommandBuffer cmd_buffer_h,
327 VkImage src_image_h,
328 VkImageLayout src_image_layout,
329 VkImage dest_image_h,
330 VkImageLayout dest_image_layout,
331 uint32_t region_count,
332 const VkImageResolve* regions)
333 {
334 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, cmd_buffer_h);
335 RADV_FROM_HANDLE(radv_image, src_image, src_image_h);
336 RADV_FROM_HANDLE(radv_image, dest_image, dest_image_h);
337 struct radv_device *device = cmd_buffer->device;
338 struct radv_meta_saved_state saved_state;
339 VkDevice device_h = radv_device_to_handle(device);
340 enum radv_resolve_method resolve_method = RESOLVE_HW;
341 /* we can use the hw resolve only for single full resolves */
342 if (region_count == 1) {
343 if (regions[0].srcOffset.x ||
344 regions[0].srcOffset.y ||
345 regions[0].srcOffset.z)
346 resolve_method = RESOLVE_COMPUTE;
347 if (regions[0].dstOffset.x ||
348 regions[0].dstOffset.y ||
349 regions[0].dstOffset.z)
350 resolve_method = RESOLVE_COMPUTE;
351
352 if (regions[0].extent.width != src_image->info.width ||
353 regions[0].extent.height != src_image->info.height ||
354 regions[0].extent.depth != src_image->info.depth)
355 resolve_method = RESOLVE_COMPUTE;
356 } else
357 resolve_method = RESOLVE_COMPUTE;
358
359 radv_pick_resolve_method_images(src_image, dest_image,
360 &resolve_method);
361
362 if (resolve_method == RESOLVE_FRAGMENT) {
363 radv_meta_resolve_fragment_image(cmd_buffer,
364 src_image,
365 src_image_layout,
366 dest_image,
367 dest_image_layout,
368 region_count, regions);
369 return;
370 }
371
372 if (resolve_method == RESOLVE_COMPUTE) {
373 radv_meta_resolve_compute_image(cmd_buffer,
374 src_image,
375 src_image_layout,
376 dest_image,
377 dest_image_layout,
378 region_count, regions);
379 return;
380 }
381
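/* Hardware resolve path: save the graphics state and run the resolve
 * render pass once per region (and per array layer, below).
 */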
382 radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
383
384 assert(src_image->info.samples > 1);
385 assert(dest_image->info.samples == 1);
386
387 if (src_image->info.samples >= 16) {
388 /* See commit aa3f9aaf31e9056a255f9e0472ebdfdaa60abe54 for the
389 * glBlitFramebuffer workaround for samples >= 16.
390 */
391 radv_finishme("vkCmdResolveImage: need interpolation workaround when "
392 "samples >= 16");
393 }
394
395 if (src_image->info.array_size > 1)
396 radv_finishme("vkCmdResolveImage: multisample array images");
397
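/* The destination is written through the CB, so initialize its DCC
 * metadata before the resolve touches it.
 */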
398 if (dest_image->surface.dcc_size) {
399 radv_initialize_dcc(cmd_buffer, dest_image, 0xffffffff);
400 }
401 for (uint32_t r = 0; r < region_count; ++r) {
402 const VkImageResolve *region = &regions[r];
403
404 /* From the Vulkan 1.0 spec:
405 *
406 * - The aspectMask member of srcSubresource and dstSubresource must
407 * only contain VK_IMAGE_ASPECT_COLOR_BIT
408 *
409 * - The layerCount member of srcSubresource and dstSubresource must
410 * match
411 */
412 assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
413 assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
414 assert(region->srcSubresource.layerCount ==
415 region->dstSubresource.layerCount);
416
417 const uint32_t src_base_layer =
418 radv_meta_get_iview_layer(src_image, &region->srcSubresource,
419 &region->srcOffset);
420
421 const uint32_t dest_base_layer =
422 radv_meta_get_iview_layer(dest_image, &region->dstSubresource,
423 &region->dstOffset);
424
425 /**
426 * From Vulkan 1.0.6 spec: 18.6 Resolving Multisample Images
427 *
428 * extent is the size in texels of the source image to resolve in width,
429 * height and depth. 1D images use only x and width. 2D images use x, y,
430 * width and height. 3D images use x, y, z, width, height and depth.
431 *
432 * srcOffset and dstOffset select the initial x, y, and z offsets in
433 * texels of the sub-regions of the source and destination image data.
434 * extent is the size in texels of the source image to resolve in width,
435 * height and depth. 1D images use only x and width. 2D images use x, y,
436 * width and height. 3D images use x, y, z, width, height and depth.
437 */
438 const struct VkExtent3D extent =
439 radv_sanitize_image_extent(src_image->type, region->extent);
440 const struct VkOffset3D dstOffset =
441 radv_sanitize_image_offset(dest_image->type, region->dstOffset);
442
443
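/* Resolve one array layer at a time: single-layer views of the source and
 * destination become the two color attachments of a throwaway framebuffer
 * for the resolve render pass.
 */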
444 for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
445 ++layer) {
446
447 struct radv_image_view src_iview;
448 radv_image_view_init(&src_iview, cmd_buffer->device,
449 &(VkImageViewCreateInfo) {
450 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
451 .image = src_image_h,
452 .viewType = radv_meta_get_view_type(src_image),
453 .format = src_image->vk_format,
454 .subresourceRange = {
455 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
456 .baseMipLevel = region->srcSubresource.mipLevel,
457 .levelCount = 1,
458 .baseArrayLayer = src_base_layer + layer,
459 .layerCount = 1,
460 },
461 },
462 cmd_buffer, VK_IMAGE_USAGE_SAMPLED_BIT);
463
464 struct radv_image_view dest_iview;
465 radv_image_view_init(&dest_iview, cmd_buffer->device,
466 &(VkImageViewCreateInfo) {
467 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
468 .image = dest_image_h,
469 .viewType = radv_meta_get_view_type(dest_image),
470 .format = dest_image->vk_format,
471 .subresourceRange = {
472 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
473 .baseMipLevel = region->dstSubresource.mipLevel,
474 .levelCount = 1,
475 .baseArrayLayer = dest_base_layer + layer,
476 .layerCount = 1,
477 },
478 },
479 cmd_buffer, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
480
481 VkFramebuffer fb_h;
482 radv_CreateFramebuffer(device_h,
483 &(VkFramebufferCreateInfo) {
484 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
485 .attachmentCount = 2,
486 .pAttachments = (VkImageView[]) {
487 radv_image_view_to_handle(&src_iview),
488 radv_image_view_to_handle(&dest_iview),
489 },
490 .width = radv_minify(dest_image->info.width,
491 region->dstSubresource.mipLevel),
492 .height = radv_minify(dest_image->info.height,
493 region->dstSubresource.mipLevel),
494 .layers = 1
495 },
496 &cmd_buffer->pool->alloc,
497 &fb_h);
498
499 radv_CmdBeginRenderPass(cmd_buffer_h,
500 &(VkRenderPassBeginInfo) {
501 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
502 .renderPass = device->meta_state.resolve.pass,
503 .framebuffer = fb_h,
504 .renderArea = {
505 .offset = {
506 dstOffset.x,
507 dstOffset.y,
508 },
509 .extent = {
510 extent.width,
511 extent.height,
512 }
513 },
514 .clearValueCount = 0,
515 .pClearValues = NULL,
516 },
517 VK_SUBPASS_CONTENTS_INLINE);
518
519 emit_resolve(cmd_buffer,
520 &(VkOffset2D) {
521 .x = dstOffset.x,
522 .y = dstOffset.y,
523 },
524 &(VkExtent2D) {
525 .width = extent.width,
526 .height = extent.height,
527 });
528
529 radv_CmdEndRenderPass(cmd_buffer_h);
530
531 radv_DestroyFramebuffer(device_h, fb_h,
532 &cmd_buffer->pool->alloc);
533 }
534 }
535
536 radv_meta_restore(&saved_state, cmd_buffer);
537 }
538
539 /**
540 * Emit any needed resolves for the current subpass.
541 */
542 void
543 radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer *cmd_buffer)
544 {
545 struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
546 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
547 struct radv_meta_saved_state saved_state;
548 enum radv_resolve_method resolve_method = RESOLVE_HW;
549
550 /* FINISHME(perf): Skip clears for resolve attachments.
551 *
552 * From the Vulkan 1.0 spec:
553 *
554 * If the first use of an attachment in a render pass is as a resolve
555 * attachment, then the loadOp is effectively ignored as the resolve is
556 * guaranteed to overwrite all pixels in the render area.
557 */
558
559 if (!subpass->has_resolve)
560 return;
561
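/* Check every source/destination pair first: if any pair cannot use the
 * CB resolve, the whole subpass is handled by the fragment or compute
 * path instead.
 */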
562 for (uint32_t i = 0; i < subpass->color_count; ++i) {
563 VkAttachmentReference src_att = subpass->color_attachments[i];
564 VkAttachmentReference dest_att = subpass->resolve_attachments[i];
565 struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;
566 struct radv_image *src_img = cmd_buffer->state.framebuffer->attachments[src_att.attachment].attachment->image;
567
568 radv_pick_resolve_method_images(src_img, dst_img, &resolve_method);
569 if (resolve_method == RESOLVE_FRAGMENT) {
570 break;
571 }
572 }
573
574 if (resolve_method == RESOLVE_COMPUTE) {
575 radv_cmd_buffer_resolve_subpass_cs(cmd_buffer);
576 return;
577 } else if (resolve_method == RESOLVE_FRAGMENT) {
578 radv_cmd_buffer_resolve_subpass_fs(cmd_buffer);
579 return;
580 }
581
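/* CB resolve: for each color/resolve attachment pair, switch to a
 * temporary two-attachment subpass, resolve the full framebuffer, and
 * restore the original subpass at the end.
 */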
582 radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
583
584 for (uint32_t i = 0; i < subpass->color_count; ++i) {
585 VkAttachmentReference src_att = subpass->color_attachments[i];
586 VkAttachmentReference dest_att = subpass->resolve_attachments[i];
587 if (dest_att.attachment == VK_ATTACHMENT_UNUSED)
588 continue;
589 struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;
590
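/* As in radv_CmdResolveImage(), the destination's DCC has to be
 * initialized before the CB writes resolved data into it.
 */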
591 if (dst_img->surface.dcc_size) {
592 radv_initialize_dcc(cmd_buffer, dst_img, 0xffffffff);
593 cmd_buffer->state.attachments[dest_att.attachment].current_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
594 }
595
596 struct radv_subpass resolve_subpass = {
597 .color_count = 2,
598 .color_attachments = (VkAttachmentReference[]) { src_att, dest_att },
599 .depth_stencil_attachment = { .attachment = VK_ATTACHMENT_UNUSED },
600 };
601
602 radv_cmd_buffer_set_subpass(cmd_buffer, &resolve_subpass, false);
603
604 /* Subpass resolves must respect the render area. We can ignore the
605 * render area here because vkCmdBeginRenderPass set the render area
606 * with 3DSTATE_DRAWING_RECTANGLE.
607 *
608 * XXX(chadv): Does the hardware really respect
609 * 3DSTATE_DRAWING_RECTANGLE when drawing a 3DPRIM_RECTLIST?
610 */
611 emit_resolve(cmd_buffer,
612 &(VkOffset2D) { 0, 0 },
613 &(VkExtent2D) { fb->width, fb->height });
614 }
615
616 cmd_buffer->state.subpass = subpass;
617 radv_meta_restore(&saved_state, cmd_buffer);
618 }