/* mesa.git: src/amd/vulkan/radv_meta_resolve.c @ e932976df280ebdffe9edb8dc3c77dc8ae3c581b */
/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "radv_meta.h"
#include "radv_private.h"
#include "vk_format.h"
#include "nir/nir_builder.h"
#include "sid.h"

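/* Fragment shader for the hardware resolve path.  Its output is essentially a
 * placeholder: with the CB_RESOLVE custom blend mode (see create_pipeline
 * below) the color block produces the resolved value from the multisampled
 * source attachment, so the constant written here does not determine the
 * destination contents.
 */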
/* emit 0, 0, 0, 1 */
static nir_shader *
build_nir_fs(void)
{
	const struct glsl_type *vec4 = glsl_vec4_type();
	nir_builder b;
	nir_variable *f_color; /* vec4, fragment output color */

	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
	b.shader->info.name = ralloc_asprintf(b.shader, "meta_resolve_fs");

	f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
				      "f_color");
	f_color->data.location = FRAG_RESULT_DATA0;
	nir_store_var(&b, f_color, nir_imm_vec4(&b, 0.0, 0.0, 0.0, 1.0), 0xf);

	return b.shader;
}

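/* Render pass with two color attachments of the same format: attachment 0 is
 * the multisampled source in VK_IMAGE_LAYOUT_GENERAL, attachment 1 is the
 * single-sample destination in VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL.
 * Both attachments are loaded and stored so no existing contents are
 * discarded.
 */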
static VkResult
create_pass(struct radv_device *device, VkFormat vk_format, VkRenderPass *pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);
	const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
	VkAttachmentDescription attachments[2];
	int i;

	for (i = 0; i < 2; i++) {
		attachments[i].format = vk_format;
		attachments[i].samples = 1;
		attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
		attachments[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	}
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	result = radv_CreateRenderPass(device_h,
		&(VkRenderPassCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			.attachmentCount = 2,
			.pAttachments = attachments,
			.subpassCount = 1,
			.pSubpasses = &(VkSubpassDescription) {
				.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
				.inputAttachmentCount = 0,
				.colorAttachmentCount = 2,
				.pColorAttachments = (VkAttachmentReference[]) {
					{
						.attachment = 0,
						.layout = VK_IMAGE_LAYOUT_GENERAL,
					},
					{
						.attachment = 1,
						.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
					},
				},
				.pResolveAttachments = NULL,
				.pDepthStencilAttachment = &(VkAttachmentReference) {
					.attachment = VK_ATTACHMENT_UNUSED,
				},
				.preserveAttachmentCount = 0,
				.pPreserveAttachments = NULL,
			},
			.dependencyCount = 0,
		},
		alloc,
		pass);

	return result;
}

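/* Graphics pipeline for the hardware resolve: no vertex input, a rectlist
 * primitive covering the destination region, dynamic viewport/scissor and
 * two color attachments.  The destination attachment's write mask is 0 and
 * the pipeline is created with the V_028808_CB_RESOLVE custom blend mode,
 * so the color block itself averages the source samples into the
 * destination surface rather than using the fragment shader output.
 */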
static VkResult
create_pipeline(struct radv_device *device,
		VkShaderModule vs_module_h,
		VkPipeline *pipeline,
		VkRenderPass pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);

	struct radv_shader_module fs_module = {
		.nir = build_nir_fs(),
	};

	if (!fs_module.nir) {
		/* XXX: Need more accurate error */
		result = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto cleanup;
	}

	VkPipelineLayoutCreateInfo pl_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
		.setLayoutCount = 0,
		.pSetLayouts = NULL,
		.pushConstantRangeCount = 0,
		.pPushConstantRanges = NULL,
	};

	if (!device->meta_state.resolve.p_layout) {
		result = radv_CreatePipelineLayout(radv_device_to_handle(device),
						   &pl_create_info,
						   &device->meta_state.alloc,
						   &device->meta_state.resolve.p_layout);
		if (result != VK_SUCCESS)
			goto cleanup;
	}

	result = radv_graphics_pipeline_create(device_h,
		radv_pipeline_cache_to_handle(&device->meta_state.cache),
		&(VkGraphicsPipelineCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
			.stageCount = 2,
			.pStages = (VkPipelineShaderStageCreateInfo[]) {
				{
					.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
					.stage = VK_SHADER_STAGE_VERTEX_BIT,
					.module = vs_module_h,
					.pName = "main",
				},
				{
					.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
					.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
					.module = radv_shader_module_to_handle(&fs_module),
					.pName = "main",
				},
			},
			.pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
				.vertexBindingDescriptionCount = 0,
				.vertexAttributeDescriptionCount = 0,
			},
			.pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
				.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
				.primitiveRestartEnable = false,
			},
			.pViewportState = &(VkPipelineViewportStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
				.viewportCount = 1,
				.scissorCount = 1,
			},
			.pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
				.depthClampEnable = false,
				.rasterizerDiscardEnable = false,
				.polygonMode = VK_POLYGON_MODE_FILL,
				.cullMode = VK_CULL_MODE_NONE,
				.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
			},
			.pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
				.rasterizationSamples = 1,
				.sampleShadingEnable = false,
				.pSampleMask = NULL,
				.alphaToCoverageEnable = false,
				.alphaToOneEnable = false,
			},
			.pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
				.logicOpEnable = false,
				.attachmentCount = 2,
				.pAttachments = (VkPipelineColorBlendAttachmentState []) {
					{
						.colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
								  VK_COLOR_COMPONENT_G_BIT |
								  VK_COLOR_COMPONENT_B_BIT |
								  VK_COLOR_COMPONENT_A_BIT,
					},
					{
						.colorWriteMask = 0,
					}
				},
			},
			.pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
				.dynamicStateCount = 2,
				.pDynamicStates = (VkDynamicState[]) {
					VK_DYNAMIC_STATE_VIEWPORT,
					VK_DYNAMIC_STATE_SCISSOR,
				},
			},
			.layout = device->meta_state.resolve.p_layout,
			.renderPass = pass,
			.subpass = 0,
		},
		&(struct radv_graphics_pipeline_create_info) {
			.use_rectlist = true,
			.custom_blend_mode = V_028808_CB_RESOLVE,
		},
		&device->meta_state.alloc, pipeline);
	/* Fall through to cleanup whether or not the create succeeded; result
	 * already holds the status.
	 */

cleanup:
	ralloc_free(fs_module.nir);
	return result;
}

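/* Tear down the per-format render passes and pipelines and the shared
 * pipeline layout created for the hardware resolve path.
 */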
void
radv_device_finish_meta_resolve_state(struct radv_device *device)
{
	struct radv_meta_state *state = &device->meta_state;

	for (uint32_t j = 0; j < NUM_META_FS_KEYS; j++) {
		radv_DestroyRenderPass(radv_device_to_handle(device),
				       state->resolve.pass[j], &state->alloc);
		radv_DestroyPipeline(radv_device_to_handle(device),
				     state->resolve.pipeline[j], &state->alloc);
	}
	radv_DestroyPipelineLayout(radv_device_to_handle(device),
				   state->resolve.p_layout, &state->alloc);
}

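/* One representative format per fragment-shader key: radv_format_meta_fs_key()
 * buckets color formats, and a render pass plus pipeline is created below for
 * each entry, indexed by that key.
 */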
static VkFormat pipeline_formats[] = {
	VK_FORMAT_R8G8B8A8_UNORM,
	VK_FORMAT_R8G8B8A8_UINT,
	VK_FORMAT_R8G8B8A8_SINT,
	VK_FORMAT_A2R10G10B10_UINT_PACK32,
	VK_FORMAT_A2R10G10B10_SINT_PACK32,
	VK_FORMAT_R16G16B16A16_UNORM,
	VK_FORMAT_R16G16B16A16_SNORM,
	VK_FORMAT_R16G16B16A16_UINT,
	VK_FORMAT_R16G16B16A16_SINT,
	VK_FORMAT_R32_SFLOAT,
	VK_FORMAT_R32G32_SFLOAT,
	VK_FORMAT_R32G32B32A32_SFLOAT
};

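/* Create the hardware-resolve render passes and pipelines up front, one per
 * fragment-shader key.  On failure, anything already created is destroyed
 * before returning the error.
 */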
VkResult
radv_device_init_meta_resolve_state(struct radv_device *device)
{
	VkResult res = VK_SUCCESS;
	struct radv_meta_state *state = &device->meta_state;
	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };
	if (!vs_module.nir) {
		/* XXX: Need more accurate error */
		res = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto fail;
	}

	for (uint32_t i = 0; i < ARRAY_SIZE(pipeline_formats); ++i) {
		VkFormat format = pipeline_formats[i];
		unsigned fs_key = radv_format_meta_fs_key(format);
		res = create_pass(device, format, &state->resolve.pass[fs_key]);
		if (res != VK_SUCCESS)
			goto fail;

		VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);
		res = create_pipeline(device, vs_module_h,
				      &state->resolve.pipeline[fs_key], state->resolve.pass[fs_key]);
		if (res != VK_SUCCESS)
			goto fail;
	}

	goto cleanup;

fail:
	radv_device_finish_meta_resolve_state(device);

cleanup:
	ralloc_free(vs_module.nir);

	return res;
}

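/* Record the actual resolve draw: bind the per-format resolve pipeline, point
 * the viewport and scissor at the destination region and draw a rectlist
 * covering it.  The color-buffer caches are flushed and invalidated before
 * and after the draw so the color block sees coherent data.
 */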
static void
emit_resolve(struct radv_cmd_buffer *cmd_buffer,
	     VkFormat vk_format,
	     const VkOffset2D *dest_offset,
	     const VkExtent2D *resolve_extent)
{
	struct radv_device *device = cmd_buffer->device;
	VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
	unsigned fs_key = radv_format_meta_fs_key(vk_format);

	cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;

	radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
			     device->meta_state.resolve.pipeline[fs_key]);

	radv_CmdSetViewport(cmd_buffer_h, 0, 1, &(VkViewport) {
		.x = dest_offset->x,
		.y = dest_offset->y,
		.width = resolve_extent->width,
		.height = resolve_extent->height,
		.minDepth = 0.0f,
		.maxDepth = 1.0f
	});

	radv_CmdSetScissor(cmd_buffer_h, 0, 1, &(VkRect2D) {
		.offset = *dest_offset,
		.extent = *resolve_extent,
	});

	radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
	cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
}

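/* How a given resolve is performed: RESOLVE_HW uses the CB resolve described
 * above, while RESOLVE_COMPUTE and RESOLVE_FRAGMENT fall back to the
 * compute-shader and fragment-shader meta paths.
 */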
enum radv_resolve_method {
	RESOLVE_HW,
	RESOLVE_COMPUTE,
	RESOLVE_FRAGMENT,
};

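/* Downgrade from the HW path when the images cannot be handled by the CB
 * resolve: 16-bit norm and integer formats go to compute, a destination that
 * stays DCC-compressed in its layout goes to the fragment path, and a
 * micro-tile-mode mismatch between source and destination goes to compute.
 */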
static void radv_pick_resolve_method_images(struct radv_image *src_image,
					    struct radv_image *dest_image,
					    VkImageLayout dest_image_layout,
					    struct radv_cmd_buffer *cmd_buffer,
					    enum radv_resolve_method *method)
{
	uint32_t queue_mask = radv_image_queue_family_mask(dest_image,
							   cmd_buffer->queue_family_index,
							   cmd_buffer->queue_family_index);

	if (src_image->vk_format == VK_FORMAT_R16G16_UNORM ||
	    src_image->vk_format == VK_FORMAT_R16G16_SNORM)
		*method = RESOLVE_COMPUTE;
	else if (vk_format_is_int(src_image->vk_format))
		*method = RESOLVE_COMPUTE;

	if (radv_layout_dcc_compressed(dest_image, dest_image_layout, queue_mask)) {
		*method = RESOLVE_FRAGMENT;
	} else if (dest_image->surface.micro_tile_mode != src_image->surface.micro_tile_mode) {
		*method = RESOLVE_COMPUTE;
	}
}

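/* vkCmdResolveImage: the HW (CB) path is only used for a single, full-image,
 * zero-offset resolve; anything else is punted to the compute path, and the
 * per-image checks above may further force compute or fragment.  The HW path
 * then builds a temporary framebuffer and render pass instance per region and
 * layer and records one resolve draw for each.
 */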
void radv_CmdResolveImage(
	VkCommandBuffer      cmd_buffer_h,
	VkImage              src_image_h,
	VkImageLayout        src_image_layout,
	VkImage              dest_image_h,
	VkImageLayout        dest_image_layout,
	uint32_t             region_count,
	const VkImageResolve *regions)
{
	RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, cmd_buffer_h);
	RADV_FROM_HANDLE(radv_image, src_image, src_image_h);
	RADV_FROM_HANDLE(radv_image, dest_image, dest_image_h);
	struct radv_device *device = cmd_buffer->device;
	struct radv_meta_saved_state saved_state;
	VkDevice device_h = radv_device_to_handle(device);
	enum radv_resolve_method resolve_method = RESOLVE_HW;

	/* we can use the hw resolve only for single full resolves */
	if (region_count == 1) {
		if (regions[0].srcOffset.x ||
		    regions[0].srcOffset.y ||
		    regions[0].srcOffset.z)
			resolve_method = RESOLVE_COMPUTE;
		if (regions[0].dstOffset.x ||
		    regions[0].dstOffset.y ||
		    regions[0].dstOffset.z)
			resolve_method = RESOLVE_COMPUTE;

		if (regions[0].extent.width != src_image->info.width ||
		    regions[0].extent.height != src_image->info.height ||
		    regions[0].extent.depth != src_image->info.depth)
			resolve_method = RESOLVE_COMPUTE;
	} else
		resolve_method = RESOLVE_COMPUTE;

	radv_pick_resolve_method_images(src_image, dest_image,
					dest_image_layout, cmd_buffer,
					&resolve_method);

	if (resolve_method == RESOLVE_FRAGMENT) {
		radv_meta_resolve_fragment_image(cmd_buffer,
						 src_image,
						 src_image_layout,
						 dest_image,
						 dest_image_layout,
						 region_count, regions);
		return;
	}

	if (resolve_method == RESOLVE_COMPUTE) {
		radv_meta_resolve_compute_image(cmd_buffer,
						src_image,
						src_image_layout,
						dest_image,
						dest_image_layout,
						region_count, regions);
		return;
	}

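	/* From here on we are committed to the HW (CB) resolve path. */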
	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE);

	assert(src_image->info.samples > 1);
	if (src_image->info.samples <= 1) {
		/* this causes GPU hangs if we get past here */
		fprintf(stderr, "radv: Illegal resolve operation (src not multisampled), will hang GPU.\n");
		return;
	}
	assert(dest_image->info.samples == 1);

	if (src_image->info.samples >= 16) {
		/* See commit aa3f9aaf31e9056a255f9e0472ebdfdaa60abe54 for the
		 * glBlitFramebuffer workaround for samples >= 16.
		 */
		radv_finishme("vkCmdResolveImage: need interpolation workaround when "
			      "samples >= 16");
	}

	if (src_image->info.array_size > 1)
		radv_finishme("vkCmdResolveImage: multisample array images");

	if (radv_image_has_dcc(dest_image)) {
		radv_initialize_dcc(cmd_buffer, dest_image, 0xffffffff);
	}

	unsigned fs_key = radv_format_meta_fs_key(dest_image->vk_format);
	for (uint32_t r = 0; r < region_count; ++r) {
		const VkImageResolve *region = &regions[r];

		/* From the Vulkan 1.0 spec:
		 *
		 *    - The aspectMask member of srcSubresource and dstSubresource must
		 *      only contain VK_IMAGE_ASPECT_COLOR_BIT
		 *
		 *    - The layerCount member of srcSubresource and dstSubresource must
		 *      match
		 */
		assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
		assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
		assert(region->srcSubresource.layerCount ==
		       region->dstSubresource.layerCount);

		const uint32_t src_base_layer =
			radv_meta_get_iview_layer(src_image, &region->srcSubresource,
						  &region->srcOffset);

		const uint32_t dest_base_layer =
			radv_meta_get_iview_layer(dest_image, &region->dstSubresource,
						  &region->dstOffset);

		/* From the Vulkan 1.0.6 spec, 18.6 Resolving Multisample Images:
		 *
		 *    srcOffset and dstOffset select the initial x, y, and z offsets in
		 *    texels of the sub-regions of the source and destination image data.
		 *    extent is the size in texels of the source image to resolve in width,
		 *    height and depth. 1D images use only x and width. 2D images use x, y,
		 *    width and height. 3D images use x, y, z, width, height and depth.
		 */
		const struct VkExtent3D extent =
			radv_sanitize_image_extent(src_image->type, region->extent);
		const struct VkOffset3D dstOffset =
			radv_sanitize_image_offset(dest_image->type, region->dstOffset);

		for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
		     ++layer) {

			struct radv_image_view src_iview;
			radv_image_view_init(&src_iview, cmd_buffer->device,
				&(VkImageViewCreateInfo) {
					.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
					.image = src_image_h,
					.viewType = radv_meta_get_view_type(src_image),
					.format = src_image->vk_format,
					.subresourceRange = {
						.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
						.baseMipLevel = region->srcSubresource.mipLevel,
						.levelCount = 1,
						.baseArrayLayer = src_base_layer + layer,
						.layerCount = 1,
					},
				});

			struct radv_image_view dest_iview;
			radv_image_view_init(&dest_iview, cmd_buffer->device,
				&(VkImageViewCreateInfo) {
					.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
					.image = dest_image_h,
					.viewType = radv_meta_get_view_type(dest_image),
					.format = dest_image->vk_format,
					.subresourceRange = {
						.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
						.baseMipLevel = region->dstSubresource.mipLevel,
						.levelCount = 1,
						.baseArrayLayer = dest_base_layer + layer,
						.layerCount = 1,
					},
				});

			VkFramebuffer fb_h;
			radv_CreateFramebuffer(device_h,
				&(VkFramebufferCreateInfo) {
					.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
					.attachmentCount = 2,
					.pAttachments = (VkImageView[]) {
						radv_image_view_to_handle(&src_iview),
						radv_image_view_to_handle(&dest_iview),
					},
					.width = radv_minify(dest_image->info.width,
							     region->dstSubresource.mipLevel),
					.height = radv_minify(dest_image->info.height,
							      region->dstSubresource.mipLevel),
					.layers = 1
				},
				&cmd_buffer->pool->alloc,
				&fb_h);

			radv_CmdBeginRenderPass(cmd_buffer_h,
				&(VkRenderPassBeginInfo) {
					.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
					.renderPass = device->meta_state.resolve.pass[fs_key],
					.framebuffer = fb_h,
					.renderArea = {
						.offset = {
							dstOffset.x,
							dstOffset.y,
						},
						.extent = {
							extent.width,
							extent.height,
						}
					},
					.clearValueCount = 0,
					.pClearValues = NULL,
				},
				VK_SUBPASS_CONTENTS_INLINE);

			emit_resolve(cmd_buffer,
				     dest_iview.vk_format,
				     &(VkOffset2D) {
					     .x = dstOffset.x,
					     .y = dstOffset.y,
				     },
				     &(VkExtent2D) {
					     .width = extent.width,
					     .height = extent.height,
				     });

			radv_CmdEndRenderPass(cmd_buffer_h);

			radv_DestroyFramebuffer(device_h, fb_h,
						&cmd_buffer->pool->alloc);
		}
	}

	radv_meta_restore(&saved_state, cmd_buffer);
}

/**
 * Emit any needed resolves for the current subpass.
 */
void
radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer *cmd_buffer)
{
	struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
	const struct radv_subpass *subpass = cmd_buffer->state.subpass;
	struct radv_meta_saved_state saved_state;
	enum radv_resolve_method resolve_method = RESOLVE_HW;

	/* FINISHME(perf): Skip clears for resolve attachments.
	 *
	 * From the Vulkan 1.0 spec:
	 *
	 *    If the first use of an attachment in a render pass is as a resolve
	 *    attachment, then the loadOp is effectively ignored as the resolve is
	 *    guaranteed to overwrite all pixels in the render area.
	 */

	if (!subpass->has_resolve)
		return;

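	/* First pass over the attachments: if any source/destination pair needs
	 * the compute or fragment path, the whole subpass is resolved with that
	 * path below instead of the CB resolve.
	 */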
	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		VkAttachmentReference src_att = subpass->color_attachments[i];
		VkAttachmentReference dest_att = subpass->resolve_attachments[i];

		if (src_att.attachment == VK_ATTACHMENT_UNUSED ||
		    dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;
		struct radv_image *src_img = cmd_buffer->state.framebuffer->attachments[src_att.attachment].attachment->image;

		radv_pick_resolve_method_images(src_img, dst_img, dest_att.layout, cmd_buffer, &resolve_method);
		if (resolve_method == RESOLVE_FRAGMENT) {
			break;
		}
	}

	if (resolve_method == RESOLVE_COMPUTE) {
		radv_cmd_buffer_resolve_subpass_cs(cmd_buffer);
		return;
	} else if (resolve_method == RESOLVE_FRAGMENT) {
		radv_cmd_buffer_resolve_subpass_fs(cmd_buffer);
		return;
	}

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE);

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		VkAttachmentReference src_att = subpass->color_attachments[i];
		VkAttachmentReference dest_att = subpass->resolve_attachments[i];

		if (src_att.attachment == VK_ATTACHMENT_UNUSED ||
		    dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;

		if (radv_image_has_dcc(dst_img)) {
			radv_initialize_dcc(cmd_buffer, dst_img, 0xffffffff);
			cmd_buffer->state.attachments[dest_att.attachment].current_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		}

		struct radv_subpass resolve_subpass = {
			.color_count = 2,
			.color_attachments = (VkAttachmentReference[]) { src_att, dest_att },
			.depth_stencil_attachment = { .attachment = VK_ATTACHMENT_UNUSED },
		};

		radv_cmd_buffer_set_subpass(cmd_buffer, &resolve_subpass, false);

		emit_resolve(cmd_buffer,
			     dst_img->vk_format,
			     &(VkOffset2D) { 0, 0 },
			     &(VkExtent2D) { fb->width, fb->height });
	}

	cmd_buffer->state.subpass = subpass;
	radv_meta_restore(&saved_state, cmd_buffer);
}