radv: drop unused code related to 16 sample locations
[mesa.git] src/amd/vulkan/radv_meta_resolve.c
/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "radv_meta.h"
#include "radv_private.h"
#include "vk_format.h"
#include "nir/nir_builder.h"
#include "sid.h"

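/* Fragment shader for the HW resolve path. The resolve itself is done by the
 * CB hardware (see the CB_RESOLVE custom blend mode in create_pipeline), so
 * this shader only has to provide a constant color export.
 */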
/* emit 0, 0, 0, 1 */
static nir_shader *
build_nir_fs(void)
{
	const struct glsl_type *vec4 = glsl_vec4_type();
	nir_builder b;
	nir_variable *f_color; /* vec4, fragment output color */

	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
	b.shader->info.name = ralloc_asprintf(b.shader, "meta_resolve_fs");

	f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
				      "f_color");
	f_color->data.location = FRAG_RESULT_DATA0;
	nir_store_var(&b, f_color, nir_imm_vec4(&b, 0.0, 0.0, 0.0, 1.0), 0xf);

	return b.shader;
}

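/* Create the meta render pass used by the HW resolve: two color attachments
 * of the same format, where attachment 0 is the multisampled source (kept in
 * GENERAL layout) and attachment 1 is the single-sample destination.
 */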
static VkResult
create_pass(struct radv_device *device, VkFormat vk_format, VkRenderPass *pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);
	const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
	VkAttachmentDescription attachments[2];
	int i;

	for (i = 0; i < 2; i++) {
		attachments[i].format = vk_format;
		attachments[i].samples = 1;
		attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
		attachments[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	}
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	result = radv_CreateRenderPass(device_h,
		&(VkRenderPassCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			.attachmentCount = 2,
			.pAttachments = attachments,
			.subpassCount = 1,
			.pSubpasses = &(VkSubpassDescription) {
				.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
				.inputAttachmentCount = 0,
				.colorAttachmentCount = 2,
				.pColorAttachments = (VkAttachmentReference[]) {
					{
						.attachment = 0,
						.layout = VK_IMAGE_LAYOUT_GENERAL,
					},
					{
						.attachment = 1,
						.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
					},
				},
				.pResolveAttachments = NULL,
				.pDepthStencilAttachment = &(VkAttachmentReference) {
					.attachment = VK_ATTACHMENT_UNUSED,
				},
				.preserveAttachmentCount = 0,
				.pPreserveAttachments = NULL,
			},
			.dependencyCount = 0,
		},
		alloc,
		pass);

	return result;
}

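/* Create the graphics pipeline for the HW resolve. The pipeline draws a
 * single rectangle (use_rectlist) and relies on the CB_RESOLVE custom blend
 * mode, so color writes to the destination attachment are masked off.
 */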
static VkResult
create_pipeline(struct radv_device *device,
		VkShaderModule vs_module_h,
		VkPipeline *pipeline,
		VkRenderPass pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);

	struct radv_shader_module fs_module = {
		.nir = build_nir_fs(),
	};

	if (!fs_module.nir) {
		/* XXX: Need more accurate error */
		result = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto cleanup;
	}

	VkPipelineLayoutCreateInfo pl_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
		.setLayoutCount = 0,
		.pSetLayouts = NULL,
		.pushConstantRangeCount = 0,
		.pPushConstantRanges = NULL,
	};

	if (!device->meta_state.resolve.p_layout) {
		result = radv_CreatePipelineLayout(radv_device_to_handle(device),
						   &pl_create_info,
						   &device->meta_state.alloc,
						   &device->meta_state.resolve.p_layout);
		if (result != VK_SUCCESS)
			goto cleanup;
	}

	result = radv_graphics_pipeline_create(device_h,
		radv_pipeline_cache_to_handle(&device->meta_state.cache),
		&(VkGraphicsPipelineCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
			.stageCount = 2,
			.pStages = (VkPipelineShaderStageCreateInfo[]) {
				{
					.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
					.stage = VK_SHADER_STAGE_VERTEX_BIT,
					.module = vs_module_h,
					.pName = "main",
				},
				{
					.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
					.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
					.module = radv_shader_module_to_handle(&fs_module),
					.pName = "main",
				},
			},
			.pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
				.vertexBindingDescriptionCount = 0,
				.vertexAttributeDescriptionCount = 0,
			},
			.pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
				.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
				.primitiveRestartEnable = false,
			},
			.pViewportState = &(VkPipelineViewportStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
				.viewportCount = 1,
				.scissorCount = 1,
			},
			.pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
				.depthClampEnable = false,
				.rasterizerDiscardEnable = false,
				.polygonMode = VK_POLYGON_MODE_FILL,
				.cullMode = VK_CULL_MODE_NONE,
				.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
			},
			.pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
				.rasterizationSamples = 1,
				.sampleShadingEnable = false,
				.pSampleMask = NULL,
				.alphaToCoverageEnable = false,
				.alphaToOneEnable = false,
			},
			.pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
				.logicOpEnable = false,
				.attachmentCount = 2,
				.pAttachments = (VkPipelineColorBlendAttachmentState []) {
					{
						.colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
								  VK_COLOR_COMPONENT_G_BIT |
								  VK_COLOR_COMPONENT_B_BIT |
								  VK_COLOR_COMPONENT_A_BIT,
					},
					{
						.colorWriteMask = 0,
					}
				},
			},
			.pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
				.dynamicStateCount = 2,
				.pDynamicStates = (VkDynamicState[]) {
					VK_DYNAMIC_STATE_VIEWPORT,
					VK_DYNAMIC_STATE_SCISSOR,
				},
			},
			.layout = device->meta_state.resolve.p_layout,
			.renderPass = pass,
			.subpass = 0,
		},
		&(struct radv_graphics_pipeline_create_info) {
			.use_rectlist = true,
			.custom_blend_mode = V_028808_CB_RESOLVE,
		},
		&device->meta_state.alloc, pipeline);

cleanup:
	ralloc_free(fs_module.nir);
	return result;
}

void
radv_device_finish_meta_resolve_state(struct radv_device *device)
{
	struct radv_meta_state *state = &device->meta_state;

	for (uint32_t j = 0; j < NUM_META_FS_KEYS; j++) {
		radv_DestroyRenderPass(radv_device_to_handle(device),
				       state->resolve.pass[j], &state->alloc);
		radv_DestroyPipeline(radv_device_to_handle(device),
				     state->resolve.pipeline[j], &state->alloc);
	}
	radv_DestroyPipelineLayout(radv_device_to_handle(device),
				   state->resolve.p_layout, &state->alloc);
}

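/* Eagerly build the resolve pass and pipeline for every meta FS key. With
 * on-demand meta state, they are built later by build_resolve_pipeline().
 */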
VkResult
radv_device_init_meta_resolve_state(struct radv_device *device, bool on_demand)
{
	if (on_demand)
		return VK_SUCCESS;

	VkResult res = VK_SUCCESS;
	struct radv_meta_state *state = &device->meta_state;
	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };
	if (!vs_module.nir) {
		/* XXX: Need more accurate error */
		res = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto fail;
	}

	for (uint32_t i = 0; i < NUM_META_FS_KEYS; ++i) {
		VkFormat format = radv_fs_key_format_exemplars[i];
		unsigned fs_key = radv_format_meta_fs_key(format);
		res = create_pass(device, format, &state->resolve.pass[fs_key]);
		if (res != VK_SUCCESS)
			goto fail;

		VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);
		res = create_pipeline(device, vs_module_h,
				      &state->resolve.pipeline[fs_key], state->resolve.pass[fs_key]);
		if (res != VK_SUCCESS)
			goto fail;
	}

	goto cleanup;

fail:
	radv_device_finish_meta_resolve_state(device);

cleanup:
	ralloc_free(vs_module.nir);

	return res;
}

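/* Record the draw that performs the HW resolve for one destination region:
 * bind the resolve pipeline for the given format, point viewport and scissor
 * at the destination rectangle and draw a single rectangle.
 */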
static void
emit_resolve(struct radv_cmd_buffer *cmd_buffer,
	     VkFormat vk_format,
	     const VkOffset2D *dest_offset,
	     const VkExtent2D *resolve_extent)
{
	struct radv_device *device = cmd_buffer->device;
	VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
	unsigned fs_key = radv_format_meta_fs_key(vk_format);

	cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;

	radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
			     device->meta_state.resolve.pipeline[fs_key]);

	radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
		.x = dest_offset->x,
		.y = dest_offset->y,
		.width = resolve_extent->width,
		.height = resolve_extent->height,
		.minDepth = 0.0f,
		.maxDepth = 1.0f
	});

	radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkRect2D) {
		.offset = *dest_offset,
		.extent = *resolve_extent,
	});

	radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
	cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
}

326
327 enum radv_resolve_method {
328 RESOLVE_HW,
329 RESOLVE_COMPUTE,
330 RESOLVE_FRAGMENT,
331 };
332
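/* Pick a resolve method for an image pair: certain formats, layered images
 * and mismatched tiling modes have to use the compute path, and a
 * DCC-compressed destination has to use the fragment path.
 */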
static void radv_pick_resolve_method_images(struct radv_image *src_image,
					    struct radv_image *dest_image,
					    VkImageLayout dest_image_layout,
					    struct radv_cmd_buffer *cmd_buffer,
					    enum radv_resolve_method *method)
{
	uint32_t queue_mask = radv_image_queue_family_mask(dest_image,
							   cmd_buffer->queue_family_index,
							   cmd_buffer->queue_family_index);

	if (src_image->vk_format == VK_FORMAT_R16G16_UNORM ||
	    src_image->vk_format == VK_FORMAT_R16G16_SNORM)
		*method = RESOLVE_COMPUTE;
	else if (vk_format_is_int(src_image->vk_format))
		*method = RESOLVE_COMPUTE;
	else if (src_image->info.array_size > 1 ||
		 dest_image->info.array_size > 1)
		*method = RESOLVE_COMPUTE;

	if (radv_layout_dcc_compressed(dest_image, dest_image_layout, queue_mask)) {
		*method = RESOLVE_FRAGMENT;
	} else if (dest_image->surface.micro_tile_mode != src_image->surface.micro_tile_mode) {
		*method = RESOLVE_COMPUTE;
	}
}

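/* Build the render pass and pipeline for fs_key on demand. The double check
 * around the meta mutex keeps concurrent command buffers from building the
 * same pipeline twice.
 */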
static VkResult
build_resolve_pipeline(struct radv_device *device,
		       unsigned fs_key)
{
	VkResult result = VK_SUCCESS;

	if (device->meta_state.resolve.pipeline[fs_key])
		return result;

	mtx_lock(&device->meta_state.mtx);
	if (device->meta_state.resolve.pipeline[fs_key]) {
		mtx_unlock(&device->meta_state.mtx);
		return result;
	}

	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };

	result = create_pass(device, radv_fs_key_format_exemplars[fs_key], &device->meta_state.resolve.pass[fs_key]);
	if (result != VK_SUCCESS)
		goto fail;

	VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);
	result = create_pipeline(device, vs_module_h, &device->meta_state.resolve.pipeline[fs_key], device->meta_state.resolve.pass[fs_key]);

fail:
	ralloc_free(vs_module.nir);
	mtx_unlock(&device->meta_state.mtx);
	return result;
}

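/* Entry point for vkCmdResolveImage. Partial or offset resolves and images
 * that the CB cannot handle are routed to the compute or fragment meta paths;
 * everything else is resolved per region and per layer with the HW path.
 */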
void radv_CmdResolveImage(
	VkCommandBuffer cmd_buffer_h,
	VkImage src_image_h,
	VkImageLayout src_image_layout,
	VkImage dest_image_h,
	VkImageLayout dest_image_layout,
	uint32_t region_count,
	const VkImageResolve* regions)
{
	RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, cmd_buffer_h);
	RADV_FROM_HANDLE(radv_image, src_image, src_image_h);
	RADV_FROM_HANDLE(radv_image, dest_image, dest_image_h);
	struct radv_device *device = cmd_buffer->device;
	struct radv_meta_saved_state saved_state;
	VkDevice device_h = radv_device_to_handle(device);
	enum radv_resolve_method resolve_method = RESOLVE_HW;

	/* we can use the hw resolve only for single full resolves */
	if (region_count == 1) {
		if (regions[0].srcOffset.x ||
		    regions[0].srcOffset.y ||
		    regions[0].srcOffset.z)
			resolve_method = RESOLVE_COMPUTE;
		if (regions[0].dstOffset.x ||
		    regions[0].dstOffset.y ||
		    regions[0].dstOffset.z)
			resolve_method = RESOLVE_COMPUTE;

		if (regions[0].extent.width != src_image->info.width ||
		    regions[0].extent.height != src_image->info.height ||
		    regions[0].extent.depth != src_image->info.depth)
			resolve_method = RESOLVE_COMPUTE;
	} else
		resolve_method = RESOLVE_COMPUTE;

	radv_pick_resolve_method_images(src_image, dest_image,
					dest_image_layout, cmd_buffer,
					&resolve_method);

	if (resolve_method == RESOLVE_FRAGMENT) {
		radv_meta_resolve_fragment_image(cmd_buffer,
						 src_image,
						 src_image_layout,
						 dest_image,
						 dest_image_layout,
						 region_count, regions);
		return;
	}

	if (resolve_method == RESOLVE_COMPUTE) {
		radv_meta_resolve_compute_image(cmd_buffer,
						src_image,
						src_image_layout,
						dest_image,
						dest_image_layout,
						region_count, regions);
		return;
	}

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE);

	assert(src_image->info.samples > 1);
	if (src_image->info.samples <= 1) {
		/* this causes GPU hangs if we get past here */
		fprintf(stderr, "radv: Illegal resolve operation (src not multisampled), will hang GPU.\n");
		return;
	}
	assert(dest_image->info.samples == 1);

	if (src_image->info.array_size > 1)
		radv_finishme("vkCmdResolveImage: multisample array images");

	if (radv_image_has_dcc(dest_image)) {
		radv_initialize_dcc(cmd_buffer, dest_image, 0xffffffff);
	}

	unsigned fs_key = radv_format_meta_fs_key(dest_image->vk_format);
	for (uint32_t r = 0; r < region_count; ++r) {
		const VkImageResolve *region = &regions[r];

		/* From the Vulkan 1.0 spec:
		 *
		 *    - The aspectMask member of srcSubresource and dstSubresource must
		 *      only contain VK_IMAGE_ASPECT_COLOR_BIT
		 *
		 *    - The layerCount member of srcSubresource and dstSubresource must
		 *      match
		 */
		assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
		assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
		assert(region->srcSubresource.layerCount ==
		       region->dstSubresource.layerCount);

		const uint32_t src_base_layer =
			radv_meta_get_iview_layer(src_image, &region->srcSubresource,
						  &region->srcOffset);

		const uint32_t dest_base_layer =
			radv_meta_get_iview_layer(dest_image, &region->dstSubresource,
						  &region->dstOffset);

		/**
		 * From Vulkan 1.0.6 spec: 18.6 Resolving Multisample Images
		 *
		 *    srcOffset and dstOffset select the initial x, y, and z offsets in
		 *    texels of the sub-regions of the source and destination image data.
		 *    extent is the size in texels of the source image to resolve in width,
		 *    height and depth. 1D images use only x and width. 2D images use x, y,
		 *    width and height. 3D images use x, y, z, width, height and depth.
		 */
		const struct VkExtent3D extent =
			radv_sanitize_image_extent(src_image->type, region->extent);
		const struct VkOffset3D dstOffset =
			radv_sanitize_image_offset(dest_image->type, region->dstOffset);

		for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
		     ++layer) {

			VkResult ret = build_resolve_pipeline(device, fs_key);
			if (ret != VK_SUCCESS) {
				cmd_buffer->record_result = ret;
				break;
			}

			struct radv_image_view src_iview;
			radv_image_view_init(&src_iview, cmd_buffer->device,
				&(VkImageViewCreateInfo) {
					.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
					.image = src_image_h,
					.viewType = radv_meta_get_view_type(src_image),
					.format = src_image->vk_format,
					.subresourceRange = {
						.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
						.baseMipLevel = region->srcSubresource.mipLevel,
						.levelCount = 1,
						.baseArrayLayer = src_base_layer + layer,
						.layerCount = 1,
					},
				});

			struct radv_image_view dest_iview;
			radv_image_view_init(&dest_iview, cmd_buffer->device,
				&(VkImageViewCreateInfo) {
					.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
					.image = dest_image_h,
					.viewType = radv_meta_get_view_type(dest_image),
					.format = dest_image->vk_format,
					.subresourceRange = {
						.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
						.baseMipLevel = region->dstSubresource.mipLevel,
						.levelCount = 1,
						.baseArrayLayer = dest_base_layer + layer,
						.layerCount = 1,
					},
				});

			VkFramebuffer fb_h;
			radv_CreateFramebuffer(device_h,
				&(VkFramebufferCreateInfo) {
					.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
					.attachmentCount = 2,
					.pAttachments = (VkImageView[]) {
						radv_image_view_to_handle(&src_iview),
						radv_image_view_to_handle(&dest_iview),
					},
					.width = radv_minify(dest_image->info.width,
							     region->dstSubresource.mipLevel),
					.height = radv_minify(dest_image->info.height,
							      region->dstSubresource.mipLevel),
					.layers = 1
				},
				&cmd_buffer->pool->alloc,
				&fb_h);

			radv_CmdBeginRenderPass(cmd_buffer_h,
				&(VkRenderPassBeginInfo) {
					.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
					.renderPass = device->meta_state.resolve.pass[fs_key],
					.framebuffer = fb_h,
					.renderArea = {
						.offset = {
							dstOffset.x,
							dstOffset.y,
						},
						.extent = {
							extent.width,
							extent.height,
						}
					},
					.clearValueCount = 0,
					.pClearValues = NULL,
				},
				VK_SUBPASS_CONTENTS_INLINE);

			emit_resolve(cmd_buffer,
				     dest_iview.vk_format,
				     &(VkOffset2D) {
					     .x = dstOffset.x,
					     .y = dstOffset.y,
				     },
				     &(VkExtent2D) {
					     .width = extent.width,
					     .height = extent.height,
				     });

			radv_CmdEndRenderPass(cmd_buffer_h);

			radv_DestroyFramebuffer(device_h, fb_h,
						&cmd_buffer->pool->alloc);
		}
	}

	radv_meta_restore(&saved_state, cmd_buffer);
}

/**
 * Emit any needed resolves for the current subpass.
 */
void
radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer *cmd_buffer)
{
	struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
	const struct radv_subpass *subpass = cmd_buffer->state.subpass;
	struct radv_meta_saved_state saved_state;
	enum radv_resolve_method resolve_method = RESOLVE_HW;

	/* FINISHME(perf): Skip clears for resolve attachments.
	 *
	 * From the Vulkan 1.0 spec:
	 *
	 *    If the first use of an attachment in a render pass is as a resolve
	 *    attachment, then the loadOp is effectively ignored as the resolve is
	 *    guaranteed to overwrite all pixels in the render area.
	 */

	if (!subpass->has_resolve)
		return;

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		struct radv_subpass_attachment src_att = subpass->color_attachments[i];
		struct radv_subpass_attachment dest_att = subpass->resolve_attachments[i];

		if (src_att.attachment == VK_ATTACHMENT_UNUSED ||
		    dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;
		struct radv_image *src_img = cmd_buffer->state.framebuffer->attachments[src_att.attachment].attachment->image;

		radv_pick_resolve_method_images(src_img, dst_img, dest_att.layout, cmd_buffer, &resolve_method);
		if (resolve_method == RESOLVE_FRAGMENT) {
			break;
		}
	}

	if (resolve_method == RESOLVE_COMPUTE) {
		radv_cmd_buffer_resolve_subpass_cs(cmd_buffer);
		return;
	} else if (resolve_method == RESOLVE_FRAGMENT) {
		radv_cmd_buffer_resolve_subpass_fs(cmd_buffer);
		return;
	}

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE);

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		struct radv_subpass_attachment src_att = subpass->color_attachments[i];
		struct radv_subpass_attachment dest_att = subpass->resolve_attachments[i];

		if (src_att.attachment == VK_ATTACHMENT_UNUSED ||
		    dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;

		if (radv_image_has_dcc(dst_img)) {
			radv_initialize_dcc(cmd_buffer, dst_img, 0xffffffff);
			cmd_buffer->state.attachments[dest_att.attachment].current_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		}

		struct radv_subpass resolve_subpass = {
			.color_count = 2,
			.color_attachments = (struct radv_subpass_attachment[]) { src_att, dest_att },
			.depth_stencil_attachment = { .attachment = VK_ATTACHMENT_UNUSED },
		};

		radv_cmd_buffer_set_subpass(cmd_buffer, &resolve_subpass, false);

		VkResult ret = build_resolve_pipeline(cmd_buffer->device, radv_format_meta_fs_key(dst_img->vk_format));
		if (ret != VK_SUCCESS) {
			cmd_buffer->record_result = ret;
			continue;
		}

		emit_resolve(cmd_buffer,
			     dst_img->vk_format,
			     &(VkOffset2D) { 0, 0 },
			     &(VkExtent2D) { fb->width, fb->height });
	}

	cmd_buffer->state.subpass = subpass;
	radv_meta_restore(&saved_state, cmd_buffer);
}

/**
 * Decompress CMask/FMask before resolving a multisampled source image inside a
 * subpass.
 */
void
radv_decompress_resolve_subpass_src(struct radv_cmd_buffer *cmd_buffer)
{
	const struct radv_subpass *subpass = cmd_buffer->state.subpass;
	struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		struct radv_subpass_attachment src_att = subpass->color_attachments[i];
		struct radv_subpass_attachment dest_att = subpass->resolve_attachments[i];

		if (src_att.attachment == VK_ATTACHMENT_UNUSED ||
		    dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *src_image =
			fb->attachments[src_att.attachment].attachment->image;

		VkImageResolve region = {};
		region.srcSubresource.baseArrayLayer = 0;
		region.srcSubresource.mipLevel = 0;
		region.srcSubresource.layerCount = src_image->info.array_size;

		radv_decompress_resolve_src(cmd_buffer, src_image,
					    src_att.layout, 1, &region);
	}
}

/**
 * Decompress CMask/FMask before resolving a multisampled source image.
 */
void
radv_decompress_resolve_src(struct radv_cmd_buffer *cmd_buffer,
			    struct radv_image *src_image,
			    VkImageLayout src_image_layout,
			    uint32_t region_count,
			    const VkImageResolve *regions)
{
	for (uint32_t r = 0; r < region_count; ++r) {
		const VkImageResolve *region = &regions[r];
		const uint32_t src_base_layer =
			radv_meta_get_iview_layer(src_image, &region->srcSubresource,
						  &region->srcOffset);
		VkImageSubresourceRange range;
		range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		range.baseMipLevel = region->srcSubresource.mipLevel;
		range.levelCount = 1;
		range.baseArrayLayer = src_base_layer;
		range.layerCount = region->srcSubresource.layerCount;

		uint32_t queue_mask =
			radv_image_queue_family_mask(src_image,
						     cmd_buffer->queue_family_index,
						     cmd_buffer->queue_family_index);

		if (radv_layout_dcc_compressed(src_image, src_image_layout,
					       queue_mask)) {
			radv_decompress_dcc(cmd_buffer, src_image, &range);
		} else {
			radv_fast_clear_flush_image_inplace(cmd_buffer,
							    src_image, &range);
		}
	}
}