radv: create decompress pipelines for separate depth/stencil layouts
[mesa.git] src/amd/vulkan/radv_meta_decompress.c
/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "radv_meta.h"
#include "radv_private.h"
#include "sid.h"

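/* HTILE operations implemented by this meta pass: decompress the depth/stencil
 * data in place, or resummarize (recompute) the HTILE metadata.
 */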
enum radv_depth_op {
	DEPTH_DECOMPRESS,
	DEPTH_RESUMMARIZE,
};

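/* Which aspects the decompress pipelines flush in place. The depth-only and
 * stencil-only variants exist for separate depth/stencil layouts, where only
 * one aspect is in a compressed layout and needs to be decompressed.
 */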
enum radv_depth_decompress {
	DECOMPRESS_DEPTH_STENCIL,
	DECOMPRESS_DEPTH,
	DECOMPRESS_STENCIL,
};

static VkResult
create_pass(struct radv_device *device,
            uint32_t samples,
            VkRenderPass *pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);
	const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
	VkAttachmentDescription attachment;

	attachment.flags = 0;
	attachment.format = VK_FORMAT_D32_SFLOAT_S8_UINT;
	attachment.samples = samples;
	attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachment.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

	result = radv_CreateRenderPass(device_h,
		&(VkRenderPassCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			.attachmentCount = 1,
			.pAttachments = &attachment,
			.subpassCount = 1,
			.pSubpasses = &(VkSubpassDescription) {
				.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
				.inputAttachmentCount = 0,
				.colorAttachmentCount = 0,
				.pColorAttachments = NULL,
				.pResolveAttachments = NULL,
				.pDepthStencilAttachment = &(VkAttachmentReference) {
					.attachment = 0,
					.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
				},
				.preserveAttachmentCount = 0,
				.pPreserveAttachments = NULL,
			},
			.dependencyCount = 0,
		},
		alloc,
		pass);

	return result;
}

static VkResult
create_pipeline_layout(struct radv_device *device, VkPipelineLayout *layout)
{
	VkPipelineLayoutCreateInfo pl_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
		.setLayoutCount = 0,
		.pSetLayouts = NULL,
		.pushConstantRangeCount = 0,
		.pPushConstantRanges = NULL,
	};

	return radv_CreatePipelineLayout(radv_device_to_handle(device),
		&pl_create_info,
		&device->meta_state.alloc,
		layout);
}

static VkResult
create_pipeline(struct radv_device *device,
                VkShaderModule vs_module_h,
                uint32_t samples,
                VkRenderPass pass,
                VkPipelineLayout layout,
                enum radv_depth_op op,
                enum radv_depth_decompress decompress,
                VkPipeline *pipeline)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);
	struct radv_shader_module vs_module = {0};

	mtx_lock(&device->meta_state.mtx);
	if (*pipeline) {
		mtx_unlock(&device->meta_state.mtx);
		return VK_SUCCESS;
	}

	if (!vs_module_h) {
		vs_module.nir = radv_meta_build_nir_vs_generate_vertices();
		vs_module_h = radv_shader_module_to_handle(&vs_module);
	}

	struct radv_shader_module fs_module = {
		.nir = radv_meta_build_nir_fs_noop(),
	};

	if (!fs_module.nir) {
		/* XXX: Need more accurate error */
		result = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto cleanup;
	}

	const VkPipelineSampleLocationsStateCreateInfoEXT sample_locs_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
		.sampleLocationsEnable = false,
	};

	const VkGraphicsPipelineCreateInfo pipeline_create_info = {
		.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
		.stageCount = 2,
		.pStages = (VkPipelineShaderStageCreateInfo[]) {
			{
				.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
				.stage = VK_SHADER_STAGE_VERTEX_BIT,
				.module = vs_module_h,
				.pName = "main",
			},
			{
				.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
				.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
				.module = radv_shader_module_to_handle(&fs_module),
				.pName = "main",
			},
		},
		.pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
			.vertexBindingDescriptionCount = 0,
			.vertexAttributeDescriptionCount = 0,
		},
		.pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
			.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
			.primitiveRestartEnable = false,
		},
		.pViewportState = &(VkPipelineViewportStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
			.viewportCount = 1,
			.scissorCount = 1,
		},
		.pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
			.depthClampEnable = false,
			.rasterizerDiscardEnable = false,
			.polygonMode = VK_POLYGON_MODE_FILL,
			.cullMode = VK_CULL_MODE_NONE,
			.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
		},
		.pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
			.pNext = &sample_locs_create_info,
			.rasterizationSamples = samples,
			.sampleShadingEnable = false,
			.pSampleMask = NULL,
			.alphaToCoverageEnable = false,
			.alphaToOneEnable = false,
		},
		.pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
			.logicOpEnable = false,
			.attachmentCount = 0,
			.pAttachments = NULL,
		},
		.pDepthStencilState = &(VkPipelineDepthStencilStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
			.depthTestEnable = false,
			.depthWriteEnable = false,
			.depthBoundsTestEnable = false,
			.stencilTestEnable = false,
		},
		.pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
			.dynamicStateCount = 3,
			.pDynamicStates = (VkDynamicState[]) {
				VK_DYNAMIC_STATE_VIEWPORT,
				VK_DYNAMIC_STATE_SCISSOR,
				VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
			},
		},
		.layout = layout,
		.renderPass = pass,
		.subpass = 0,
	};

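	/* radv-specific pipeline state: the rectlist draw becomes an in-place
	 * flush of the selected aspect(s) for decompression, or an HTILE
	 * resummarization for the resummarize operation.
	 */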
	struct radv_graphics_pipeline_create_info extra = {
		.use_rectlist = true,
		.db_flush_depth_inplace = decompress == DECOMPRESS_DEPTH_STENCIL ||
					  decompress == DECOMPRESS_DEPTH,
		.db_flush_stencil_inplace = decompress == DECOMPRESS_DEPTH_STENCIL ||
					    decompress == DECOMPRESS_STENCIL,
		.db_resummarize = op == DEPTH_RESUMMARIZE,
	};

	result = radv_graphics_pipeline_create(device_h,
		radv_pipeline_cache_to_handle(&device->meta_state.cache),
		&pipeline_create_info, &extra,
		&device->meta_state.alloc,
		pipeline);

cleanup:
	ralloc_free(fs_module.nir);
	if (vs_module.nir)
		ralloc_free(vs_module.nir);
	mtx_unlock(&device->meta_state.mtx);
	return result;
}

void
radv_device_finish_meta_depth_decomp_state(struct radv_device *device)
{
	struct radv_meta_state *state = &device->meta_state;

	for (uint32_t i = 0; i < ARRAY_SIZE(state->depth_decomp); ++i) {
		radv_DestroyRenderPass(radv_device_to_handle(device),
				       state->depth_decomp[i].pass,
				       &state->alloc);
		radv_DestroyPipelineLayout(radv_device_to_handle(device),
					   state->depth_decomp[i].p_layout,
					   &state->alloc);

		for (uint32_t j = 0; j < NUM_DEPTH_DECOMPRESS_PIPELINES; j++) {
			radv_DestroyPipeline(radv_device_to_handle(device),
					     state->depth_decomp[i].decompress_pipeline[j],
					     &state->alloc);
		}
		radv_DestroyPipeline(radv_device_to_handle(device),
				     state->depth_decomp[i].resummarize_pipeline,
				     &state->alloc);
	}
}

VkResult
radv_device_init_meta_depth_decomp_state(struct radv_device *device, bool on_demand)
{
	struct radv_meta_state *state = &device->meta_state;
	VkResult res = VK_SUCCESS;

	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };
	if (!vs_module.nir) {
		/* XXX: Need more accurate error */
		res = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto fail;
	}

	VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);

	for (uint32_t i = 0; i < ARRAY_SIZE(state->depth_decomp); ++i) {
		uint32_t samples = 1 << i;

		res = create_pass(device, samples, &state->depth_decomp[i].pass);
		if (res != VK_SUCCESS)
			goto fail;

		res = create_pipeline_layout(device,
					     &state->depth_decomp[i].p_layout);
		if (res != VK_SUCCESS)
			goto fail;

		if (on_demand)
			continue;

		for (uint32_t j = 0; j < NUM_DEPTH_DECOMPRESS_PIPELINES; j++) {
			res = create_pipeline(device, vs_module_h, samples,
					      state->depth_decomp[i].pass,
					      state->depth_decomp[i].p_layout,
					      DEPTH_DECOMPRESS,
					      j,
					      &state->depth_decomp[i].decompress_pipeline[j]);
			if (res != VK_SUCCESS)
				goto fail;
		}

		res = create_pipeline(device, vs_module_h, samples,
				      state->depth_decomp[i].pass,
				      state->depth_decomp[i].p_layout,
				      DEPTH_RESUMMARIZE,
				      0, /* unused */
				      &state->depth_decomp[i].resummarize_pipeline);
		if (res != VK_SUCCESS)
			goto fail;
	}

	goto cleanup;

fail:
	radv_device_finish_meta_depth_decomp_state(device);

cleanup:
	ralloc_free(vs_module.nir);

	return res;
}

static VkPipeline *
radv_get_depth_pipeline(struct radv_cmd_buffer *cmd_buffer,
			struct radv_image *image, enum radv_depth_op op)
{
	struct radv_meta_state *state = &cmd_buffer->device->meta_state;
	uint32_t samples = image->info.samples;
	uint32_t samples_log2 = ffs(samples) - 1;
	VkPipeline *pipeline;

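	/* With on-demand meta state, the pipelines for this sample count may
	 * not have been built at device init; build all variants now.
	 */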
	if (!state->depth_decomp[samples_log2].decompress_pipeline[DECOMPRESS_DEPTH_STENCIL]) {
		VkResult ret;

		for (uint32_t i = 0; i < NUM_DEPTH_DECOMPRESS_PIPELINES; i++) {
			ret = create_pipeline(cmd_buffer->device, VK_NULL_HANDLE, samples,
					      state->depth_decomp[samples_log2].pass,
					      state->depth_decomp[samples_log2].p_layout,
					      DEPTH_DECOMPRESS,
					      i,
					      &state->depth_decomp[samples_log2].decompress_pipeline[i]);
			if (ret != VK_SUCCESS) {
				cmd_buffer->record_result = ret;
				return NULL;
			}
		}

		ret = create_pipeline(cmd_buffer->device, VK_NULL_HANDLE, samples,
				      state->depth_decomp[samples_log2].pass,
				      state->depth_decomp[samples_log2].p_layout,
				      DEPTH_RESUMMARIZE,
				      0, /* unused */
				      &state->depth_decomp[samples_log2].resummarize_pipeline);
		if (ret != VK_SUCCESS) {
			cmd_buffer->record_result = ret;
			return NULL;
		}
	}

	switch (op) {
	case DEPTH_DECOMPRESS:
		pipeline = &state->depth_decomp[samples_log2].decompress_pipeline[DECOMPRESS_DEPTH_STENCIL];
		break;
	case DEPTH_RESUMMARIZE:
		pipeline = &state->depth_decomp[samples_log2].resummarize_pipeline;
		break;
	default:
		unreachable("unknown operation");
	}

	return pipeline;
}

static void
radv_process_depth_image_layer(struct radv_cmd_buffer *cmd_buffer,
			       struct radv_image *image,
			       const VkImageSubresourceRange *range,
			       int level, int layer)
{
	struct radv_device *device = cmd_buffer->device;
	struct radv_meta_state *state = &device->meta_state;
	uint32_t samples_log2 = ffs(image->info.samples) - 1;
	struct radv_image_view iview;
	uint32_t width, height;

	width = radv_minify(image->info.width, range->baseMipLevel + level);
	height = radv_minify(image->info.height, range->baseMipLevel + level);

	radv_image_view_init(&iview, device,
		&(VkImageViewCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
			.image = radv_image_to_handle(image),
			.viewType = radv_meta_get_view_type(image),
			.format = image->vk_format,
			.subresourceRange = {
				.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
				.baseMipLevel = range->baseMipLevel + level,
				.levelCount = 1,
				.baseArrayLayer = range->baseArrayLayer + layer,
				.layerCount = 1,
			},
		}, NULL);

	VkFramebuffer fb_h;
	radv_CreateFramebuffer(radv_device_to_handle(device),
		&(VkFramebufferCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
			.attachmentCount = 1,
			.pAttachments = (VkImageView[]) {
				radv_image_view_to_handle(&iview)
			},
			.width = width,
			.height = height,
			.layers = 1
		}, &cmd_buffer->pool->alloc, &fb_h);

	radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
		&(VkRenderPassBeginInfo) {
			.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
			.renderPass = state->depth_decomp[samples_log2].pass,
			.framebuffer = fb_h,
			.renderArea = {
				.offset = {
					0,
					0,
				},
				.extent = {
					width,
					height,
				}
			},
			.clearValueCount = 0,
			.pClearValues = NULL,
		},
		VK_SUBPASS_CONTENTS_INLINE);

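	/* Draw a rectangle covering the whole level/layer with the no-op
	 * fragment shader; the DB performs the in-place decompression (or
	 * resummarization) for every tile the draw covers.
	 */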
	radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
	radv_CmdEndRenderPass(radv_cmd_buffer_to_handle(cmd_buffer));

	radv_DestroyFramebuffer(radv_device_to_handle(device), fb_h,
				&cmd_buffer->pool->alloc);
}

static void radv_process_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
					     struct radv_image *image,
					     const VkImageSubresourceRange *subresourceRange,
					     struct radv_sample_locations_state *sample_locs,
					     enum radv_depth_op op)
{
	struct radv_meta_saved_state saved_state;
	VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
	VkPipeline *pipeline;

	if (!radv_image_has_htile(image))
		return;

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE |
		       RADV_META_SAVE_SAMPLE_LOCATIONS |
		       RADV_META_SAVE_PASS);

	pipeline = radv_get_depth_pipeline(cmd_buffer, image, op);

	radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
			     VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

	if (sample_locs) {
		assert(image->flags & VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT);

		/* Set the sample locations specified during explicit or
		 * automatic layout transitions, otherwise the depth decompress
		 * pass uses the default HW locations.
		 */
		radv_CmdSetSampleLocationsEXT(cmd_buffer_h, &(VkSampleLocationsInfoEXT) {
			.sampleLocationsPerPixel = sample_locs->per_pixel,
			.sampleLocationGridSize = sample_locs->grid_size,
			.sampleLocationsCount = sample_locs->count,
			.pSampleLocations = sample_locs->locations,
		});
	}

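	/* Process every mip level and array layer in the range: the viewport
	 * and scissor are set per level, and a render pass is recorded per
	 * layer.
	 */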
	for (uint32_t l = 0; l < radv_get_levelCount(image, subresourceRange); ++l) {
		uint32_t width =
			radv_minify(image->info.width,
				    subresourceRange->baseMipLevel + l);
		uint32_t height =
			radv_minify(image->info.height,
				    subresourceRange->baseMipLevel + l);

		radv_CmdSetViewport(cmd_buffer_h, 0, 1,
			&(VkViewport) {
				.x = 0,
				.y = 0,
				.width = width,
				.height = height,
				.minDepth = 0.0f,
				.maxDepth = 1.0f
			});

		radv_CmdSetScissor(cmd_buffer_h, 0, 1,
			&(VkRect2D) {
				.offset = { 0, 0 },
				.extent = { width, height },
			});

		for (uint32_t s = 0; s < radv_get_layerCount(image, subresourceRange); s++) {
			radv_process_depth_image_layer(cmd_buffer, image,
						       subresourceRange, l, s);
		}
	}

	radv_meta_restore(&saved_state, cmd_buffer);
}

void radv_decompress_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
					 struct radv_image *image,
					 const VkImageSubresourceRange *subresourceRange,
					 struct radv_sample_locations_state *sample_locs)
{
	assert(cmd_buffer->queue_family_index == RADV_QUEUE_GENERAL);
	radv_process_depth_image_inplace(cmd_buffer, image, subresourceRange,
					 sample_locs, DEPTH_DECOMPRESS);
}

void radv_resummarize_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
					  struct radv_image *image,
					  const VkImageSubresourceRange *subresourceRange,
					  struct radv_sample_locations_state *sample_locs)
{
	assert(cmd_buffer->queue_family_index == RADV_QUEUE_GENERAL);
	radv_process_depth_image_inplace(cmd_buffer, image, subresourceRange,
					 sample_locs, DEPTH_RESUMMARIZE);
}