radv: add radv_meta_save() helper
[mesa.git] src/amd/vulkan/radv_meta_decompress.c
/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "radv_meta.h"
#include "radv_private.h"
#include "sid.h"

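/* Render pass with a single D32_SFLOAT_S8_UINT depth/stencil attachment that
 * is loaded and stored and stays in DEPTH_STENCIL_ATTACHMENT_OPTIMAL, so the
 * in-place decompress/resummarize draws see and keep the existing contents.
 */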
static VkResult
create_pass(struct radv_device *device,
            uint32_t samples,
            VkRenderPass *pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);
	const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
	VkAttachmentDescription attachment;

	attachment.flags = 0;
	attachment.format = VK_FORMAT_D32_SFLOAT_S8_UINT;
	attachment.samples = samples;
	attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachment.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

	result = radv_CreateRenderPass(device_h,
		&(VkRenderPassCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			.attachmentCount = 1,
			.pAttachments = &attachment,
			.subpassCount = 1,
			.pSubpasses = &(VkSubpassDescription) {
				.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
				.inputAttachmentCount = 0,
				.colorAttachmentCount = 0,
				.pColorAttachments = NULL,
				.pResolveAttachments = NULL,
				.pDepthStencilAttachment = &(VkAttachmentReference) {
					.attachment = 0,
					.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
				},
				.preserveAttachmentCount = 0,
				.pPreserveAttachments = NULL,
			},
			.dependencyCount = 0,
		},
		alloc,
		pass);

	return result;
}

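/* Builds both meta pipelines from the same create info: the decompress
 * pipeline sets db_flush_depth_inplace/db_flush_stencil_inplace, and the
 * resummarize pipeline additionally sets db_resummarize.  The shaders are a
 * generated vertex shader and a no-op fragment shader; the DB flags passed to
 * radv_graphics_pipeline_create() are what make the draw perform the HTILE
 * operation.
 */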
static VkResult
create_pipeline(struct radv_device *device,
                VkShaderModule vs_module_h,
                uint32_t samples,
                VkRenderPass pass,
                VkPipeline *decompress_pipeline,
                VkPipeline *resummarize_pipeline)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);

	struct radv_shader_module fs_module = {
		.nir = radv_meta_build_nir_fs_noop(),
	};

	if (!fs_module.nir) {
		/* XXX: Need more accurate error */
		result = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto cleanup;
	}

	const VkGraphicsPipelineCreateInfo pipeline_create_info = {
		.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
		.stageCount = 2,
		.pStages = (VkPipelineShaderStageCreateInfo[]) {
			{
				.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
				.stage = VK_SHADER_STAGE_VERTEX_BIT,
				.module = vs_module_h,
				.pName = "main",
			},
			{
				.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
				.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
				.module = radv_shader_module_to_handle(&fs_module),
				.pName = "main",
			},
		},
		.pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
			.vertexBindingDescriptionCount = 0,
			.vertexAttributeDescriptionCount = 0,
		},
		.pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
			.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
			.primitiveRestartEnable = false,
		},
		.pViewportState = &(VkPipelineViewportStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
			.viewportCount = 1,
			.scissorCount = 1,
		},
		.pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
			.depthClampEnable = false,
			.rasterizerDiscardEnable = false,
			.polygonMode = VK_POLYGON_MODE_FILL,
			.cullMode = VK_CULL_MODE_NONE,
			.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
		},
		.pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
			.rasterizationSamples = samples,
			.sampleShadingEnable = false,
			.pSampleMask = NULL,
			.alphaToCoverageEnable = false,
			.alphaToOneEnable = false,
		},
		.pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
			.logicOpEnable = false,
			.attachmentCount = 0,
			.pAttachments = NULL,
		},
		.pDepthStencilState = &(VkPipelineDepthStencilStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
			.depthTestEnable = false,
			.depthWriteEnable = false,
			.depthBoundsTestEnable = false,
			.stencilTestEnable = false,
		},
		.pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
			.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
			.dynamicStateCount = 2,
			.pDynamicStates = (VkDynamicState[]) {
				VK_DYNAMIC_STATE_VIEWPORT,
				VK_DYNAMIC_STATE_SCISSOR,
			},
		},
		.renderPass = pass,
		.subpass = 0,
	};

	result = radv_graphics_pipeline_create(device_h,
		radv_pipeline_cache_to_handle(&device->meta_state.cache),
		&pipeline_create_info,
		&(struct radv_graphics_pipeline_create_info) {
			.use_rectlist = true,
			.db_flush_depth_inplace = true,
			.db_flush_stencil_inplace = true,
		},
		&device->meta_state.alloc,
		decompress_pipeline);
	if (result != VK_SUCCESS)
		goto cleanup;

	result = radv_graphics_pipeline_create(device_h,
		radv_pipeline_cache_to_handle(&device->meta_state.cache),
		&pipeline_create_info,
		&(struct radv_graphics_pipeline_create_info) {
			.use_rectlist = true,
			.db_flush_depth_inplace = true,
			.db_flush_stencil_inplace = true,
			.db_resummarize = true,
		},
		&device->meta_state.alloc,
		resummarize_pipeline);
	if (result != VK_SUCCESS)
		goto cleanup;

	goto cleanup;

cleanup:
	ralloc_free(fs_module.nir);
	return result;
}

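/* Counterpart of radv_device_init_meta_depth_decomp_state(); also used on its
 * failure path to tear down whatever was created so far.
 */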
void
radv_device_finish_meta_depth_decomp_state(struct radv_device *device)
{
	struct radv_meta_state *state = &device->meta_state;

	for (uint32_t i = 0; i < ARRAY_SIZE(state->depth_decomp); ++i) {
		radv_DestroyRenderPass(radv_device_to_handle(device),
				       state->depth_decomp[i].pass,
				       &state->alloc);
		radv_DestroyPipeline(radv_device_to_handle(device),
				     state->depth_decomp[i].decompress_pipeline,
				     &state->alloc);
		radv_DestroyPipeline(radv_device_to_handle(device),
				     state->depth_decomp[i].resummarize_pipeline,
				     &state->alloc);
	}
}

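/* One render pass plus decompress/resummarize pipeline pair is created per
 * depth_decomp[] entry, i.e. per supported sample count (1 << i), all sharing
 * a single generated vertex shader module.
 */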
VkResult
radv_device_init_meta_depth_decomp_state(struct radv_device *device)
{
	struct radv_meta_state *state = &device->meta_state;
	VkResult res = VK_SUCCESS;

	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };
	if (!vs_module.nir) {
		/* XXX: Need more accurate error */
		res = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto fail;
	}

	VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);

	for (uint32_t i = 0; i < ARRAY_SIZE(state->depth_decomp); ++i) {
		uint32_t samples = 1 << i;

		res = create_pass(device, samples, &state->depth_decomp[i].pass);
		if (res != VK_SUCCESS)
			goto fail;

		res = create_pipeline(device, vs_module_h, samples,
				      state->depth_decomp[i].pass,
				      &state->depth_decomp[i].decompress_pipeline,
				      &state->depth_decomp[i].resummarize_pipeline);
		if (res != VK_SUCCESS)
			goto fail;
	}

	goto cleanup;

fail:
	radv_device_finish_meta_depth_decomp_state(device);

cleanup:
	ralloc_free(vs_module.nir);

	return res;
}

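/* Records the actual meta draw: bind the requested pipeline, cover the
 * processed area with the dynamic viewport/scissor and emit three vertices,
 * which the use_rectlist pipelines expand to a full rectangle.
 */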
static void
emit_depth_decomp(struct radv_cmd_buffer *cmd_buffer,
                  const VkExtent2D *depth_decomp_extent,
                  VkPipeline pipeline_h)
{
	VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);

	radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
			     pipeline_h);

	radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
		.x = 0,
		.y = 0,
		.width = depth_decomp_extent->width,
		.height = depth_decomp_extent->height,
		.minDepth = 0.0f,
		.maxDepth = 1.0f
	});

	radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkRect2D) {
		.offset = { 0, 0 },
		.extent = *depth_decomp_extent,
	});

	radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
}

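/* Selects which HTILE operation radv_process_depth_image_inplace() performs. */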
enum radv_depth_op {
	DEPTH_DECOMPRESS,
	DEPTH_RESUMMARIZE,
};

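/* Shared implementation for decompression and resummarization: walk every
 * layer of the subresource range, wrap it in a single-layer depth view and
 * framebuffer, and run the matching meta render pass over the mip level's
 * extent.  Images without HTILE metadata need no processing.
 */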
static void radv_process_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
                                             struct radv_image *image,
                                             VkImageSubresourceRange *subresourceRange,
                                             enum radv_depth_op op)
{
	struct radv_meta_saved_state saved_state;
	VkDevice device_h = radv_device_to_handle(cmd_buffer->device);
	VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
	uint32_t width = radv_minify(image->info.width,
				     subresourceRange->baseMipLevel);
	uint32_t height = radv_minify(image->info.height,
				      subresourceRange->baseMipLevel);
	uint32_t samples = image->info.samples;
	uint32_t samples_log2 = ffs(samples) - 1;
	struct radv_meta_state *meta_state = &cmd_buffer->device->meta_state;
	VkPipeline pipeline_h;

	if (!image->surface.htile_size)
		return;

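	/* Only the graphics pipeline bind point and the current render pass
	 * are clobbered below, so save (and later restore) just that state.
	 */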
	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE |
		       RADV_META_SAVE_PASS);

	switch (op) {
	case DEPTH_DECOMPRESS:
		pipeline_h = meta_state->depth_decomp[samples_log2].decompress_pipeline;
		break;
	case DEPTH_RESUMMARIZE:
		pipeline_h = meta_state->depth_decomp[samples_log2].resummarize_pipeline;
		break;
	default:
		unreachable("unknown operation");
	}

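	/* The render pass and framebuffer used here are single-layered, so
	 * each array layer gets its own image view, framebuffer and draw.
	 */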
	for (uint32_t layer = 0; layer < radv_get_layerCount(image, subresourceRange); layer++) {
		struct radv_image_view iview;

		radv_image_view_init(&iview, cmd_buffer->device,
			&(VkImageViewCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
				.image = radv_image_to_handle(image),
				.viewType = radv_meta_get_view_type(image),
				.format = image->vk_format,
				.subresourceRange = {
					.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
					.baseMipLevel = subresourceRange->baseMipLevel,
					.levelCount = 1,
					.baseArrayLayer = subresourceRange->baseArrayLayer + layer,
					.layerCount = 1,
				},
			});

		VkFramebuffer fb_h;
		radv_CreateFramebuffer(device_h,
			&(VkFramebufferCreateInfo) {
				.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
				.attachmentCount = 1,
				.pAttachments = (VkImageView[]) {
					radv_image_view_to_handle(&iview)
				},
				.width = width,
				.height = height,
				.layers = 1
			},
			&cmd_buffer->pool->alloc,
			&fb_h);

		radv_CmdBeginRenderPass(cmd_buffer_h,
			&(VkRenderPassBeginInfo) {
				.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
				.renderPass = meta_state->depth_decomp[samples_log2].pass,
				.framebuffer = fb_h,
				.renderArea = {
					.offset = { 0, 0 },
					.extent = { width, height },
				},
				.clearValueCount = 0,
				.pClearValues = NULL,
			},
			VK_SUBPASS_CONTENTS_INLINE);

		emit_depth_decomp(cmd_buffer, &(VkExtent2D){width, height}, pipeline_h);
		radv_CmdEndRenderPass(cmd_buffer_h);

		radv_DestroyFramebuffer(device_h, fb_h,
					&cmd_buffer->pool->alloc);
	}
	radv_meta_restore(&saved_state, cmd_buffer);
}

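/* Public entry points.  These are graphics operations, hence the assertion
 * that the command buffer belongs to the general queue.
 */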
void radv_decompress_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
                                         struct radv_image *image,
                                         VkImageSubresourceRange *subresourceRange)
{
	assert(cmd_buffer->queue_family_index == RADV_QUEUE_GENERAL);
	radv_process_depth_image_inplace(cmd_buffer, image, subresourceRange, DEPTH_DECOMPRESS);
}

void radv_resummarize_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
                                          struct radv_image *image,
                                          VkImageSubresourceRange *subresourceRange)
{
	assert(cmd_buffer->queue_family_index == RADV_QUEUE_GENERAL);
	radv_process_depth_image_inplace(cmd_buffer, image, subresourceRange, DEPTH_RESUMMARIZE);
}