radv/gfx10: re-enable fast depth/stencil clears with separate aspects
[mesa.git] / src/amd/vulkan/radv_meta_clear.c
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "radv_debug.h"
25 #include "radv_meta.h"
26 #include "radv_private.h"
27 #include "nir/nir_builder.h"
28
29 #include "util/format_rgb9e5.h"
30 #include "vk_format.h"
31
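/* Variants of the depth/stencil clear pipelines: SLOW performs a regular
 * draw-based clear, while the FAST variants set the db_depth_clear/
 * db_stencil_clear pipeline flags; NO_EXPCLEAR additionally sets
 * db_*_disable_expclear because the previous clear value is unknown
 * (see create_depthstencil_pipeline() and pick_depthstencil_pipeline()).
 */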
32 enum {
33 DEPTH_CLEAR_SLOW,
34 DEPTH_CLEAR_FAST_EXPCLEAR,
35 DEPTH_CLEAR_FAST_NO_EXPCLEAR
36 };
37
38 static void
39 build_color_shaders(struct nir_shader **out_vs,
40 struct nir_shader **out_fs,
41 uint32_t frag_output)
42 {
43 nir_builder vs_b;
44 nir_builder fs_b;
45
46 nir_builder_init_simple_shader(&vs_b, NULL, MESA_SHADER_VERTEX, NULL);
47 nir_builder_init_simple_shader(&fs_b, NULL, MESA_SHADER_FRAGMENT, NULL);
48
49 vs_b.shader->info.name = ralloc_strdup(vs_b.shader, "meta_clear_color_vs");
50 fs_b.shader->info.name = ralloc_strdup(fs_b.shader, "meta_clear_color_fs");
51
52 const struct glsl_type *position_type = glsl_vec4_type();
53 const struct glsl_type *color_type = glsl_vec4_type();
54
55 nir_variable *vs_out_pos =
56 nir_variable_create(vs_b.shader, nir_var_shader_out, position_type,
57 "gl_Position");
58 vs_out_pos->data.location = VARYING_SLOT_POS;
59
60 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(fs_b.shader, nir_intrinsic_load_push_constant);
61 nir_intrinsic_set_base(in_color_load, 0);
62 nir_intrinsic_set_range(in_color_load, 16);
63 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&fs_b, 0));
64 in_color_load->num_components = 4;
65 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 4, 32, "clear color");
66 nir_builder_instr_insert(&fs_b, &in_color_load->instr);
67
68 nir_variable *fs_out_color =
69 nir_variable_create(fs_b.shader, nir_var_shader_out, color_type,
70 "f_color");
71 fs_out_color->data.location = FRAG_RESULT_DATA0 + frag_output;
72
73 nir_store_var(&fs_b, fs_out_color, &in_color_load->dest.ssa, 0xf);
74
75 nir_ssa_def *outvec = radv_meta_gen_rect_vertices(&vs_b);
76 nir_store_var(&vs_b, vs_out_pos, outvec, 0xf);
77
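/* The clear is drawn once per layer (or per view), with the layer selected
 * through the instance index: instance id + base instance is exported as
 * VARYING_SLOT_LAYER (see the radv_CmdDraw() calls in emit_color_clear()).
 */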
78 const struct glsl_type *layer_type = glsl_int_type();
79 nir_variable *vs_out_layer =
80 nir_variable_create(vs_b.shader, nir_var_shader_out, layer_type,
81 "v_layer");
82 vs_out_layer->data.location = VARYING_SLOT_LAYER;
83 vs_out_layer->data.interpolation = INTERP_MODE_FLAT;
84 nir_ssa_def *inst_id = nir_load_instance_id(&vs_b);
85 nir_ssa_def *base_instance = nir_load_base_instance(&vs_b);
86
87 nir_ssa_def *layer_id = nir_iadd(&vs_b, inst_id, base_instance);
88 nir_store_var(&vs_b, vs_out_layer, layer_id, 0x1);
89
90 *out_vs = vs_b.shader;
91 *out_fs = fs_b.shader;
92 }
93
94 static VkResult
95 create_pipeline(struct radv_device *device,
96 struct radv_render_pass *render_pass,
97 uint32_t samples,
98 struct nir_shader *vs_nir,
99 struct nir_shader *fs_nir,
100 const VkPipelineVertexInputStateCreateInfo *vi_state,
101 const VkPipelineDepthStencilStateCreateInfo *ds_state,
102 const VkPipelineColorBlendStateCreateInfo *cb_state,
103 const VkPipelineLayout layout,
104 const struct radv_graphics_pipeline_create_info *extra,
105 const VkAllocationCallbacks *alloc,
106 VkPipeline *pipeline)
107 {
108 VkDevice device_h = radv_device_to_handle(device);
109 VkResult result;
110
111 struct radv_shader_module vs_m = { .nir = vs_nir };
112 struct radv_shader_module fs_m = { .nir = fs_nir };
113
114 result = radv_graphics_pipeline_create(device_h,
115 radv_pipeline_cache_to_handle(&device->meta_state.cache),
116 &(VkGraphicsPipelineCreateInfo) {
117 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
118 .stageCount = fs_nir ? 2 : 1,
119 .pStages = (VkPipelineShaderStageCreateInfo[]) {
120 {
121 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
122 .stage = VK_SHADER_STAGE_VERTEX_BIT,
123 .module = radv_shader_module_to_handle(&vs_m),
124 .pName = "main",
125 },
126 {
127 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
128 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
129 .module = radv_shader_module_to_handle(&fs_m),
130 .pName = "main",
131 },
132 },
133 .pVertexInputState = vi_state,
134 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
135 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
136 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
137 .primitiveRestartEnable = false,
138 },
139 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
140 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
141 .viewportCount = 1,
142 .scissorCount = 1,
143 },
144 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
145 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
146 .rasterizerDiscardEnable = false,
147 .polygonMode = VK_POLYGON_MODE_FILL,
148 .cullMode = VK_CULL_MODE_NONE,
149 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
150 .depthBiasEnable = false,
151 },
152 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
153 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
154 .rasterizationSamples = samples,
155 .sampleShadingEnable = false,
156 .pSampleMask = NULL,
157 .alphaToCoverageEnable = false,
158 .alphaToOneEnable = false,
159 },
160 .pDepthStencilState = ds_state,
161 .pColorBlendState = cb_state,
162 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
163 /* The meta clear pipeline declares all state as dynamic.
164 * As a consequence, vkCmdBindPipeline writes no dynamic state
165 * to the cmd buffer. Therefore, at the end of the meta clear,
166 * we need only restore dynamic state that was set with vkCmdSet*().
167 */
168 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
169 .dynamicStateCount = 8,
170 .pDynamicStates = (VkDynamicState[]) {
171 /* Everything except stencil write mask */
172 VK_DYNAMIC_STATE_VIEWPORT,
173 VK_DYNAMIC_STATE_SCISSOR,
174 VK_DYNAMIC_STATE_LINE_WIDTH,
175 VK_DYNAMIC_STATE_DEPTH_BIAS,
176 VK_DYNAMIC_STATE_BLEND_CONSTANTS,
177 VK_DYNAMIC_STATE_DEPTH_BOUNDS,
178 VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
179 VK_DYNAMIC_STATE_STENCIL_REFERENCE,
180 },
181 },
182 .layout = layout,
183 .flags = 0,
184 .renderPass = radv_render_pass_to_handle(render_pass),
185 .subpass = 0,
186 },
187 extra,
188 alloc,
189 pipeline);
190
191 ralloc_free(vs_nir);
192 ralloc_free(fs_nir);
193
194 return result;
195 }
196
197 static VkResult
198 create_color_renderpass(struct radv_device *device,
199 VkFormat vk_format,
200 uint32_t samples,
201 VkRenderPass *pass)
202 {
203 mtx_lock(&device->meta_state.mtx);
204 if (*pass) {
205 mtx_unlock(&device->meta_state.mtx);
206 return VK_SUCCESS;
207 }
208
209 VkResult result = radv_CreateRenderPass(radv_device_to_handle(device),
210 &(VkRenderPassCreateInfo) {
211 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
212 .attachmentCount = 1,
213 .pAttachments = &(VkAttachmentDescription) {
214 .format = vk_format,
215 .samples = samples,
216 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
217 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
218 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
219 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
220 },
221 .subpassCount = 1,
222 .pSubpasses = &(VkSubpassDescription) {
223 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
224 .inputAttachmentCount = 0,
225 .colorAttachmentCount = 1,
226 .pColorAttachments = &(VkAttachmentReference) {
227 .attachment = 0,
228 .layout = VK_IMAGE_LAYOUT_GENERAL,
229 },
230 .pResolveAttachments = NULL,
231 .pDepthStencilAttachment = &(VkAttachmentReference) {
232 .attachment = VK_ATTACHMENT_UNUSED,
233 .layout = VK_IMAGE_LAYOUT_GENERAL,
234 },
235 .preserveAttachmentCount = 0,
236 .pPreserveAttachments = NULL,
237 },
238 .dependencyCount = 0,
239 }, &device->meta_state.alloc, pass);
240 mtx_unlock(&device->meta_state.mtx);
241 return result;
242 }
243
244 static VkResult
245 create_color_pipeline(struct radv_device *device,
246 uint32_t samples,
247 uint32_t frag_output,
248 VkPipeline *pipeline,
249 VkRenderPass pass)
250 {
251 struct nir_shader *vs_nir;
252 struct nir_shader *fs_nir;
253 VkResult result;
254
255 mtx_lock(&device->meta_state.mtx);
256 if (*pipeline) {
257 mtx_unlock(&device->meta_state.mtx);
258 return VK_SUCCESS;
259 }
260
261 build_color_shaders(&vs_nir, &fs_nir, frag_output);
262
263 const VkPipelineVertexInputStateCreateInfo vi_state = {
264 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
265 .vertexBindingDescriptionCount = 0,
266 .vertexAttributeDescriptionCount = 0,
267 };
268
269 const VkPipelineDepthStencilStateCreateInfo ds_state = {
270 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
271 .depthTestEnable = false,
272 .depthWriteEnable = false,
273 .depthBoundsTestEnable = false,
274 .stencilTestEnable = false,
275 };
276
277 VkPipelineColorBlendAttachmentState blend_attachment_state[MAX_RTS] = { 0 };
278 blend_attachment_state[frag_output] = (VkPipelineColorBlendAttachmentState) {
279 .blendEnable = false,
280 .colorWriteMask = VK_COLOR_COMPONENT_A_BIT |
281 VK_COLOR_COMPONENT_R_BIT |
282 VK_COLOR_COMPONENT_G_BIT |
283 VK_COLOR_COMPONENT_B_BIT,
284 };
285
286 const VkPipelineColorBlendStateCreateInfo cb_state = {
287 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
288 .logicOpEnable = false,
289 .attachmentCount = MAX_RTS,
290 .pAttachments = blend_attachment_state
291 };
292
293
294 struct radv_graphics_pipeline_create_info extra = {
295 .use_rectlist = true,
296 };
297 result = create_pipeline(device, radv_render_pass_from_handle(pass),
298 samples, vs_nir, fs_nir, &vi_state, &ds_state, &cb_state,
299 device->meta_state.clear_color_p_layout,
300 &extra, &device->meta_state.alloc, pipeline);
301
302 mtx_unlock(&device->meta_state.mtx);
303 return result;
304 }
305
306 static void
307 finish_meta_clear_htile_mask_state(struct radv_device *device)
308 {
309 struct radv_meta_state *state = &device->meta_state;
310
311 radv_DestroyPipeline(radv_device_to_handle(device),
312 state->clear_htile_mask_pipeline,
313 &state->alloc);
314 radv_DestroyPipelineLayout(radv_device_to_handle(device),
315 state->clear_htile_mask_p_layout,
316 &state->alloc);
317 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
318 state->clear_htile_mask_ds_layout,
319 &state->alloc);
320 }
321
322 void
323 radv_device_finish_meta_clear_state(struct radv_device *device)
324 {
325 struct radv_meta_state *state = &device->meta_state;
326
327 for (uint32_t i = 0; i < ARRAY_SIZE(state->clear); ++i) {
328 for (uint32_t j = 0; j < ARRAY_SIZE(state->clear[i].color_pipelines); ++j) {
329 radv_DestroyPipeline(radv_device_to_handle(device),
330 state->clear[i].color_pipelines[j],
331 &state->alloc);
332 radv_DestroyRenderPass(radv_device_to_handle(device),
333 state->clear[i].render_pass[j],
334 &state->alloc);
335 }
336
337 for (uint32_t j = 0; j < NUM_DEPTH_CLEAR_PIPELINES; j++) {
338 radv_DestroyPipeline(radv_device_to_handle(device),
339 state->clear[i].depth_only_pipeline[j],
340 &state->alloc);
341 radv_DestroyPipeline(radv_device_to_handle(device),
342 state->clear[i].stencil_only_pipeline[j],
343 &state->alloc);
344 radv_DestroyPipeline(radv_device_to_handle(device),
345 state->clear[i].depthstencil_pipeline[j],
346 &state->alloc);
347 }
348 radv_DestroyRenderPass(radv_device_to_handle(device),
349 state->clear[i].depthstencil_rp,
350 &state->alloc);
351 }
352 radv_DestroyPipelineLayout(radv_device_to_handle(device),
353 state->clear_color_p_layout,
354 &state->alloc);
355 radv_DestroyPipelineLayout(radv_device_to_handle(device),
356 state->clear_depth_p_layout,
357 &state->alloc);
358
359 finish_meta_clear_htile_mask_state(device);
360 }
361
362 static void
363 emit_color_clear(struct radv_cmd_buffer *cmd_buffer,
364 const VkClearAttachment *clear_att,
365 const VkClearRect *clear_rect,
366 uint32_t view_mask)
367 {
368 struct radv_device *device = cmd_buffer->device;
369 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
370 const uint32_t subpass_att = clear_att->colorAttachment;
371 const uint32_t pass_att = subpass->color_attachments[subpass_att].attachment;
372 const struct radv_image_view *iview = cmd_buffer->state.attachments ?
373 cmd_buffer->state.attachments[pass_att].iview : NULL;
374 uint32_t samples, samples_log2;
375 VkFormat format;
376 unsigned fs_key;
377 VkClearColorValue clear_value = clear_att->clearValue.color;
378 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
379 VkPipeline pipeline;
380
381 /* When a framebuffer is bound to the current command buffer, get the
382 * number of samples from it. Otherwise, get the number of samples from
383 * the render pass because it's likely a secondary command buffer.
384 */
385 if (iview) {
386 samples = iview->image->info.samples;
387 format = iview->vk_format;
388 } else {
389 samples = cmd_buffer->state.pass->attachments[pass_att].samples;
390 format = cmd_buffer->state.pass->attachments[pass_att].format;
391 }
392
393 samples_log2 = ffs(samples) - 1;
394 fs_key = radv_format_meta_fs_key(format);
395
396 if (fs_key == -1) {
397 radv_finishme("color clears incomplete");
398 return;
399 }
400
401 if (device->meta_state.clear[samples_log2].render_pass[fs_key] == VK_NULL_HANDLE) {
402 VkResult ret = create_color_renderpass(device, radv_fs_key_format_exemplars[fs_key],
403 samples,
404 &device->meta_state.clear[samples_log2].render_pass[fs_key]);
405 if (ret != VK_SUCCESS) {
406 cmd_buffer->record_result = ret;
407 return;
408 }
409 }
410
411 if (device->meta_state.clear[samples_log2].color_pipelines[fs_key] == VK_NULL_HANDLE) {
412 VkResult ret = create_color_pipeline(device, samples, 0,
413 &device->meta_state.clear[samples_log2].color_pipelines[fs_key],
414 device->meta_state.clear[samples_log2].render_pass[fs_key]);
415 if (ret != VK_SUCCESS) {
416 cmd_buffer->record_result = ret;
417 return;
418 }
419 }
420
421 pipeline = device->meta_state.clear[samples_log2].color_pipelines[fs_key];
422 if (!pipeline) {
423 radv_finishme("color clears incomplete");
424 return;
425 }
426 assert(samples_log2 < ARRAY_SIZE(device->meta_state.clear));
427 assert(pipeline);
428 assert(clear_att->aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
429 assert(clear_att->colorAttachment < subpass->color_count);
430
431 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
432 device->meta_state.clear_color_p_layout,
433 VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16,
434 &clear_value);
435
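/* Switch to a temporary subpass that binds only the attachment being
 * cleared, so the rectlist draw below writes a single color target.
 */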
436 struct radv_subpass clear_subpass = {
437 .color_count = 1,
438 .color_attachments = (struct radv_subpass_attachment[]) {
439 subpass->color_attachments[clear_att->colorAttachment]
440 },
441 .depth_stencil_attachment = NULL,
442 };
443
444 radv_cmd_buffer_set_subpass(cmd_buffer, &clear_subpass);
445
446 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
447 pipeline);
448
449 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
450 .x = clear_rect->rect.offset.x,
451 .y = clear_rect->rect.offset.y,
452 .width = clear_rect->rect.extent.width,
453 .height = clear_rect->rect.extent.height,
454 .minDepth = 0.0f,
455 .maxDepth = 1.0f
456 });
457
458 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &clear_rect->rect);
459
460 if (view_mask) {
461 unsigned i;
462 for_each_bit(i, view_mask)
463 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, i);
464 } else {
465 radv_CmdDraw(cmd_buffer_h, 3, clear_rect->layerCount, 0, clear_rect->baseArrayLayer);
466 }
467
468 radv_cmd_buffer_set_subpass(cmd_buffer, subpass);
469 }
470
471
472 static void
473 build_depthstencil_shader(struct nir_shader **out_vs, struct nir_shader **out_fs)
474 {
475 nir_builder vs_b, fs_b;
476
477 nir_builder_init_simple_shader(&vs_b, NULL, MESA_SHADER_VERTEX, NULL);
478 nir_builder_init_simple_shader(&fs_b, NULL, MESA_SHADER_FRAGMENT, NULL);
479
480 vs_b.shader->info.name = ralloc_strdup(vs_b.shader, "meta_clear_depthstencil_vs");
481 fs_b.shader->info.name = ralloc_strdup(fs_b.shader, "meta_clear_depthstencil_fs");
482 const struct glsl_type *position_out_type = glsl_vec4_type();
483
484 nir_variable *vs_out_pos =
485 nir_variable_create(vs_b.shader, nir_var_shader_out, position_out_type,
486 "gl_Position");
487 vs_out_pos->data.location = VARYING_SLOT_POS;
488
489 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(fs_b.shader, nir_intrinsic_load_push_constant);
490 nir_intrinsic_set_base(in_color_load, 0);
491 nir_intrinsic_set_range(in_color_load, 4);
492 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&fs_b, 0));
493 in_color_load->num_components = 1;
494 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 1, 32, "depth value");
495 nir_builder_instr_insert(&fs_b, &in_color_load->instr);
496
497 nir_variable *fs_out_depth =
498 nir_variable_create(fs_b.shader, nir_var_shader_out,
499 glsl_int_type(), "f_depth");
500 fs_out_depth->data.location = FRAG_RESULT_DEPTH;
501 nir_store_var(&fs_b, fs_out_depth, &in_color_load->dest.ssa, 0x1);
502
503 nir_ssa_def *outvec = radv_meta_gen_rect_vertices(&vs_b);
504 nir_store_var(&vs_b, vs_out_pos, outvec, 0xf);
505
506 const struct glsl_type *layer_type = glsl_int_type();
507 nir_variable *vs_out_layer =
508 nir_variable_create(vs_b.shader, nir_var_shader_out, layer_type,
509 "v_layer");
510 vs_out_layer->data.location = VARYING_SLOT_LAYER;
511 vs_out_layer->data.interpolation = INTERP_MODE_FLAT;
512 nir_ssa_def *inst_id = nir_load_instance_id(&vs_b);
513 nir_ssa_def *base_instance = nir_load_base_instance(&vs_b);
514
515 nir_ssa_def *layer_id = nir_iadd(&vs_b, inst_id, base_instance);
516 nir_store_var(&vs_b, vs_out_layer, layer_id, 0x1);
517
518 *out_vs = vs_b.shader;
519 *out_fs = fs_b.shader;
520 }
521
522 static VkResult
523 create_depthstencil_renderpass(struct radv_device *device,
524 uint32_t samples,
525 VkRenderPass *render_pass)
526 {
527 mtx_lock(&device->meta_state.mtx);
528 if (*render_pass) {
529 mtx_unlock(&device->meta_state.mtx);
530 return VK_SUCCESS;
531 }
532
533 VkResult result = radv_CreateRenderPass(radv_device_to_handle(device),
534 &(VkRenderPassCreateInfo) {
535 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
536 .attachmentCount = 1,
537 .pAttachments = &(VkAttachmentDescription) {
538 .format = VK_FORMAT_D32_SFLOAT_S8_UINT,
539 .samples = samples,
540 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
541 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
542 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
543 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
544 },
545 .subpassCount = 1,
546 .pSubpasses = &(VkSubpassDescription) {
547 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
548 .inputAttachmentCount = 0,
549 .colorAttachmentCount = 0,
550 .pColorAttachments = NULL,
551 .pResolveAttachments = NULL,
552 .pDepthStencilAttachment = &(VkAttachmentReference) {
553 .attachment = 0,
554 .layout = VK_IMAGE_LAYOUT_GENERAL,
555 },
556 .preserveAttachmentCount = 0,
557 .pPreserveAttachments = NULL,
558 },
559 .dependencyCount = 0,
560 }, &device->meta_state.alloc, render_pass);
561 mtx_unlock(&device->meta_state.mtx);
562 return result;
563 }
564
565 static VkResult
566 create_depthstencil_pipeline(struct radv_device *device,
567 VkImageAspectFlags aspects,
568 uint32_t samples,
569 int index,
570 VkPipeline *pipeline,
571 VkRenderPass render_pass)
572 {
573 struct nir_shader *vs_nir, *fs_nir;
574 VkResult result;
575
576 mtx_lock(&device->meta_state.mtx);
577 if (*pipeline) {
578 mtx_unlock(&device->meta_state.mtx);
579 return VK_SUCCESS;
580 }
581
582 build_depthstencil_shader(&vs_nir, &fs_nir);
583
584 const VkPipelineVertexInputStateCreateInfo vi_state = {
585 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
586 .vertexBindingDescriptionCount = 0,
587 .vertexAttributeDescriptionCount = 0,
588 };
589
590 const VkPipelineDepthStencilStateCreateInfo ds_state = {
591 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
592 .depthTestEnable = (aspects & VK_IMAGE_ASPECT_DEPTH_BIT),
593 .depthCompareOp = VK_COMPARE_OP_ALWAYS,
594 .depthWriteEnable = (aspects & VK_IMAGE_ASPECT_DEPTH_BIT),
595 .depthBoundsTestEnable = false,
596 .stencilTestEnable = (aspects & VK_IMAGE_ASPECT_STENCIL_BIT),
597 .front = {
598 .passOp = VK_STENCIL_OP_REPLACE,
599 .compareOp = VK_COMPARE_OP_ALWAYS,
600 .writeMask = UINT32_MAX,
601 .reference = 0, /* dynamic */
602 },
603 .back = { 0 /* don't care */ },
604 };
605
606 const VkPipelineColorBlendStateCreateInfo cb_state = {
607 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
608 .logicOpEnable = false,
609 .attachmentCount = 0,
610 .pAttachments = NULL,
611 };
612
613 struct radv_graphics_pipeline_create_info extra = {
614 .use_rectlist = true,
615 };
616
617 if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT) {
618 extra.db_depth_clear = index == DEPTH_CLEAR_SLOW ? false : true;
619 extra.db_depth_disable_expclear = index == DEPTH_CLEAR_FAST_NO_EXPCLEAR ? true : false;
620 }
621 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
622 extra.db_stencil_clear = index == DEPTH_CLEAR_SLOW ? false : true;
623 extra.db_stencil_disable_expclear = index == DEPTH_CLEAR_FAST_NO_EXPCLEAR ? true : false;
624 }
625 result = create_pipeline(device, radv_render_pass_from_handle(render_pass),
626 samples, vs_nir, fs_nir, &vi_state, &ds_state, &cb_state,
627 device->meta_state.clear_depth_p_layout,
628 &extra, &device->meta_state.alloc, pipeline);
629
630 mtx_unlock(&device->meta_state.mtx);
631 return result;
632 }
633
634 static bool depth_view_can_fast_clear(struct radv_cmd_buffer *cmd_buffer,
635 const struct radv_image_view *iview,
636 VkImageAspectFlags aspects,
637 VkImageLayout layout,
638 bool in_render_loop,
639 const VkClearRect *clear_rect,
640 VkClearDepthStencilValue clear_value)
641 {
642 if (!iview)
643 return false;
644
645 uint32_t queue_mask = radv_image_queue_family_mask(iview->image,
646 cmd_buffer->queue_family_index,
647 cmd_buffer->queue_family_index);
648 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
649 clear_rect->rect.extent.width != iview->extent.width ||
650 clear_rect->rect.extent.height != iview->extent.height)
651 return false;
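/* With TC-compatible HTILE, only the canonical fast-clear values
 * (0.0/1.0 for depth, 0 for stencil) can be fast cleared.
 */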
652 if (radv_image_is_tc_compat_htile(iview->image) &&
653 (((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) && clear_value.depth != 0.0 &&
654 clear_value.depth != 1.0) ||
655 ((aspects & VK_IMAGE_ASPECT_STENCIL_BIT) && clear_value.stencil != 0)))
656 return false;
657 if (radv_image_has_htile(iview->image) &&
658 iview->base_mip == 0 &&
659 iview->base_layer == 0 &&
660 iview->layer_count == iview->image->info.array_size &&
661 radv_layout_is_htile_compressed(iview->image, layout, in_render_loop, queue_mask) &&
662 radv_image_extent_compare(iview->image, &iview->extent))
663 return true;
664 return false;
665 }
666
667 static VkPipeline
668 pick_depthstencil_pipeline(struct radv_cmd_buffer *cmd_buffer,
669 struct radv_meta_state *meta_state,
670 const struct radv_image_view *iview,
671 int samples_log2,
672 VkImageAspectFlags aspects,
673 VkImageLayout layout,
674 bool in_render_loop,
675 const VkClearRect *clear_rect,
676 VkClearDepthStencilValue clear_value)
677 {
678 bool fast = depth_view_can_fast_clear(cmd_buffer, iview, aspects, layout,
679 in_render_loop, clear_rect, clear_value);
680 int index = DEPTH_CLEAR_SLOW;
681 VkPipeline *pipeline;
682
683 if (fast) {
684 /* we don't know the previous clear values, so we always use
685 * the NO_EXPCLEAR path */
686 index = DEPTH_CLEAR_FAST_NO_EXPCLEAR;
687 }
688
689 switch (aspects) {
690 case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
691 pipeline = &meta_state->clear[samples_log2].depthstencil_pipeline[index];
692 break;
693 case VK_IMAGE_ASPECT_DEPTH_BIT:
694 pipeline = &meta_state->clear[samples_log2].depth_only_pipeline[index];
695 break;
696 case VK_IMAGE_ASPECT_STENCIL_BIT:
697 pipeline = &meta_state->clear[samples_log2].stencil_only_pipeline[index];
698 break;
699 default:
700 unreachable("expected depth or stencil aspect");
701 }
702
703 if (cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp == VK_NULL_HANDLE) {
704 VkResult ret = create_depthstencil_renderpass(cmd_buffer->device, 1u << samples_log2,
705 &cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp);
706 if (ret != VK_SUCCESS) {
707 cmd_buffer->record_result = ret;
708 return VK_NULL_HANDLE;
709 }
710 }
711
712 if (*pipeline == VK_NULL_HANDLE) {
713 VkResult ret = create_depthstencil_pipeline(cmd_buffer->device, aspects, 1u << samples_log2, index,
714 pipeline, cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp);
715 if (ret != VK_SUCCESS) {
716 cmd_buffer->record_result = ret;
717 return VK_NULL_HANDLE;
718 }
719 }
720 return *pipeline;
721 }
722
723 static void
724 emit_depthstencil_clear(struct radv_cmd_buffer *cmd_buffer,
725 const VkClearAttachment *clear_att,
726 const VkClearRect *clear_rect,
727 struct radv_subpass_attachment *ds_att,
728 uint32_t view_mask)
729 {
730 struct radv_device *device = cmd_buffer->device;
731 struct radv_meta_state *meta_state = &device->meta_state;
732 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
733 const uint32_t pass_att = ds_att->attachment;
734 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
735 VkImageAspectFlags aspects = clear_att->aspectMask;
736 const struct radv_image_view *iview = cmd_buffer->state.attachments ?
737 cmd_buffer->state.attachments[pass_att].iview : NULL;
738 uint32_t samples, samples_log2;
739 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
740
741 /* When a framebuffer is bound to the current command buffer, get the
742 * number of samples from it. Otherwise, get the number of samples from
743 * the render pass because it's likely a secondary command buffer.
744 */
745 if (iview) {
746 samples = iview->image->info.samples;
747 } else {
748 samples = cmd_buffer->state.pass->attachments[pass_att].samples;
749 }
750
751 samples_log2 = ffs(samples) - 1;
752
753 assert(pass_att != VK_ATTACHMENT_UNUSED);
754
755 if (!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT))
756 clear_value.depth = 1.0f;
757
758 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
759 device->meta_state.clear_depth_p_layout,
760 VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4,
761 &clear_value.depth);
762
763 uint32_t prev_reference = cmd_buffer->state.dynamic.stencil_reference.front;
764 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
765 radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
766 clear_value.stencil);
767 }
768
769 VkPipeline pipeline = pick_depthstencil_pipeline(cmd_buffer,
770 meta_state,
771 iview,
772 samples_log2,
773 aspects,
774 ds_att->layout,
775 ds_att->in_render_loop,
776 clear_rect,
777 clear_value);
778 if (!pipeline)
779 return;
780
781 struct radv_subpass clear_subpass = {
782 .color_count = 0,
783 .color_attachments = NULL,
784 .depth_stencil_attachment = ds_att,
785 };
786
787 radv_cmd_buffer_set_subpass(cmd_buffer, &clear_subpass);
788
789 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
790 pipeline);
791
792 if (depth_view_can_fast_clear(cmd_buffer, iview, aspects,
793 ds_att->layout, ds_att->in_render_loop,
794 clear_rect, clear_value))
795 radv_update_ds_clear_metadata(cmd_buffer, iview,
796 clear_value, aspects);
797
798 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
799 .x = clear_rect->rect.offset.x,
800 .y = clear_rect->rect.offset.y,
801 .width = clear_rect->rect.extent.width,
802 .height = clear_rect->rect.extent.height,
803 .minDepth = 0.0f,
804 .maxDepth = 1.0f
805 });
806
807 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &clear_rect->rect);
808
809 if (view_mask) {
810 unsigned i;
811 for_each_bit(i, view_mask)
812 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, i);
813 } else {
814 radv_CmdDraw(cmd_buffer_h, 3, clear_rect->layerCount, 0, clear_rect->baseArrayLayer);
815 }
816
817 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
818 radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
819 prev_reference);
820 }
821
822 radv_cmd_buffer_set_subpass(cmd_buffer, subpass);
823 }
824
825 static uint32_t
826 clear_htile_mask(struct radv_cmd_buffer *cmd_buffer,
827 struct radeon_winsys_bo *bo, uint64_t offset, uint64_t size,
828 uint32_t htile_value, uint32_t htile_mask)
829 {
830 struct radv_device *device = cmd_buffer->device;
831 struct radv_meta_state *state = &device->meta_state;
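/* Each invocation of the clear shader rewrites one 16-byte vec4 of HTILE
 * data and a workgroup is 64 invocations wide, so one workgroup covers
 * 1024 bytes (see build_clear_htile_mask_shader()).
 */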
832 uint64_t block_count = round_up_u64(size, 1024);
833 struct radv_meta_saved_state saved_state;
834
835 radv_meta_save(&saved_state, cmd_buffer,
836 RADV_META_SAVE_COMPUTE_PIPELINE |
837 RADV_META_SAVE_CONSTANTS |
838 RADV_META_SAVE_DESCRIPTORS);
839
840 struct radv_buffer dst_buffer = {
841 .bo = bo,
842 .offset = offset,
843 .size = size
844 };
845
846 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
847 VK_PIPELINE_BIND_POINT_COMPUTE,
848 state->clear_htile_mask_pipeline);
849
850 radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
851 state->clear_htile_mask_p_layout,
852 0, /* set */
853 1, /* descriptorWriteCount */
854 (VkWriteDescriptorSet[]) {
855 {
856 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
857 .dstBinding = 0,
858 .dstArrayElement = 0,
859 .descriptorCount = 1,
860 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
861 .pBufferInfo = &(VkDescriptorBufferInfo) {
862 .buffer = radv_buffer_to_handle(&dst_buffer),
863 .offset = 0,
864 .range = size
865 }
866 }
867 });
868
869 const unsigned constants[2] = {
870 htile_value & htile_mask,
871 ~htile_mask,
872 };
873
874 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
875 state->clear_htile_mask_p_layout,
876 VK_SHADER_STAGE_COMPUTE_BIT, 0, 8,
877 constants);
878
879 radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);
880
881 radv_meta_restore(&saved_state, cmd_buffer);
882
883 return RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
884 RADV_CMD_FLAG_INV_VCACHE |
885 RADV_CMD_FLAG_WB_L2;
886 }
887
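/* Compute the 32-bit HTILE word used for a fast depth/stencil clear. The
 * word layout differs depending on whether the surface has stencil, and
 * only clears to 0.0/1.0 depth and 0 stencil are supported (see
 * radv_is_fast_clear_depth_allowed()/radv_is_fast_clear_stencil_allowed()).
 */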
888 static uint32_t
889 radv_get_htile_fast_clear_value(const struct radv_image *image,
890 VkClearDepthStencilValue value)
891 {
892 uint32_t clear_value;
893
894 if (!image->planes[0].surface.has_stencil) {
895 clear_value = value.depth ? 0xfffffff0 : 0;
896 } else {
897 clear_value = value.depth ? 0xfffc0000 : 0;
898 }
899
900 return clear_value;
901 }
902
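/* Return which bits of each HTILE word are owned by the given aspects.
 * Without stencil the whole word belongs to depth; with stencil, bits 4..9
 * (0x000003f0) hold the stencil metadata and the remaining bits (0xfffffc0f)
 * belong to depth.
 */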
903 static uint32_t
904 radv_get_htile_mask(const struct radv_image *image, VkImageAspectFlags aspects)
905 {
906 uint32_t mask = 0;
907
908 if (!image->planes[0].surface.has_stencil) {
909 /* The whole HTILE buffer is used when there is no stencil. */
910 mask = UINT32_MAX;
911 } else {
912 if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT)
913 mask |= 0xfffffc0f;
914 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT)
915 mask |= 0x000003f0;
916 }
917
918 return mask;
919 }
920
921 static bool
922 radv_is_fast_clear_depth_allowed(VkClearDepthStencilValue value)
923 {
924 return value.depth == 1.0f || value.depth == 0.0f;
925 }
926
927 static bool
928 radv_is_fast_clear_stencil_allowed(VkClearDepthStencilValue value)
929 {
930 return value.stencil == 0;
931 }
932
933 /**
934 * Determine if the given image can be fast cleared.
935 */
936 static bool
937 radv_image_can_fast_clear(struct radv_device *device, struct radv_image *image)
938 {
939 if (device->instance->debug_flags & RADV_DEBUG_NO_FAST_CLEARS)
940 return false;
941
942 if (vk_format_is_color(image->vk_format)) {
943 if (!radv_image_has_cmask(image) && !radv_image_has_dcc(image))
944 return false;
945
946 /* RB+ doesn't work with CMASK fast clear on Stoney. */
947 if (!radv_image_has_dcc(image) &&
948 device->physical_device->rad_info.family == CHIP_STONEY)
949 return false;
950 } else {
951 if (!radv_image_has_htile(image))
952 return false;
953 }
954
955 /* Do not fast clear 3D images. */
956 if (image->type == VK_IMAGE_TYPE_3D)
957 return false;
958
959 return true;
960 }
961
962 /**
963 * Determine if the given image view can be fast cleared.
964 */
965 static bool
966 radv_image_view_can_fast_clear(struct radv_device *device,
967 const struct radv_image_view *iview)
968 {
969 struct radv_image *image;
970
971 if (!iview)
972 return false;
973 image = iview->image;
974
975 /* Only fast clear if the image itself can be fast cleared. */
976 if (!radv_image_can_fast_clear(device, image))
977 return false;
978
979 /* Only fast clear if all layers are bound. */
980 if (iview->base_layer > 0 ||
981 iview->layer_count != image->info.array_size)
982 return false;
983
984 /* Only fast clear if the view covers the whole image. */
985 if (!radv_image_extent_compare(image, &iview->extent))
986 return false;
987
988 return true;
989 }
990
991 static bool
992 radv_can_fast_clear_depth(struct radv_cmd_buffer *cmd_buffer,
993 const struct radv_image_view *iview,
994 VkImageLayout image_layout,
995 bool in_render_loop,
996 VkImageAspectFlags aspects,
997 const VkClearRect *clear_rect,
998 const VkClearDepthStencilValue clear_value,
999 uint32_t view_mask)
1000 {
1001 if (!radv_image_view_can_fast_clear(cmd_buffer->device, iview))
1002 return false;
1003
1004 if (!radv_layout_is_htile_compressed(iview->image, image_layout, in_render_loop,
1005 radv_image_queue_family_mask(iview->image,
1006 cmd_buffer->queue_family_index,
1007 cmd_buffer->queue_family_index)))
1008 return false;
1009
1010 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
1011 clear_rect->rect.extent.width != iview->image->info.width ||
1012 clear_rect->rect.extent.height != iview->image->info.height)
1013 return false;
1014
1015 if (view_mask && (iview->image->info.array_size >= 32 ||
1016 (1u << iview->image->info.array_size) - 1u != view_mask))
1017 return false;
1018 if (!view_mask && clear_rect->baseArrayLayer != 0)
1019 return false;
1020 if (!view_mask && clear_rect->layerCount != iview->image->info.array_size)
1021 return false;
1022
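/* On GFX8 and older, depth and stencil cannot be fast cleared separately
 * because the HTILE words are only overwritten as a whole; GFX9+ can update
 * just the depth or stencil bits through clear_htile_mask().
 */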
1023 if (cmd_buffer->device->physical_device->rad_info.chip_class < GFX9 &&
1024 (!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT) ||
1025 ((vk_format_aspects(iview->image->vk_format) & VK_IMAGE_ASPECT_STENCIL_BIT) &&
1026 !(aspects & VK_IMAGE_ASPECT_STENCIL_BIT))))
1027 return false;
1028
1029 if (((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) &&
1030 !radv_is_fast_clear_depth_allowed(clear_value)) ||
1031 ((aspects & VK_IMAGE_ASPECT_STENCIL_BIT) &&
1032 !radv_is_fast_clear_stencil_allowed(clear_value)))
1033 return false;
1034
1035 return true;
1036 }
1037
1038 static void
1039 radv_fast_clear_depth(struct radv_cmd_buffer *cmd_buffer,
1040 const struct radv_image_view *iview,
1041 const VkClearAttachment *clear_att,
1042 enum radv_cmd_flush_bits *pre_flush,
1043 enum radv_cmd_flush_bits *post_flush)
1044 {
1045 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
1046 VkImageAspectFlags aspects = clear_att->aspectMask;
1047 uint32_t clear_word, flush_bits;
1048 uint32_t htile_mask;
1049
1050 clear_word = radv_get_htile_fast_clear_value(iview->image, clear_value);
1051 htile_mask = radv_get_htile_mask(iview->image, aspects);
1052
1053 if (pre_flush) {
1054 cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_DB |
1055 RADV_CMD_FLAG_FLUSH_AND_INV_DB_META) & ~ *pre_flush;
1056 *pre_flush |= cmd_buffer->state.flush_bits;
1057 }
1058
1059 if (htile_mask == UINT_MAX) {
1060 /* Clear the whole HTILE buffer. */
1061 flush_bits = radv_fill_buffer(cmd_buffer, iview->image->bo,
1062 iview->image->offset + iview->image->htile_offset,
1063 iview->image->planes[0].surface.htile_size, clear_word);
1064 } else {
1065 /* Only clear depth or stencil bytes in the HTILE buffer. */
1066 assert(cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9);
1067 flush_bits = clear_htile_mask(cmd_buffer, iview->image->bo,
1068 iview->image->offset + iview->image->htile_offset,
1069 iview->image->planes[0].surface.htile_size, clear_word,
1070 htile_mask);
1071 }
1072
1073 radv_update_ds_clear_metadata(cmd_buffer, iview, clear_value, aspects);
1074 if (post_flush) {
1075 *post_flush |= flush_bits;
1076 }
1077 }
1078
1079 static nir_shader *
1080 build_clear_htile_mask_shader()
1081 {
1082 nir_builder b;
1083
1084 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
1085 b.shader->info.name = ralloc_strdup(b.shader, "meta_clear_htile_mask");
1086 b.shader->info.cs.local_size[0] = 64;
1087 b.shader->info.cs.local_size[1] = 1;
1088 b.shader->info.cs.local_size[2] = 1;
1089
1090 nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
1091 nir_ssa_def *wg_id = nir_load_work_group_id(&b);
1092 nir_ssa_def *block_size = nir_imm_ivec4(&b,
1093 b.shader->info.cs.local_size[0],
1094 b.shader->info.cs.local_size[1],
1095 b.shader->info.cs.local_size[2], 0);
1096
1097 nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);
1098
1099 nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
1100 offset = nir_channel(&b, offset, 0);
1101
1102 nir_intrinsic_instr *buf =
1103 nir_intrinsic_instr_create(b.shader,
1104 nir_intrinsic_vulkan_resource_index);
1105
1106 buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
1107 buf->num_components = 1;
1108 nir_intrinsic_set_desc_set(buf, 0);
1109 nir_intrinsic_set_binding(buf, 0);
1110 nir_ssa_dest_init(&buf->instr, &buf->dest, buf->num_components, 32, NULL);
1111 nir_builder_instr_insert(&b, &buf->instr);
1112
1113 nir_intrinsic_instr *constants =
1114 nir_intrinsic_instr_create(b.shader,
1115 nir_intrinsic_load_push_constant);
1116 nir_intrinsic_set_base(constants, 0);
1117 nir_intrinsic_set_range(constants, 8);
1118 constants->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
1119 constants->num_components = 2;
1120 nir_ssa_dest_init(&constants->instr, &constants->dest, 2, 32, "constants");
1121 nir_builder_instr_insert(&b, &constants->instr);
1122
1123 nir_intrinsic_instr *load =
1124 nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_ssbo);
1125 load->src[0] = nir_src_for_ssa(&buf->dest.ssa);
1126 load->src[1] = nir_src_for_ssa(offset);
1127 nir_ssa_dest_init(&load->instr, &load->dest, 4, 32, NULL);
1128 load->num_components = 4;
1129 nir_builder_instr_insert(&b, &load->instr);
1130
1131 /* data = (data & ~htile_mask) | (htile_value & htile_mask) */
1132 nir_ssa_def *data =
1133 nir_iand(&b, &load->dest.ssa,
1134 nir_channel(&b, &constants->dest.ssa, 1));
1135 data = nir_ior(&b, data, nir_channel(&b, &constants->dest.ssa, 0));
1136
1137 nir_intrinsic_instr *store =
1138 nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
1139 store->src[0] = nir_src_for_ssa(data);
1140 store->src[1] = nir_src_for_ssa(&buf->dest.ssa);
1141 store->src[2] = nir_src_for_ssa(offset);
1142 nir_intrinsic_set_write_mask(store, 0xf);
1143 nir_intrinsic_set_access(store, ACCESS_NON_READABLE);
1144 store->num_components = 4;
1145 nir_builder_instr_insert(&b, &store->instr);
1146
1147 return b.shader;
1148 }
1149
1150 static VkResult
1151 init_meta_clear_htile_mask_state(struct radv_device *device)
1152 {
1153 struct radv_meta_state *state = &device->meta_state;
1154 struct radv_shader_module cs = { .nir = NULL };
1155 VkResult result;
1156
1157 cs.nir = build_clear_htile_mask_shader();
1158
1159 VkDescriptorSetLayoutCreateInfo ds_layout_info = {
1160 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
1161 .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
1162 .bindingCount = 1,
1163 .pBindings = (VkDescriptorSetLayoutBinding[]) {
1164 {
1165 .binding = 0,
1166 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
1167 .descriptorCount = 1,
1168 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
1169 .pImmutableSamplers = NULL
1170 },
1171 }
1172 };
1173
1174 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
1175 &ds_layout_info, &state->alloc,
1176 &state->clear_htile_mask_ds_layout);
1177 if (result != VK_SUCCESS)
1178 goto fail;
1179
1180 VkPipelineLayoutCreateInfo p_layout_info = {
1181 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1182 .setLayoutCount = 1,
1183 .pSetLayouts = &state->clear_htile_mask_ds_layout,
1184 .pushConstantRangeCount = 1,
1185 .pPushConstantRanges = &(VkPushConstantRange){
1186 VK_SHADER_STAGE_COMPUTE_BIT, 0, 8,
1187 },
1188 };
1189
1190 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
1191 &p_layout_info, &state->alloc,
1192 &state->clear_htile_mask_p_layout);
1193 if (result != VK_SUCCESS)
1194 goto fail;
1195
1196 VkPipelineShaderStageCreateInfo shader_stage = {
1197 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
1198 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
1199 .module = radv_shader_module_to_handle(&cs),
1200 .pName = "main",
1201 .pSpecializationInfo = NULL,
1202 };
1203
1204 VkComputePipelineCreateInfo pipeline_info = {
1205 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
1206 .stage = shader_stage,
1207 .flags = 0,
1208 .layout = state->clear_htile_mask_p_layout,
1209 };
1210
1211 result = radv_CreateComputePipelines(radv_device_to_handle(device),
1212 radv_pipeline_cache_to_handle(&state->cache),
1213 1, &pipeline_info, NULL,
1214 &state->clear_htile_mask_pipeline);
1215
1216 ralloc_free(cs.nir);
1217 return result;
1218 fail:
1219 ralloc_free(cs.nir);
1220 return result;
1221 }
1222
1223 VkResult
1224 radv_device_init_meta_clear_state(struct radv_device *device, bool on_demand)
1225 {
1226 VkResult res;
1227 struct radv_meta_state *state = &device->meta_state;
1228
1229 VkPipelineLayoutCreateInfo pl_color_create_info = {
1230 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1231 .setLayoutCount = 0,
1232 .pushConstantRangeCount = 1,
1233 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16},
1234 };
1235
1236 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1237 &pl_color_create_info,
1238 &device->meta_state.alloc,
1239 &device->meta_state.clear_color_p_layout);
1240 if (res != VK_SUCCESS)
1241 goto fail;
1242
1243 VkPipelineLayoutCreateInfo pl_depth_create_info = {
1244 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1245 .setLayoutCount = 0,
1246 .pushConstantRangeCount = 1,
1247 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
1248 };
1249
1250 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1251 &pl_depth_create_info,
1252 &device->meta_state.alloc,
1253 &device->meta_state.clear_depth_p_layout);
1254 if (res != VK_SUCCESS)
1255 goto fail;
1256
1257 res = init_meta_clear_htile_mask_state(device);
1258 if (res != VK_SUCCESS)
1259 goto fail;
1260
1261 if (on_demand)
1262 return VK_SUCCESS;
1263
1264 for (uint32_t i = 0; i < ARRAY_SIZE(state->clear); ++i) {
1265 uint32_t samples = 1 << i;
1266 for (uint32_t j = 0; j < NUM_META_FS_KEYS; ++j) {
1267 VkFormat format = radv_fs_key_format_exemplars[j];
1268 unsigned fs_key = radv_format_meta_fs_key(format);
1269 assert(!state->clear[i].color_pipelines[fs_key]);
1270
1271 res = create_color_renderpass(device, format, samples,
1272 &state->clear[i].render_pass[fs_key]);
1273 if (res != VK_SUCCESS)
1274 goto fail;
1275
1276 res = create_color_pipeline(device, samples, 0, &state->clear[i].color_pipelines[fs_key],
1277 state->clear[i].render_pass[fs_key]);
1278 if (res != VK_SUCCESS)
1279 goto fail;
1280
1281 }
1282
1283 res = create_depthstencil_renderpass(device,
1284 samples,
1285 &state->clear[i].depthstencil_rp);
1286 if (res != VK_SUCCESS)
1287 goto fail;
1288
1289 for (uint32_t j = 0; j < NUM_DEPTH_CLEAR_PIPELINES; j++) {
1290 res = create_depthstencil_pipeline(device,
1291 VK_IMAGE_ASPECT_DEPTH_BIT,
1292 samples,
1293 j,
1294 &state->clear[i].depth_only_pipeline[j],
1295 state->clear[i].depthstencil_rp);
1296 if (res != VK_SUCCESS)
1297 goto fail;
1298
1299 res = create_depthstencil_pipeline(device,
1300 VK_IMAGE_ASPECT_STENCIL_BIT,
1301 samples,
1302 j,
1303 &state->clear[i].stencil_only_pipeline[j],
1304 state->clear[i].depthstencil_rp);
1305 if (res != VK_SUCCESS)
1306 goto fail;
1307
1308 res = create_depthstencil_pipeline(device,
1309 VK_IMAGE_ASPECT_DEPTH_BIT |
1310 VK_IMAGE_ASPECT_STENCIL_BIT,
1311 samples,
1312 j,
1313 &state->clear[i].depthstencil_pipeline[j],
1314 state->clear[i].depthstencil_rp);
1315 if (res != VK_SUCCESS)
1316 goto fail;
1317 }
1318 }
1319 return VK_SUCCESS;
1320
1321 fail:
1322 radv_device_finish_meta_clear_state(device);
1323 return res;
1324 }
1325
1326 static uint32_t
1327 radv_get_cmask_fast_clear_value(const struct radv_image *image)
1328 {
1329 uint32_t value = 0; /* Default value when no DCC. */
1330
1331 /* The fast-clear value is different for images that have both DCC and
1332 * CMASK metadata.
1333 */
1334 if (radv_image_has_dcc(image)) {
1335 /* DCC fast clear with MSAA should clear CMASK to 0xC. */
1336 return image->info.samples > 1 ? 0xcccccccc : 0xffffffff;
1337 }
1338
1339 return value;
1340 }
1341
1342 uint32_t
1343 radv_clear_cmask(struct radv_cmd_buffer *cmd_buffer,
1344 struct radv_image *image,
1345 const VkImageSubresourceRange *range, uint32_t value)
1346 {
1347 uint64_t offset = image->offset + image->cmask_offset;
1348 uint64_t size;
1349
1350 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1351 /* TODO: clear layers. */
1352 size = image->planes[0].surface.cmask_size;
1353 } else {
1354 unsigned cmask_slice_size =
1355 image->planes[0].surface.cmask_slice_size;
1356
1357 offset += cmask_slice_size * range->baseArrayLayer;
1358 size = cmask_slice_size * radv_get_layerCount(image, range);
1359 }
1360
1361 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1362 }
1363
1364
1365 uint32_t
1366 radv_clear_fmask(struct radv_cmd_buffer *cmd_buffer,
1367 struct radv_image *image,
1368 const VkImageSubresourceRange *range, uint32_t value)
1369 {
1370 uint64_t offset = image->offset + image->fmask_offset;
1371 uint64_t size;
1372
1373 /* MSAA images do not support mipmap levels. */
1374 assert(range->baseMipLevel == 0 &&
1375 radv_get_levelCount(image, range) == 1);
1376
1377 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1378 /* TODO: clear layers. */
1379 size = image->planes[0].surface.fmask_size;
1380 } else {
1381 unsigned fmask_slice_size =
1382 image->planes[0].surface.u.legacy.fmask.slice_size;
1383
1384
1385 offset += fmask_slice_size * range->baseArrayLayer;
1386 size = fmask_slice_size * radv_get_layerCount(image, range);
1387 }
1388
1389 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1390 }
1391
1392 uint32_t
1393 radv_clear_dcc(struct radv_cmd_buffer *cmd_buffer,
1394 struct radv_image *image,
1395 const VkImageSubresourceRange *range, uint32_t value)
1396 {
1397 uint32_t level_count = radv_get_levelCount(image, range);
1398 uint32_t flush_bits = 0;
1399
1400 /* Mark the image as being compressed. */
1401 radv_update_dcc_metadata(cmd_buffer, image, range, true);
1402
1403 for (uint32_t l = 0; l < level_count; l++) {
1404 uint64_t offset = image->offset + image->dcc_offset;
1405 uint32_t level = range->baseMipLevel + l;
1406 uint64_t size;
1407
1408 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1409 /* Mipmap levels aren't implemented. */
1410 assert(level == 0);
1411 size = image->planes[0].surface.dcc_size;
1412 } else {
1413 const struct legacy_surf_level *surf_level =
1414 &image->planes[0].surface.u.legacy.level[level];
1415
1416 /* If dcc_fast_clear_size is 0 (which might happen for
1417 * mipmaps) the fill buffer operation below is a no-op.
1418 * This can only happen during initialization as the
1419 * fast clear path falls back to slow clears if one
1420 * level can't be fast cleared.
1421 */
1422 offset += surf_level->dcc_offset +
1423 surf_level->dcc_slice_fast_clear_size * range->baseArrayLayer;
1424 size = surf_level->dcc_slice_fast_clear_size * radv_get_layerCount(image, range);
1425 }
1426
1427 flush_bits |= radv_fill_buffer(cmd_buffer, image->bo, offset,
1428 size, value);
1429 }
1430
1431 return flush_bits;
1432 }
1433
1434 uint32_t
1435 radv_clear_htile(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
1436 const VkImageSubresourceRange *range, uint32_t value)
1437 {
1438 unsigned layer_count = radv_get_layerCount(image, range);
1439 uint64_t size = image->planes[0].surface.htile_slice_size * layer_count;
1440 uint64_t offset = image->offset + image->htile_offset +
1441 image->planes[0].surface.htile_slice_size * range->baseArrayLayer;
1442
1443 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1444 }
1445
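/* Per-byte DCC clear codes written by radv_clear_dcc(). 0x20 ("reg") means
 * the clear color must be fetched from the CB clear color registers, which
 * requires a fast-clear eliminate before other accesses; the 0x80/0x40 bits
 * mark the main (color) and secondary (extra/alpha) channels as all-ones
 * clears that can be expanded without an eliminate pass (see
 * vi_get_fast_clear_parameters()).
 */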
1446 enum {
1447 RADV_DCC_CLEAR_REG = 0x20202020U,
1448 RADV_DCC_CLEAR_MAIN_1 = 0x80808080U,
1449 RADV_DCC_CLEAR_SECONDARY_1 = 0x40404040U
1450 };
1451
1452 static void vi_get_fast_clear_parameters(struct radv_device *device,
1453 VkFormat image_format,
1454 VkFormat view_format,
1455 const VkClearColorValue *clear_value,
1456 uint32_t* reset_value,
1457 bool *can_avoid_fast_clear_elim)
1458 {
1459 bool values[4] = {};
1460 int extra_channel;
1461 bool main_value = false;
1462 bool extra_value = false;
1463 bool has_color = false;
1464 bool has_alpha = false;
1465 int i;
1466 *can_avoid_fast_clear_elim = false;
1467
1468 *reset_value = RADV_DCC_CLEAR_REG;
1469
1470 const struct vk_format_description *desc = vk_format_description(view_format);
1471 if (view_format == VK_FORMAT_B10G11R11_UFLOAT_PACK32 ||
1472 view_format == VK_FORMAT_R5G6B5_UNORM_PACK16 ||
1473 view_format == VK_FORMAT_B5G6R5_UNORM_PACK16)
1474 extra_channel = -1;
1475 else if (desc->layout == VK_FORMAT_LAYOUT_PLAIN) {
1476 if (vi_alpha_is_on_msb(device, view_format))
1477 extra_channel = desc->nr_channels - 1;
1478 else
1479 extra_channel = 0;
1480 } else
1481 return;
1482
1483 for (i = 0; i < 4; i++) {
1484 int index = desc->swizzle[i] - VK_SWIZZLE_X;
1485 if (desc->swizzle[i] < VK_SWIZZLE_X ||
1486 desc->swizzle[i] > VK_SWIZZLE_W)
1487 continue;
1488
1489 if (desc->channel[i].pure_integer &&
1490 desc->channel[i].type == VK_FORMAT_TYPE_SIGNED) {
1491 /* Use the maximum value for clamping the clear color. */
1492 int max = u_bit_consecutive(0, desc->channel[i].size - 1);
1493
1494 values[i] = clear_value->int32[i] != 0;
1495 if (clear_value->int32[i] != 0 && MIN2(clear_value->int32[i], max) != max)
1496 return;
1497 } else if (desc->channel[i].pure_integer &&
1498 desc->channel[i].type == VK_FORMAT_TYPE_UNSIGNED) {
1499 /* Use the maximum value for clamping the clear color. */
1500 unsigned max = u_bit_consecutive(0, desc->channel[i].size);
1501
1502 values[i] = clear_value->uint32[i] != 0U;
1503 if (clear_value->uint32[i] != 0U && MIN2(clear_value->uint32[i], max) != max)
1504 return;
1505 } else {
1506 values[i] = clear_value->float32[i] != 0.0F;
1507 if (clear_value->float32[i] != 0.0F && clear_value->float32[i] != 1.0F)
1508 return;
1509 }
1510
1511 if (index == extra_channel) {
1512 extra_value = values[i];
1513 has_alpha = true;
1514 } else {
1515 main_value = values[i];
1516 has_color = true;
1517 }
1518 }
1519
1520 /* If alpha isn't present, make it the same as color, and vice versa. */
1521 if (!has_alpha)
1522 extra_value = main_value;
1523 else if (!has_color)
1524 main_value = extra_value;
1525
1526 for (int i = 0; i < 4; ++i)
1527 if (values[i] != main_value &&
1528 desc->swizzle[i] - VK_SWIZZLE_X != extra_channel &&
1529 desc->swizzle[i] >= VK_SWIZZLE_X &&
1530 desc->swizzle[i] <= VK_SWIZZLE_W)
1531 return;
1532
1533 *can_avoid_fast_clear_elim = true;
1534 *reset_value = 0;
1535 if (main_value)
1536 *reset_value |= RADV_DCC_CLEAR_MAIN_1;
1537
1538 if (extra_value)
1539 *reset_value |= RADV_DCC_CLEAR_SECONDARY_1;
1540 return;
1541 }
1542
1543 static bool
1544 radv_can_fast_clear_color(struct radv_cmd_buffer *cmd_buffer,
1545 const struct radv_image_view *iview,
1546 VkImageLayout image_layout,
1547 bool in_render_loop,
1548 const VkClearRect *clear_rect,
1549 VkClearColorValue clear_value,
1550 uint32_t view_mask)
1551 {
1552 uint32_t clear_color[2];
1553
1554 if (!radv_image_view_can_fast_clear(cmd_buffer->device, iview))
1555 return false;
1556
1557 if (!radv_layout_can_fast_clear(iview->image, image_layout, in_render_loop,
1558 radv_image_queue_family_mask(iview->image,
1559 cmd_buffer->queue_family_index,
1560 cmd_buffer->queue_family_index)))
1561 return false;
1562
1563 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
1564 clear_rect->rect.extent.width != iview->image->info.width ||
1565 clear_rect->rect.extent.height != iview->image->info.height)
1566 return false;
1567
1568 if (view_mask && (iview->image->info.array_size >= 32 ||
1569 (1u << iview->image->info.array_size) - 1u != view_mask))
1570 return false;
1571 if (!view_mask && clear_rect->baseArrayLayer != 0)
1572 return false;
1573 if (!view_mask && clear_rect->layerCount != iview->image->info.array_size)
1574 return false;
1575
1576 /* DCC */
1577 if (!radv_format_pack_clear_color(iview->vk_format,
1578 clear_color, &clear_value))
1579 return false;
1580
1581 if (radv_dcc_enabled(iview->image, iview->base_mip)) {
1582 bool can_avoid_fast_clear_elim;
1583 uint32_t reset_value;
1584
1585 vi_get_fast_clear_parameters(cmd_buffer->device,
1586 iview->image->vk_format,
1587 iview->vk_format,
1588 &clear_value, &reset_value,
1589 &can_avoid_fast_clear_elim);
1590
1591 if (iview->image->info.samples > 1) {
1592 /* DCC fast clear with MSAA should clear CMASK. */
1593 /* FIXME: This doesn't work for now. There is a
1594 * hardware bug with fast clears and DCC for MSAA
1595 * textures. AMDVLK has a workaround but it doesn't
1596 * seem to work here. Note that we might emit useless
1597 * CB flushes but that shouldn't matter.
1598 */
1599 if (!can_avoid_fast_clear_elim)
1600 return false;
1601 }
1602
1603 if (iview->image->info.levels > 1 &&
1604 cmd_buffer->device->physical_device->rad_info.chip_class == GFX8) {
1605 for (uint32_t l = 0; l < iview->level_count; l++) {
1606 uint32_t level = iview->base_mip + l;
1607 struct legacy_surf_level *surf_level =
1608 &iview->image->planes[0].surface.u.legacy.level[level];
1609
1610 /* Do not fast clear if one level can't be
1611 * fast cleared.
1612 */
1613 if (!surf_level->dcc_fast_clear_size)
1614 return false;
1615 }
1616 }
1617 }
1618
1619 return true;
1620 }
1621
1622
1623 static void
1624 radv_fast_clear_color(struct radv_cmd_buffer *cmd_buffer,
1625 const struct radv_image_view *iview,
1626 const VkClearAttachment *clear_att,
1627 uint32_t subpass_att,
1628 enum radv_cmd_flush_bits *pre_flush,
1629 enum radv_cmd_flush_bits *post_flush)
1630 {
1631 VkClearColorValue clear_value = clear_att->clearValue.color;
1632 uint32_t clear_color[2], flush_bits = 0;
1633 uint32_t cmask_clear_value;
1634 VkImageSubresourceRange range = {
1635 .aspectMask = iview->aspect_mask,
1636 .baseMipLevel = iview->base_mip,
1637 .levelCount = iview->level_count,
1638 .baseArrayLayer = iview->base_layer,
1639 .layerCount = iview->layer_count,
1640 };
1641
1642 if (pre_flush) {
1643 cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_CB |
1644 RADV_CMD_FLAG_FLUSH_AND_INV_CB_META) & ~ *pre_flush;
1645 *pre_flush |= cmd_buffer->state.flush_bits;
1646 }
1647
1648 /* DCC */
1649 radv_format_pack_clear_color(iview->vk_format, clear_color, &clear_value);
1650
1651 cmask_clear_value = radv_get_cmask_fast_clear_value(iview->image);
1652
1653 /* clear cmask buffer */
1654 if (radv_dcc_enabled(iview->image, iview->base_mip)) {
1655 uint32_t reset_value;
1656 bool can_avoid_fast_clear_elim;
1657 bool need_decompress_pass = false;
1658
1659 vi_get_fast_clear_parameters(cmd_buffer->device,
1660 iview->image->vk_format,
1661 iview->vk_format,
1662 &clear_value, &reset_value,
1663 &can_avoid_fast_clear_elim);
1664
1665 if (radv_image_has_cmask(iview->image)) {
1666 flush_bits = radv_clear_cmask(cmd_buffer, iview->image,
1667 &range, cmask_clear_value);
1668
1669 need_decompress_pass = true;
1670 }
1671
1672 if (!can_avoid_fast_clear_elim)
1673 need_decompress_pass = true;
1674
1675 flush_bits |= radv_clear_dcc(cmd_buffer, iview->image, &range,
1676 reset_value);
1677
1678 radv_update_fce_metadata(cmd_buffer, iview->image, &range,
1679 need_decompress_pass);
1680 } else {
1681 flush_bits = radv_clear_cmask(cmd_buffer, iview->image,
1682 &range, cmask_clear_value);
1683 }
1684
1685 if (post_flush) {
1686 *post_flush |= flush_bits;
1687 }
1688
1689 radv_update_color_clear_metadata(cmd_buffer, iview, subpass_att,
1690 clear_color);
1691 }
1692
1693 /**
1694 * The parameters mean the same as those in vkCmdClearAttachments.
1695 */
1696 static void
1697 emit_clear(struct radv_cmd_buffer *cmd_buffer,
1698 const VkClearAttachment *clear_att,
1699 const VkClearRect *clear_rect,
1700 enum radv_cmd_flush_bits *pre_flush,
1701 enum radv_cmd_flush_bits *post_flush,
1702 uint32_t view_mask,
1703 bool ds_resolve_clear)
1704 {
1705 const struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
1706 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
1707 VkImageAspectFlags aspects = clear_att->aspectMask;
1708
1709 if (aspects & VK_IMAGE_ASPECT_COLOR_BIT) {
1710 const uint32_t subpass_att = clear_att->colorAttachment;
1711 assert(subpass_att < subpass->color_count);
1712 const uint32_t pass_att = subpass->color_attachments[subpass_att].attachment;
1713 if (pass_att == VK_ATTACHMENT_UNUSED)
1714 return;
1715
1716 VkImageLayout image_layout = subpass->color_attachments[subpass_att].layout;
1717 bool in_render_loop = subpass->color_attachments[subpass_att].in_render_loop;
1718 const struct radv_image_view *iview = fb ? cmd_buffer->state.attachments[pass_att].iview : NULL;
1719 VkClearColorValue clear_value = clear_att->clearValue.color;
1720
1721 if (radv_can_fast_clear_color(cmd_buffer, iview, image_layout, in_render_loop,
1722 clear_rect, clear_value, view_mask)) {
1723 radv_fast_clear_color(cmd_buffer, iview, clear_att,
1724 subpass_att, pre_flush,
1725 post_flush);
1726 } else {
1727 emit_color_clear(cmd_buffer, clear_att, clear_rect, view_mask);
1728 }
1729 } else {
1730 struct radv_subpass_attachment *ds_att = subpass->depth_stencil_attachment;
1731
1732 if (ds_resolve_clear)
1733 ds_att = subpass->ds_resolve_attachment;
1734
1735 if (!ds_att || ds_att->attachment == VK_ATTACHMENT_UNUSED)
1736 return;
1737
1738 VkImageLayout image_layout = ds_att->layout;
1739 bool in_render_loop = ds_att->in_render_loop;
1740 const struct radv_image_view *iview = fb ? cmd_buffer->state.attachments[ds_att->attachment].iview : NULL;
1741 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
1742
1743 assert(aspects & (VK_IMAGE_ASPECT_DEPTH_BIT |
1744 VK_IMAGE_ASPECT_STENCIL_BIT));
1745
1746 if (radv_can_fast_clear_depth(cmd_buffer, iview, image_layout,
1747 in_render_loop, aspects, clear_rect,
1748 clear_value, view_mask)) {
1749 radv_fast_clear_depth(cmd_buffer, iview, clear_att,
1750 pre_flush, post_flush);
1751 } else {
1752 emit_depthstencil_clear(cmd_buffer, clear_att, clear_rect,
1753 ds_att, view_mask);
1754 }
1755 }
1756 }
1757
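/* Return whether attachment 'a' still has pending clear aspects (or, with
 * multiview, views that haven't been cleared yet).
 */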
1758 static inline bool
1759 radv_attachment_needs_clear(struct radv_cmd_state *cmd_state, uint32_t a)
1760 {
1761 uint32_t view_mask = cmd_state->subpass->view_mask;
1762 return (a != VK_ATTACHMENT_UNUSED &&
1763 cmd_state->attachments[a].pending_clear_aspects &&
1764 (!view_mask || (view_mask & ~cmd_state->attachments[a].cleared_views)));
1765 }
1766
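/* Return whether any attachment referenced by the current subpass still has
 * a pending clear.
 */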
1767 static bool
1768 radv_subpass_needs_clear(struct radv_cmd_buffer *cmd_buffer)
1769 {
1770 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1771 uint32_t a;
1772
1773 if (!cmd_state->subpass)
1774 return false;
1775
1776 for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
1777 a = cmd_state->subpass->color_attachments[i].attachment;
1778 if (radv_attachment_needs_clear(cmd_state, a))
1779 return true;
1780 }
1781
1782 if (cmd_state->subpass->depth_stencil_attachment) {
1783 a = cmd_state->subpass->depth_stencil_attachment->attachment;
1784 if (radv_attachment_needs_clear(cmd_state, a))
1785 return true;
1786 }
1787
1788 if (!cmd_state->subpass->ds_resolve_attachment)
1789 return false;
1790
1791 a = cmd_state->subpass->ds_resolve_attachment->attachment;
1792 return radv_attachment_needs_clear(cmd_state, a);
1793 }
1794
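/* Clear one subpass attachment over the whole render area and mark it as
 * cleared (per view when multiview is enabled).
 */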
1795 static void
1796 radv_subpass_clear_attachment(struct radv_cmd_buffer *cmd_buffer,
1797 struct radv_attachment_state *attachment,
1798 const VkClearAttachment *clear_att,
1799 enum radv_cmd_flush_bits *pre_flush,
1800 enum radv_cmd_flush_bits *post_flush,
1801 bool ds_resolve_clear)
1802 {
1803 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1804 uint32_t view_mask = cmd_state->subpass->view_mask;
1805
1806 VkClearRect clear_rect = {
1807 .rect = cmd_state->render_area,
1808 .baseArrayLayer = 0,
1809 .layerCount = cmd_state->framebuffer->layers,
1810 };
1811
1812 emit_clear(cmd_buffer, clear_att, &clear_rect, pre_flush, post_flush,
1813 view_mask & ~attachment->cleared_views, ds_resolve_clear);
1814 if (view_mask)
1815 attachment->cleared_views |= view_mask;
1816 else
1817 attachment->pending_clear_aspects = 0;
1818 }
1819
1820 /**
1821 * Emit any pending attachment clears for the current subpass.
1822 *
1823 * @see radv_attachment_state::pending_clear_aspects
1824 */
1825 void
1826 radv_cmd_buffer_clear_subpass(struct radv_cmd_buffer *cmd_buffer)
1827 {
1828 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1829 struct radv_meta_saved_state saved_state;
1830 enum radv_cmd_flush_bits pre_flush = 0;
1831 enum radv_cmd_flush_bits post_flush = 0;
1832
1833 if (!radv_subpass_needs_clear(cmd_buffer))
1834 return;
1835
1836 radv_meta_save(&saved_state, cmd_buffer,
1837 RADV_META_SAVE_GRAPHICS_PIPELINE |
1838 RADV_META_SAVE_CONSTANTS);
1839
1840 for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
1841 uint32_t a = cmd_state->subpass->color_attachments[i].attachment;
1842
1843 if (!radv_attachment_needs_clear(cmd_state, a))
1844 continue;
1845
1846 assert(cmd_state->attachments[a].pending_clear_aspects ==
1847 VK_IMAGE_ASPECT_COLOR_BIT);
1848
1849 VkClearAttachment clear_att = {
1850 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1851 .colorAttachment = i, /* Use attachment index relative to subpass */
1852 .clearValue = cmd_state->attachments[a].clear_value,
1853 };
1854
1855 radv_subpass_clear_attachment(cmd_buffer,
1856 &cmd_state->attachments[a],
1857 &clear_att, &pre_flush,
1858 &post_flush, false);
1859 }
1860
1861 if (cmd_state->subpass->depth_stencil_attachment) {
1862 uint32_t ds = cmd_state->subpass->depth_stencil_attachment->attachment;
1863 if (radv_attachment_needs_clear(cmd_state, ds)) {
1864 VkClearAttachment clear_att = {
1865 .aspectMask = cmd_state->attachments[ds].pending_clear_aspects,
1866 .clearValue = cmd_state->attachments[ds].clear_value,
1867 };
1868
1869 radv_subpass_clear_attachment(cmd_buffer,
1870 &cmd_state->attachments[ds],
1871 &clear_att, &pre_flush,
1872 &post_flush, false);
1873 }
1874 }
1875
1876 if (cmd_state->subpass->ds_resolve_attachment) {
1877 uint32_t ds_resolve = cmd_state->subpass->ds_resolve_attachment->attachment;
1878 if (radv_attachment_needs_clear(cmd_state, ds_resolve)) {
1879 VkClearAttachment clear_att = {
1880 .aspectMask = cmd_state->attachments[ds_resolve].pending_clear_aspects,
1881 .clearValue = cmd_state->attachments[ds_resolve].clear_value,
1882 };
1883
1884 radv_subpass_clear_attachment(cmd_buffer,
1885 &cmd_state->attachments[ds_resolve],
1886 &clear_att, &pre_flush,
1887 &post_flush, true);
1888 }
1889 }
1890
1891 radv_meta_restore(&saved_state, cmd_buffer);
1892 cmd_buffer->state.flush_bits |= post_flush;
1893 }
1894
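/* Slow-path clear of a single mip level/layer: build a temporary image view,
 * framebuffer and render pass and emit a draw-based clear inside them.
 */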
1895 static void
1896 radv_clear_image_layer(struct radv_cmd_buffer *cmd_buffer,
1897 struct radv_image *image,
1898 VkImageLayout image_layout,
1899 const VkImageSubresourceRange *range,
1900 VkFormat format, int level, int layer,
1901 const VkClearValue *clear_val)
1902 {
1903 VkDevice device_h = radv_device_to_handle(cmd_buffer->device);
1904 struct radv_image_view iview;
1905 uint32_t width = radv_minify(image->info.width, range->baseMipLevel + level);
1906 uint32_t height = radv_minify(image->info.height, range->baseMipLevel + level);
1907
1908 radv_image_view_init(&iview, cmd_buffer->device,
1909 &(VkImageViewCreateInfo) {
1910 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
1911 .image = radv_image_to_handle(image),
1912 .viewType = radv_meta_get_view_type(image),
1913 .format = format,
1914 .subresourceRange = {
1915 .aspectMask = range->aspectMask,
1916 .baseMipLevel = range->baseMipLevel + level,
1917 .levelCount = 1,
1918 .baseArrayLayer = range->baseArrayLayer + layer,
1919 .layerCount = 1
1920 },
1921 }, NULL);
1922
1923 VkFramebuffer fb;
1924 radv_CreateFramebuffer(device_h,
1925 &(VkFramebufferCreateInfo) {
1926 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
1927 .attachmentCount = 1,
1928 .pAttachments = (VkImageView[]) {
1929 radv_image_view_to_handle(&iview),
1930 },
1931 .width = width,
1932 .height = height,
1933 .layers = 1
1934 },
1935 &cmd_buffer->pool->alloc,
1936 &fb);
1937
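/* One-attachment render pass that loads and stores, so existing contents
 * outside the cleared rect are preserved.
 */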
1938 VkAttachmentDescription att_desc = {
1939 .format = iview.vk_format,
1940 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
1941 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
1942 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
1943 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE,
1944 .initialLayout = image_layout,
1945 .finalLayout = image_layout,
1946 };
1947
1948 VkSubpassDescription subpass_desc = {
1949 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
1950 .inputAttachmentCount = 0,
1951 .colorAttachmentCount = 0,
1952 .pColorAttachments = NULL,
1953 .pResolveAttachments = NULL,
1954 .pDepthStencilAttachment = NULL,
1955 .preserveAttachmentCount = 0,
1956 .pPreserveAttachments = NULL,
1957 };
1958
1959 const VkAttachmentReference att_ref = {
1960 .attachment = 0,
1961 .layout = image_layout,
1962 };
1963
1964 if (range->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
1965 subpass_desc.colorAttachmentCount = 1;
1966 subpass_desc.pColorAttachments = &att_ref;
1967 } else {
1968 subpass_desc.pDepthStencilAttachment = &att_ref;
1969 }
1970
1971 VkRenderPass pass;
1972 radv_CreateRenderPass(device_h,
1973 &(VkRenderPassCreateInfo) {
1974 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
1975 .attachmentCount = 1,
1976 .pAttachments = &att_desc,
1977 .subpassCount = 1,
1978 .pSubpasses = &subpass_desc,
1979 },
1980 &cmd_buffer->pool->alloc,
1981 &pass);
1982
1983 radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
1984 &(VkRenderPassBeginInfo) {
1985 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
1986 .renderArea = {
1987 .offset = { 0, 0, },
1988 .extent = {
1989 .width = width,
1990 .height = height,
1991 },
1992 },
1993 .renderPass = pass,
1994 .framebuffer = fb,
1995 .clearValueCount = 0,
1996 .pClearValues = NULL,
1997 },
1998 VK_SUBPASS_CONTENTS_INLINE);
1999
2000 VkClearAttachment clear_att = {
2001 .aspectMask = range->aspectMask,
2002 .colorAttachment = 0,
2003 .clearValue = *clear_val,
2004 };
2005
2006 VkClearRect clear_rect = {
2007 .rect = {
2008 .offset = { 0, 0 },
2009 .extent = { width, height },
2010 },
2011 .baseArrayLayer = range->baseArrayLayer,
2012 .layerCount = 1, /* FINISHME: clear multi-layer framebuffer */
2013 };
2014
2015 emit_clear(cmd_buffer, &clear_att, &clear_rect, NULL, NULL, 0, false);
2016
2017 radv_CmdEndRenderPass(radv_cmd_buffer_to_handle(cmd_buffer));
2018 radv_DestroyRenderPass(device_h, pass,
2019 &cmd_buffer->pool->alloc);
2020 radv_DestroyFramebuffer(device_h, fb,
2021 &cmd_buffer->pool->alloc);
2022 }
2023
2024 /**
2025 * Return TRUE if a fast color or depth clear has been performed.
2026 */
2027 static bool
2028 radv_fast_clear_range(struct radv_cmd_buffer *cmd_buffer,
2029 struct radv_image *image,
2030 VkFormat format,
2031 VkImageLayout image_layout,
2032 bool in_render_loop,
2033 const VkImageSubresourceRange *range,
2034 const VkClearValue *clear_val)
2035 {
2036 struct radv_image_view iview;
2037
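/* Wrap the subresource range in a temporary image view so the attachment
 * fast-clear helpers can be reused.
 */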
2038 radv_image_view_init(&iview, cmd_buffer->device,
2039 &(VkImageViewCreateInfo) {
2040 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
2041 .image = radv_image_to_handle(image),
2042 .viewType = radv_meta_get_view_type(image),
2043 .format = image->vk_format,
2044 .subresourceRange = {
2045 .aspectMask = range->aspectMask,
2046 .baseMipLevel = range->baseMipLevel,
2047 .levelCount = range->levelCount,
2048 .baseArrayLayer = range->baseArrayLayer,
2049 .layerCount = range->layerCount,
2050 },
2051 }, NULL);
2052
2053 VkClearRect clear_rect = {
2054 .rect = {
2055 .offset = { 0, 0 },
2056 .extent = {
2057 radv_minify(image->info.width, range->baseMipLevel),
2058 radv_minify(image->info.height, range->baseMipLevel),
2059 },
2060 },
2061 .baseArrayLayer = range->baseArrayLayer,
2062 .layerCount = range->layerCount,
2063 };
2064
2065 VkClearAttachment clear_att = {
2066 .aspectMask = range->aspectMask,
2067 .colorAttachment = 0,
2068 .clearValue = *clear_val,
2069 };
2070
2071 if (vk_format_is_color(format)) {
2072 if (radv_can_fast_clear_color(cmd_buffer, &iview, image_layout,
2073 in_render_loop, &clear_rect,
2074 clear_att.clearValue.color, 0)) {
2075 radv_fast_clear_color(cmd_buffer, &iview, &clear_att,
2076 clear_att.colorAttachment,
2077 NULL, NULL);
2078 return true;
2079 }
2080 } else {
2081 if (radv_can_fast_clear_depth(cmd_buffer, &iview, image_layout,
2082 in_render_loop, range->aspectMask,
2083 &clear_rect, clear_att.clearValue.depthStencil,
2084 0)) {
2085 radv_fast_clear_depth(cmd_buffer, &iview, &clear_att,
2086 NULL, NULL);
2087 return true;
2088 }
2089 }
2090
2091 return false;
2092 }
2093
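/* Shared implementation for vkCmdClearColorImage/vkCmdClearDepthStencilImage:
 * try a metadata fast clear per range, otherwise clear every level and layer
 * individually through the graphics or compute path.
 */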
2094 static void
2095 radv_cmd_clear_image(struct radv_cmd_buffer *cmd_buffer,
2096 struct radv_image *image,
2097 VkImageLayout image_layout,
2098 const VkClearValue *clear_value,
2099 uint32_t range_count,
2100 const VkImageSubresourceRange *ranges,
2101 bool cs)
2102 {
2103 VkFormat format = image->vk_format;
2104 VkClearValue internal_clear_value = *clear_value;
2105
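/* Some packed formats can't be cleared directly; pack the clear color by
 * hand and clear through a plain integer format instead.
 */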
2106 if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
2107 uint32_t value;
2108 format = VK_FORMAT_R32_UINT;
2109 value = float3_to_rgb9e5(clear_value->color.float32);
2110 internal_clear_value.color.uint32[0] = value;
2111 }
2112
2113 if (format == VK_FORMAT_R4G4_UNORM_PACK8) {
2114 uint8_t r, g;
2115 format = VK_FORMAT_R8_UINT;
2116 r = float_to_ubyte(clear_value->color.float32[0]) >> 4;
2117 g = float_to_ubyte(clear_value->color.float32[1]) >> 4;
2118 internal_clear_value.color.uint32[0] = (r << 4) | (g & 0xf);
2119 }
2120
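/* R32G32B32 formats are only handled by the compute clear path. */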
2121 if (format == VK_FORMAT_R32G32B32_UINT ||
2122 format == VK_FORMAT_R32G32B32_SINT ||
2123 format == VK_FORMAT_R32G32B32_SFLOAT)
2124 cs = true;
2125
2126 for (uint32_t r = 0; r < range_count; r++) {
2127 const VkImageSubresourceRange *range = &ranges[r];
2128
2129 /* Try to perform a fast clear first, otherwise fall back to
2130 * the legacy path.
2131 */
2132 if (!cs &&
2133 radv_fast_clear_range(cmd_buffer, image, format,
2134 image_layout, false, range,
2135 &internal_clear_value)) {
2136 continue;
2137 }
2138
2139 for (uint32_t l = 0; l < radv_get_levelCount(image, range); ++l) {
2140 const uint32_t layer_count = image->type == VK_IMAGE_TYPE_3D ?
2141 radv_minify(image->info.depth, range->baseMipLevel + l) :
2142 radv_get_layerCount(image, range);
2143 for (uint32_t s = 0; s < layer_count; ++s) {
2144
2145 if (cs) {
2146 struct radv_meta_blit2d_surf surf;
2147 surf.format = format;
2148 surf.image = image;
2149 surf.level = range->baseMipLevel + l;
2150 surf.layer = range->baseArrayLayer + s;
2151 surf.aspect_mask = range->aspectMask;
2152 radv_meta_clear_image_cs(cmd_buffer, &surf,
2153 &internal_clear_value.color);
2154 } else {
2155 radv_clear_image_layer(cmd_buffer, image, image_layout,
2156 range, format, l, s, &internal_clear_value);
2157 }
2158 }
2159 }
2160 }
2161 }
2162
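/* vkCmdClearColorImage entry point. Clears on compute queues have to use the
 * compute path.
 */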
2163 void radv_CmdClearColorImage(
2164 VkCommandBuffer commandBuffer,
2165 VkImage image_h,
2166 VkImageLayout imageLayout,
2167 const VkClearColorValue* pColor,
2168 uint32_t rangeCount,
2169 const VkImageSubresourceRange* pRanges)
2170 {
2171 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2172 RADV_FROM_HANDLE(radv_image, image, image_h);
2173 struct radv_meta_saved_state saved_state;
2174 bool cs = cmd_buffer->queue_family_index == RADV_QUEUE_COMPUTE;
2175
2176 if (cs) {
2177 radv_meta_save(&saved_state, cmd_buffer,
2178 RADV_META_SAVE_COMPUTE_PIPELINE |
2179 RADV_META_SAVE_CONSTANTS |
2180 RADV_META_SAVE_DESCRIPTORS);
2181 } else {
2182 radv_meta_save(&saved_state, cmd_buffer,
2183 RADV_META_SAVE_GRAPHICS_PIPELINE |
2184 RADV_META_SAVE_CONSTANTS);
2185 }
2186
2187 radv_cmd_clear_image(cmd_buffer, image, imageLayout,
2188 (const VkClearValue *) pColor,
2189 rangeCount, pRanges, cs);
2190
2191 radv_meta_restore(&saved_state, cmd_buffer);
2192 }
2193
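/* vkCmdClearDepthStencilImage entry point; depth/stencil clears always go
 * through the graphics path.
 */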
2194 void radv_CmdClearDepthStencilImage(
2195 VkCommandBuffer commandBuffer,
2196 VkImage image_h,
2197 VkImageLayout imageLayout,
2198 const VkClearDepthStencilValue* pDepthStencil,
2199 uint32_t rangeCount,
2200 const VkImageSubresourceRange* pRanges)
2201 {
2202 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2203 RADV_FROM_HANDLE(radv_image, image, image_h);
2204 struct radv_meta_saved_state saved_state;
2205
2206 radv_meta_save(&saved_state, cmd_buffer,
2207 RADV_META_SAVE_GRAPHICS_PIPELINE |
2208 RADV_META_SAVE_CONSTANTS);
2209
2210 radv_cmd_clear_image(cmd_buffer, image, imageLayout,
2211 (const VkClearValue *) pDepthStencil,
2212 rangeCount, pRanges, false);
2213
2214 radv_meta_restore(&saved_state, cmd_buffer);
2215 }
2216
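/* vkCmdClearAttachments entry point: emit one clear per (attachment, rect)
 * pair within the current subpass.
 */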
2217 void radv_CmdClearAttachments(
2218 VkCommandBuffer commandBuffer,
2219 uint32_t attachmentCount,
2220 const VkClearAttachment* pAttachments,
2221 uint32_t rectCount,
2222 const VkClearRect* pRects)
2223 {
2224 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2225 struct radv_meta_saved_state saved_state;
2226 enum radv_cmd_flush_bits pre_flush = 0;
2227 enum radv_cmd_flush_bits post_flush = 0;
2228
2229 if (!cmd_buffer->state.subpass)
2230 return;
2231
2232 radv_meta_save(&saved_state, cmd_buffer,
2233 RADV_META_SAVE_GRAPHICS_PIPELINE |
2234 RADV_META_SAVE_CONSTANTS);
2235
2236 /* FINISHME: We can do better than this dumb loop. It thrashes too much
2237 * state.
2238 */
2239 for (uint32_t a = 0; a < attachmentCount; ++a) {
2240 for (uint32_t r = 0; r < rectCount; ++r) {
2241 emit_clear(cmd_buffer, &pAttachments[a], &pRects[r], &pre_flush, &post_flush,
2242 cmd_buffer->state.subpass->view_mask, false);
2243 }
2244 }
2245
2246 radv_meta_restore(&saved_state, cmd_buffer);
2247 cmd_buffer->state.flush_bits |= post_flush;
2248 }