Added a few more stubs so that control reaches DestroyDevice().
[mesa.git] / src / amd / vulkan / radv_meta_clear.c
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "radv_debug.h"
25 #include "radv_meta.h"
26 #include "radv_private.h"
27 #include "nir/nir_builder.h"
28
29 #include "util/format_rgb9e5.h"
30 #include "vk_format.h"
31
32 enum {
33 DEPTH_CLEAR_SLOW,
34 DEPTH_CLEAR_FAST_EXPCLEAR,
35 DEPTH_CLEAR_FAST_NO_EXPCLEAR
36 };
37
38 static void
39 build_color_shaders(struct nir_shader **out_vs,
40 struct nir_shader **out_fs,
41 uint32_t frag_output)
42 {
43 nir_builder vs_b;
44 nir_builder fs_b;
45
46 nir_builder_init_simple_shader(&vs_b, NULL, MESA_SHADER_VERTEX, NULL);
47 nir_builder_init_simple_shader(&fs_b, NULL, MESA_SHADER_FRAGMENT, NULL);
48
49 vs_b.shader->info.name = ralloc_strdup(vs_b.shader, "meta_clear_color_vs");
50 fs_b.shader->info.name = ralloc_strdup(fs_b.shader, "meta_clear_color_fs");
51
52 const struct glsl_type *position_type = glsl_vec4_type();
53 const struct glsl_type *color_type = glsl_vec4_type();
54
55 nir_variable *vs_out_pos =
56 nir_variable_create(vs_b.shader, nir_var_shader_out, position_type,
57 "gl_Position");
58 vs_out_pos->data.location = VARYING_SLOT_POS;
59
60 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(fs_b.shader, nir_intrinsic_load_push_constant);
61 nir_intrinsic_set_base(in_color_load, 0);
62 nir_intrinsic_set_range(in_color_load, 16);
63 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&fs_b, 0));
64 in_color_load->num_components = 4;
65 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 4, 32, "clear color");
66 nir_builder_instr_insert(&fs_b, &in_color_load->instr);
67
68 nir_variable *fs_out_color =
69 nir_variable_create(fs_b.shader, nir_var_shader_out, color_type,
70 "f_color");
71 fs_out_color->data.location = FRAG_RESULT_DATA0 + frag_output;
72
73 nir_store_var(&fs_b, fs_out_color, &in_color_load->dest.ssa, 0xf);
74
75 nir_ssa_def *outvec = radv_meta_gen_rect_vertices(&vs_b);
76 nir_store_var(&vs_b, vs_out_pos, outvec, 0xf);
77
78 const struct glsl_type *layer_type = glsl_int_type();
79 nir_variable *vs_out_layer =
80 nir_variable_create(vs_b.shader, nir_var_shader_out, layer_type,
81 "v_layer");
82 vs_out_layer->data.location = VARYING_SLOT_LAYER;
83 vs_out_layer->data.interpolation = INTERP_MODE_FLAT;
84 nir_ssa_def *inst_id = nir_load_instance_id(&vs_b);
85 nir_ssa_def *base_instance = nir_load_base_instance(&vs_b);
86
87 nir_ssa_def *layer_id = nir_iadd(&vs_b, inst_id, base_instance);
88 nir_store_var(&vs_b, vs_out_layer, layer_id, 0x1);
89
90 *out_vs = vs_b.shader;
91 *out_fs = fs_b.shader;
92 }
93
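/* Shared helper for all meta clear pipelines: wraps radv_graphics_pipeline_create()
 * with the state common to every clear and takes ownership of (and frees) the
 * NIR shaders passed in.
 */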
94 static VkResult
95 create_pipeline(struct radv_device *device,
96 struct radv_render_pass *render_pass,
97 uint32_t samples,
98 struct nir_shader *vs_nir,
99 struct nir_shader *fs_nir,
100 const VkPipelineVertexInputStateCreateInfo *vi_state,
101 const VkPipelineDepthStencilStateCreateInfo *ds_state,
102 const VkPipelineColorBlendStateCreateInfo *cb_state,
103 const VkPipelineLayout layout,
104 const struct radv_graphics_pipeline_create_info *extra,
105 const VkAllocationCallbacks *alloc,
106 VkPipeline *pipeline)
107 {
108 VkDevice device_h = radv_device_to_handle(device);
109 VkResult result;
110
111 struct radv_shader_module vs_m = { .nir = vs_nir };
112 struct radv_shader_module fs_m = { .nir = fs_nir };
113
114 result = radv_graphics_pipeline_create(device_h,
115 radv_pipeline_cache_to_handle(&device->meta_state.cache),
116 &(VkGraphicsPipelineCreateInfo) {
117 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
118 .stageCount = fs_nir ? 2 : 1,
119 .pStages = (VkPipelineShaderStageCreateInfo[]) {
120 {
121 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
122 .stage = VK_SHADER_STAGE_VERTEX_BIT,
123 .module = radv_shader_module_to_handle(&vs_m),
124 .pName = "main",
125 },
126 {
127 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
128 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
129 .module = radv_shader_module_to_handle(&fs_m),
130 .pName = "main",
131 },
132 },
133 .pVertexInputState = vi_state,
134 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
135 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
136 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
137 .primitiveRestartEnable = false,
138 },
139 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
140 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
141 .viewportCount = 1,
142 .scissorCount = 1,
143 },
144 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
145 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
146 .rasterizerDiscardEnable = false,
147 .polygonMode = VK_POLYGON_MODE_FILL,
148 .cullMode = VK_CULL_MODE_NONE,
149 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
150 .depthBiasEnable = false,
151 },
152 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
153 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
154 .rasterizationSamples = samples,
155 .sampleShadingEnable = false,
156 .pSampleMask = NULL,
157 .alphaToCoverageEnable = false,
158 .alphaToOneEnable = false,
159 },
160 .pDepthStencilState = ds_state,
161 .pColorBlendState = cb_state,
162 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
163 /* The meta clear pipeline declares all state as dynamic.
164 * As a consequence, vkCmdBindPipeline writes no dynamic state
165 * to the cmd buffer. Therefore, at the end of the meta clear,
166 * we need only restore the dynamic state that was set with vkCmdSet*.
167 */
168 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
169 .dynamicStateCount = 8,
170 .pDynamicStates = (VkDynamicState[]) {
171 /* Everything except stencil write mask */
172 VK_DYNAMIC_STATE_VIEWPORT,
173 VK_DYNAMIC_STATE_SCISSOR,
174 VK_DYNAMIC_STATE_LINE_WIDTH,
175 VK_DYNAMIC_STATE_DEPTH_BIAS,
176 VK_DYNAMIC_STATE_BLEND_CONSTANTS,
177 VK_DYNAMIC_STATE_DEPTH_BOUNDS,
178 VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
179 VK_DYNAMIC_STATE_STENCIL_REFERENCE,
180 },
181 },
182 .layout = layout,
183 .flags = 0,
184 .renderPass = radv_render_pass_to_handle(render_pass),
185 .subpass = 0,
186 },
187 extra,
188 alloc,
189 pipeline);
190
191 ralloc_free(vs_nir);
192 ralloc_free(fs_nir);
193
194 return result;
195 }
196
197 static VkResult
198 create_color_renderpass(struct radv_device *device,
199 VkFormat vk_format,
200 uint32_t samples,
201 VkRenderPass *pass)
202 {
203 mtx_lock(&device->meta_state.mtx);
204 if (*pass) {
205 mtx_unlock(&device->meta_state.mtx);
206 return VK_SUCCESS;
207 }
208
209 VkResult result = radv_CreateRenderPass(radv_device_to_handle(device),
210 &(VkRenderPassCreateInfo) {
211 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
212 .attachmentCount = 1,
213 .pAttachments = &(VkAttachmentDescription) {
214 .format = vk_format,
215 .samples = samples,
216 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
217 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
218 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
219 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
220 },
221 .subpassCount = 1,
222 .pSubpasses = &(VkSubpassDescription) {
223 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
224 .inputAttachmentCount = 0,
225 .colorAttachmentCount = 1,
226 .pColorAttachments = &(VkAttachmentReference) {
227 .attachment = 0,
228 .layout = VK_IMAGE_LAYOUT_GENERAL,
229 },
230 .pResolveAttachments = NULL,
231 .pDepthStencilAttachment = &(VkAttachmentReference) {
232 .attachment = VK_ATTACHMENT_UNUSED,
233 .layout = VK_IMAGE_LAYOUT_GENERAL,
234 },
235 .preserveAttachmentCount = 0,
236 .pPreserveAttachments = NULL,
237 },
238 .dependencyCount = 2,
239 .pDependencies = (VkSubpassDependency[]) {
240 {
241 .srcSubpass = VK_SUBPASS_EXTERNAL,
242 .dstSubpass = 0,
243 .srcStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
244 .dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
245 .srcAccessMask = 0,
246 .dstAccessMask = 0,
247 .dependencyFlags = 0
248 },
249 {
250 .srcSubpass = 0,
251 .dstSubpass = VK_SUBPASS_EXTERNAL,
252 .srcStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
253 .dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
254 .srcAccessMask = 0,
255 .dstAccessMask = 0,
256 .dependencyFlags = 0
257 }
258 },
259 }, &device->meta_state.alloc, pass);
260 mtx_unlock(&device->meta_state.mtx);
261 return result;
262 }
263
264 static VkResult
265 create_color_pipeline(struct radv_device *device,
266 uint32_t samples,
267 uint32_t frag_output,
268 VkPipeline *pipeline,
269 VkRenderPass pass)
270 {
271 struct nir_shader *vs_nir;
272 struct nir_shader *fs_nir;
273 VkResult result;
274
275 mtx_lock(&device->meta_state.mtx);
276 if (*pipeline) {
277 mtx_unlock(&device->meta_state.mtx);
278 return VK_SUCCESS;
279 }
280
281 build_color_shaders(&vs_nir, &fs_nir, frag_output);
282
283 const VkPipelineVertexInputStateCreateInfo vi_state = {
284 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
285 .vertexBindingDescriptionCount = 0,
286 .vertexAttributeDescriptionCount = 0,
287 };
288
289 const VkPipelineDepthStencilStateCreateInfo ds_state = {
290 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
291 .depthTestEnable = false,
292 .depthWriteEnable = false,
293 .depthBoundsTestEnable = false,
294 .stencilTestEnable = false,
295 };
296
297 VkPipelineColorBlendAttachmentState blend_attachment_state[MAX_RTS] = { 0 };
298 blend_attachment_state[frag_output] = (VkPipelineColorBlendAttachmentState) {
299 .blendEnable = false,
300 .colorWriteMask = VK_COLOR_COMPONENT_A_BIT |
301 VK_COLOR_COMPONENT_R_BIT |
302 VK_COLOR_COMPONENT_G_BIT |
303 VK_COLOR_COMPONENT_B_BIT,
304 };
305
306 const VkPipelineColorBlendStateCreateInfo cb_state = {
307 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
308 .logicOpEnable = false,
309 .attachmentCount = MAX_RTS,
310 .pAttachments = blend_attachment_state
311 };
312
313
314 struct radv_graphics_pipeline_create_info extra = {
315 .use_rectlist = true,
316 };
317 result = create_pipeline(device, radv_render_pass_from_handle(pass),
318 samples, vs_nir, fs_nir, &vi_state, &ds_state, &cb_state,
319 device->meta_state.clear_color_p_layout,
320 &extra, &device->meta_state.alloc, pipeline);
321
322 mtx_unlock(&device->meta_state.mtx);
323 return result;
324 }
325
326 static void
327 finish_meta_clear_htile_mask_state(struct radv_device *device)
328 {
329 struct radv_meta_state *state = &device->meta_state;
330
331 radv_DestroyPipeline(radv_device_to_handle(device),
332 state->clear_htile_mask_pipeline,
333 &state->alloc);
334 radv_DestroyPipelineLayout(radv_device_to_handle(device),
335 state->clear_htile_mask_p_layout,
336 &state->alloc);
337 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
338 state->clear_htile_mask_ds_layout,
339 &state->alloc);
340 }
341
342 void
343 radv_device_finish_meta_clear_state(struct radv_device *device)
344 {
345 struct radv_meta_state *state = &device->meta_state;
346
347 for (uint32_t i = 0; i < ARRAY_SIZE(state->clear); ++i) {
348 for (uint32_t j = 0; j < ARRAY_SIZE(state->clear[i].color_pipelines); ++j) {
349 radv_DestroyPipeline(radv_device_to_handle(device),
350 state->clear[i].color_pipelines[j],
351 &state->alloc);
352 radv_DestroyRenderPass(radv_device_to_handle(device),
353 state->clear[i].render_pass[j],
354 &state->alloc);
355 }
356
357 for (uint32_t j = 0; j < NUM_DEPTH_CLEAR_PIPELINES; j++) {
358 radv_DestroyPipeline(radv_device_to_handle(device),
359 state->clear[i].depth_only_pipeline[j],
360 &state->alloc);
361 radv_DestroyPipeline(radv_device_to_handle(device),
362 state->clear[i].stencil_only_pipeline[j],
363 &state->alloc);
364 radv_DestroyPipeline(radv_device_to_handle(device),
365 state->clear[i].depthstencil_pipeline[j],
366 &state->alloc);
367
368 radv_DestroyPipeline(radv_device_to_handle(device),
369 state->clear[i].depth_only_unrestricted_pipeline[j],
370 &state->alloc);
371 radv_DestroyPipeline(radv_device_to_handle(device),
372 state->clear[i].stencil_only_unrestricted_pipeline[j],
373 &state->alloc);
374 radv_DestroyPipeline(radv_device_to_handle(device),
375 state->clear[i].depthstencil_unrestricted_pipeline[j],
376 &state->alloc);
377 }
378 radv_DestroyRenderPass(radv_device_to_handle(device),
379 state->clear[i].depthstencil_rp,
380 &state->alloc);
381 }
382 radv_DestroyPipelineLayout(radv_device_to_handle(device),
383 state->clear_color_p_layout,
384 &state->alloc);
385 radv_DestroyPipelineLayout(radv_device_to_handle(device),
386 state->clear_depth_p_layout,
387 &state->alloc);
388 radv_DestroyPipelineLayout(radv_device_to_handle(device),
389 state->clear_depth_unrestricted_p_layout,
390 &state->alloc);
391
392 finish_meta_clear_htile_mask_state(device);
393 }
394
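/* Slow-path color clear: switch to a single-attachment subpass, push the clear
 * color as a push constant and draw a full-rect rectlist once per layer (or
 * once per view when a view mask is active).
 */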
395 static void
396 emit_color_clear(struct radv_cmd_buffer *cmd_buffer,
397 const VkClearAttachment *clear_att,
398 const VkClearRect *clear_rect,
399 uint32_t view_mask)
400 {
401 struct radv_device *device = cmd_buffer->device;
402 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
403 const uint32_t subpass_att = clear_att->colorAttachment;
404 const uint32_t pass_att = subpass->color_attachments[subpass_att].attachment;
405 const struct radv_image_view *iview = cmd_buffer->state.attachments ?
406 cmd_buffer->state.attachments[pass_att].iview : NULL;
407 uint32_t samples, samples_log2;
408 VkFormat format;
409 unsigned fs_key;
410 VkClearColorValue clear_value = clear_att->clearValue.color;
411 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
412 VkPipeline pipeline;
413
414 /* When a framebuffer is bound to the current command buffer, get the
415 * number of samples from it. Otherwise, get the number of samples from
416 * the render pass because it's likely a secondary command buffer.
417 */
418 if (iview) {
419 samples = iview->image->info.samples;
420 format = iview->vk_format;
421 } else {
422 samples = cmd_buffer->state.pass->attachments[pass_att].samples;
423 format = cmd_buffer->state.pass->attachments[pass_att].format;
424 }
425
426 samples_log2 = ffs(samples) - 1;
427 fs_key = radv_format_meta_fs_key(format);
428
429 if (fs_key == -1) {
430 radv_finishme("color clears incomplete");
431 return;
432 }
433
434 if (device->meta_state.clear[samples_log2].render_pass[fs_key] == VK_NULL_HANDLE) {
435 VkResult ret = create_color_renderpass(device, radv_fs_key_format_exemplars[fs_key],
436 samples,
437 &device->meta_state.clear[samples_log2].render_pass[fs_key]);
438 if (ret != VK_SUCCESS) {
439 cmd_buffer->record_result = ret;
440 return;
441 }
442 }
443
444 if (device->meta_state.clear[samples_log2].color_pipelines[fs_key] == VK_NULL_HANDLE) {
445 VkResult ret = create_color_pipeline(device, samples, 0,
446 &device->meta_state.clear[samples_log2].color_pipelines[fs_key],
447 device->meta_state.clear[samples_log2].render_pass[fs_key]);
448 if (ret != VK_SUCCESS) {
449 cmd_buffer->record_result = ret;
450 return;
451 }
452 }
453
454 pipeline = device->meta_state.clear[samples_log2].color_pipelines[fs_key];
455 if (!pipeline) {
456 radv_finishme("color clears incomplete");
457 return;
458 }
459 assert(samples_log2 < ARRAY_SIZE(device->meta_state.clear));
460 assert(pipeline);
461 assert(clear_att->aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
462 assert(clear_att->colorAttachment < subpass->color_count);
463
464 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
465 device->meta_state.clear_color_p_layout,
466 VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16,
467 &clear_value);
468
469 struct radv_subpass clear_subpass = {
470 .color_count = 1,
471 .color_attachments = (struct radv_subpass_attachment[]) {
472 subpass->color_attachments[clear_att->colorAttachment]
473 },
474 .depth_stencil_attachment = NULL,
475 };
476
477 radv_cmd_buffer_set_subpass(cmd_buffer, &clear_subpass);
478
479 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
480 pipeline);
481
482 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
483 .x = clear_rect->rect.offset.x,
484 .y = clear_rect->rect.offset.y,
485 .width = clear_rect->rect.extent.width,
486 .height = clear_rect->rect.extent.height,
487 .minDepth = 0.0f,
488 .maxDepth = 1.0f
489 });
490
491 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &clear_rect->rect);
492
493 if (view_mask) {
494 unsigned i;
495 for_each_bit(i, view_mask)
496 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, i);
497 } else {
498 radv_CmdDraw(cmd_buffer_h, 3, clear_rect->layerCount, 0, clear_rect->baseArrayLayer);
499 }
500
501 radv_cmd_buffer_set_subpass(cmd_buffer, subpass);
502 }
503
504
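/* Build the VS/FS pair for depth/stencil clears. Normally the clear depth is a
 * vertex-shader push constant written to gl_Position.z; with
 * VK_EXT_depth_range_unrestricted it is written to gl_FragDepth by the
 * fragment shader instead.
 */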
505 static void
506 build_depthstencil_shader(struct nir_shader **out_vs,
507 struct nir_shader **out_fs,
508 bool unrestricted)
509 {
510 nir_builder vs_b, fs_b;
511
512 nir_builder_init_simple_shader(&vs_b, NULL, MESA_SHADER_VERTEX, NULL);
513 nir_builder_init_simple_shader(&fs_b, NULL, MESA_SHADER_FRAGMENT, NULL);
514
515 vs_b.shader->info.name = ralloc_strdup(vs_b.shader,
516 unrestricted ? "meta_clear_depthstencil_unrestricted_vs"
517 : "meta_clear_depthstencil_vs");
518 fs_b.shader->info.name = ralloc_strdup(fs_b.shader,
519 unrestricted ? "meta_clear_depthstencil_unrestricted_fs"
520 : "meta_clear_depthstencil_fs");
521 const struct glsl_type *position_out_type = glsl_vec4_type();
522
523 nir_variable *vs_out_pos =
524 nir_variable_create(vs_b.shader, nir_var_shader_out, position_out_type,
525 "gl_Position");
526 vs_out_pos->data.location = VARYING_SLOT_POS;
527
528 nir_ssa_def *z;
529 if (unrestricted) {
530 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(fs_b.shader, nir_intrinsic_load_push_constant);
531 nir_intrinsic_set_base(in_color_load, 0);
532 nir_intrinsic_set_range(in_color_load, 4);
533 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&fs_b, 0));
534 in_color_load->num_components = 1;
535 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 1, 32, "depth value");
536 nir_builder_instr_insert(&fs_b, &in_color_load->instr);
537
538 nir_variable *fs_out_depth =
539 nir_variable_create(fs_b.shader, nir_var_shader_out,
540 glsl_int_type(), "f_depth");
541 fs_out_depth->data.location = FRAG_RESULT_DEPTH;
542 nir_store_var(&fs_b, fs_out_depth, &in_color_load->dest.ssa, 0x1);
543
544 z = nir_imm_float(&vs_b, 0.0);
545 } else {
546 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(vs_b.shader, nir_intrinsic_load_push_constant);
547 nir_intrinsic_set_base(in_color_load, 0);
548 nir_intrinsic_set_range(in_color_load, 4);
549 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&vs_b, 0));
550 in_color_load->num_components = 1;
551 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 1, 32, "depth value");
552 nir_builder_instr_insert(&vs_b, &in_color_load->instr);
553
554 z = &in_color_load->dest.ssa;
555 }
556
557 nir_ssa_def *outvec = radv_meta_gen_rect_vertices_comp2(&vs_b, z);
558 nir_store_var(&vs_b, vs_out_pos, outvec, 0xf);
559
560 const struct glsl_type *layer_type = glsl_int_type();
561 nir_variable *vs_out_layer =
562 nir_variable_create(vs_b.shader, nir_var_shader_out, layer_type,
563 "v_layer");
564 vs_out_layer->data.location = VARYING_SLOT_LAYER;
565 vs_out_layer->data.interpolation = INTERP_MODE_FLAT;
566 nir_ssa_def *inst_id = nir_load_instance_id(&vs_b);
567 nir_ssa_def *base_instance = nir_load_base_instance(&vs_b);
568
569 nir_ssa_def *layer_id = nir_iadd(&vs_b, inst_id, base_instance);
570 nir_store_var(&vs_b, vs_out_layer, layer_id, 0x1);
571
572 *out_vs = vs_b.shader;
573 *out_fs = fs_b.shader;
574 }
575
576 static VkResult
577 create_depthstencil_renderpass(struct radv_device *device,
578 uint32_t samples,
579 VkRenderPass *render_pass)
580 {
581 mtx_lock(&device->meta_state.mtx);
582 if (*render_pass) {
583 mtx_unlock(&device->meta_state.mtx);
584 return VK_SUCCESS;
585 }
586
587 VkResult result = radv_CreateRenderPass(radv_device_to_handle(device),
588 &(VkRenderPassCreateInfo) {
589 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
590 .attachmentCount = 1,
591 .pAttachments = &(VkAttachmentDescription) {
592 .format = VK_FORMAT_D32_SFLOAT_S8_UINT,
593 .samples = samples,
594 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
595 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
596 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
597 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
598 },
599 .subpassCount = 1,
600 .pSubpasses = &(VkSubpassDescription) {
601 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
602 .inputAttachmentCount = 0,
603 .colorAttachmentCount = 0,
604 .pColorAttachments = NULL,
605 .pResolveAttachments = NULL,
606 .pDepthStencilAttachment = &(VkAttachmentReference) {
607 .attachment = 0,
608 .layout = VK_IMAGE_LAYOUT_GENERAL,
609 },
610 .preserveAttachmentCount = 0,
611 .pPreserveAttachments = NULL,
612 },
613 .dependencyCount = 2,
614 .pDependencies = (VkSubpassDependency[]) {
615 {
616 .srcSubpass = VK_SUBPASS_EXTERNAL,
617 .dstSubpass = 0,
618 .srcStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
619 .dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
620 .srcAccessMask = 0,
621 .dstAccessMask = 0,
622 .dependencyFlags = 0
623 },
624 {
625 .srcSubpass = 0,
626 .dstSubpass = VK_SUBPASS_EXTERNAL,
627 .srcStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
628 .dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
629 .srcAccessMask = 0,
630 .dstAccessMask = 0,
631 .dependencyFlags = 0
632 }
633 }
634 }, &device->meta_state.alloc, render_pass);
635 mtx_unlock(&device->meta_state.mtx);
636 return result;
637 }
638
639 static VkResult
640 create_depthstencil_pipeline(struct radv_device *device,
641 VkImageAspectFlags aspects,
642 uint32_t samples,
643 int index,
644 bool unrestricted,
645 VkPipeline *pipeline,
646 VkRenderPass render_pass)
647 {
648 struct nir_shader *vs_nir, *fs_nir;
649 VkResult result;
650
651 mtx_lock(&device->meta_state.mtx);
652 if (*pipeline) {
653 mtx_unlock(&device->meta_state.mtx);
654 return VK_SUCCESS;
655 }
656
657 build_depthstencil_shader(&vs_nir, &fs_nir, unrestricted);
658
659 const VkPipelineVertexInputStateCreateInfo vi_state = {
660 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
661 .vertexBindingDescriptionCount = 0,
662 .vertexAttributeDescriptionCount = 0,
663 };
664
665 const VkPipelineDepthStencilStateCreateInfo ds_state = {
666 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
667 .depthTestEnable = !!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT),
668 .depthCompareOp = VK_COMPARE_OP_ALWAYS,
669 .depthWriteEnable = !!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT),
670 .depthBoundsTestEnable = false,
671 .stencilTestEnable = !!(aspects & VK_IMAGE_ASPECT_STENCIL_BIT),
672 .front = {
673 .passOp = VK_STENCIL_OP_REPLACE,
674 .compareOp = VK_COMPARE_OP_ALWAYS,
675 .writeMask = UINT32_MAX,
676 .reference = 0, /* dynamic */
677 },
678 .back = { 0 /* don't care */ },
679 };
680
681 const VkPipelineColorBlendStateCreateInfo cb_state = {
682 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
683 .logicOpEnable = false,
684 .attachmentCount = 0,
685 .pAttachments = NULL,
686 };
687
688 struct radv_graphics_pipeline_create_info extra = {
689 .use_rectlist = true,
690 };
691
692 if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT) {
693 extra.db_depth_clear = index == DEPTH_CLEAR_SLOW ? false : true;
694 extra.db_depth_disable_expclear = index == DEPTH_CLEAR_FAST_NO_EXPCLEAR ? true : false;
695 }
696 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
697 extra.db_stencil_clear = index == DEPTH_CLEAR_SLOW ? false : true;
698 extra.db_stencil_disable_expclear = index == DEPTH_CLEAR_FAST_NO_EXPCLEAR ? true : false;
699 }
700 result = create_pipeline(device, radv_render_pass_from_handle(render_pass),
701 samples, vs_nir, fs_nir, &vi_state, &ds_state, &cb_state,
702 device->meta_state.clear_depth_p_layout,
703 &extra, &device->meta_state.alloc, pipeline);
704
705 mtx_unlock(&device->meta_state.mtx);
706 return result;
707 }
708
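/* Check whether this depth/stencil clear can take the HTILE fast path: the
 * rect must cover the whole view, the view must start at mip 0 and cover all
 * layers, HTILE must be compressed in the given layout, and for TC-compatible
 * HTILE the clear value must be encodable (depth 0.0/1.0, stencil 0).
 */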
709 static bool depth_view_can_fast_clear(struct radv_cmd_buffer *cmd_buffer,
710 const struct radv_image_view *iview,
711 VkImageAspectFlags aspects,
712 VkImageLayout layout,
713 bool in_render_loop,
714 const VkClearRect *clear_rect,
715 VkClearDepthStencilValue clear_value)
716 {
717 if (!iview)
718 return false;
719
720 uint32_t queue_mask = radv_image_queue_family_mask(iview->image,
721 cmd_buffer->queue_family_index,
722 cmd_buffer->queue_family_index);
723 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
724 clear_rect->rect.extent.width != iview->extent.width ||
725 clear_rect->rect.extent.height != iview->extent.height)
726 return false;
727 if (radv_image_is_tc_compat_htile(iview->image) &&
728 (((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) && clear_value.depth != 0.0 &&
729 clear_value.depth != 1.0) ||
730 ((aspects & VK_IMAGE_ASPECT_STENCIL_BIT) && clear_value.stencil != 0)))
731 return false;
732 if (radv_image_has_htile(iview->image) &&
733 iview->base_mip == 0 &&
734 iview->base_layer == 0 &&
735 iview->layer_count == iview->image->info.array_size &&
736 radv_layout_is_htile_compressed(iview->image, layout, in_render_loop, queue_mask) &&
737 radv_image_extent_compare(iview->image, &iview->extent))
738 return true;
739 return false;
740 }
741
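/* Select (creating on demand) the depth/stencil clear pipeline for the
 * requested aspects and sample count, using the expclear-disabled fast variant
 * when the view can be fast cleared.
 */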
742 static VkPipeline
743 pick_depthstencil_pipeline(struct radv_cmd_buffer *cmd_buffer,
744 struct radv_meta_state *meta_state,
745 const struct radv_image_view *iview,
746 int samples_log2,
747 VkImageAspectFlags aspects,
748 VkImageLayout layout,
749 bool in_render_loop,
750 const VkClearRect *clear_rect,
751 VkClearDepthStencilValue clear_value)
752 {
753 bool fast = depth_view_can_fast_clear(cmd_buffer, iview, aspects, layout,
754 in_render_loop, clear_rect, clear_value);
755 bool unrestricted = cmd_buffer->device->enabled_extensions.EXT_depth_range_unrestricted;
756 int index = DEPTH_CLEAR_SLOW;
757 VkPipeline *pipeline;
758
759 if (fast) {
760 /* We don't know the previous clear values, so we always take
761 * the NO_EXPCLEAR path. */
762 index = DEPTH_CLEAR_FAST_NO_EXPCLEAR;
763 }
764
765 switch (aspects) {
766 case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
767 pipeline = unrestricted ?
768 &meta_state->clear[samples_log2].depthstencil_unrestricted_pipeline[index] :
769 &meta_state->clear[samples_log2].depthstencil_pipeline[index];
770 break;
771 case VK_IMAGE_ASPECT_DEPTH_BIT:
772 pipeline = unrestricted ?
773 &meta_state->clear[samples_log2].depth_only_unrestricted_pipeline[index] :
774 &meta_state->clear[samples_log2].depth_only_pipeline[index];
775 break;
776 case VK_IMAGE_ASPECT_STENCIL_BIT:
777 pipeline = unrestricted ?
778 &meta_state->clear[samples_log2].stencil_only_unrestricted_pipeline[index] :
779 &meta_state->clear[samples_log2].stencil_only_pipeline[index];
780 break;
781 default:
782 unreachable("expected depth or stencil aspect");
783 }
784
785 if (cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp == VK_NULL_HANDLE) {
786 VkResult ret = create_depthstencil_renderpass(cmd_buffer->device, 1u << samples_log2,
787 &cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp);
788 if (ret != VK_SUCCESS) {
789 cmd_buffer->record_result = ret;
790 return VK_NULL_HANDLE;
791 }
792 }
793
794 if (*pipeline == VK_NULL_HANDLE) {
795 VkResult ret = create_depthstencil_pipeline(cmd_buffer->device, aspects, 1u << samples_log2, index, unrestricted,
796 pipeline, cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp);
797 if (ret != VK_SUCCESS) {
798 cmd_buffer->record_result = ret;
799 return VK_NULL_HANDLE;
800 }
801 }
802 return *pipeline;
803 }
804
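/* Slow-path depth/stencil clear: the clear depth goes in as a push constant
 * (consumed by the VS, or by the FS for VK_EXT_depth_range_unrestricted), the
 * stencil value is applied through the dynamic stencil reference, and a
 * full-rect rectlist is drawn per layer/view.
 */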
805 static void
806 emit_depthstencil_clear(struct radv_cmd_buffer *cmd_buffer,
807 const VkClearAttachment *clear_att,
808 const VkClearRect *clear_rect,
809 struct radv_subpass_attachment *ds_att,
810 uint32_t view_mask)
811 {
812 struct radv_device *device = cmd_buffer->device;
813 struct radv_meta_state *meta_state = &device->meta_state;
814 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
815 const uint32_t pass_att = ds_att->attachment;
816 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
817 VkImageAspectFlags aspects = clear_att->aspectMask;
818 const struct radv_image_view *iview = cmd_buffer->state.attachments ?
819 cmd_buffer->state.attachments[pass_att].iview : NULL;
820 uint32_t samples, samples_log2;
821 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
822
823 /* When a framebuffer is bound to the current command buffer, get the
824 * number of samples from it. Otherwise, get the number of samples from
825 * the render pass because it's likely a secondary command buffer.
826 */
827 if (iview) {
828 samples = iview->image->info.samples;
829 } else {
830 samples = cmd_buffer->state.pass->attachments[pass_att].samples;
831 }
832
833 samples_log2 = ffs(samples) - 1;
834
835 assert(pass_att != VK_ATTACHMENT_UNUSED);
836
837 if (!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT))
838 clear_value.depth = 1.0f;
839
840 if (cmd_buffer->device->enabled_extensions.EXT_depth_range_unrestricted) {
841 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
842 device->meta_state.clear_depth_unrestricted_p_layout,
843 VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4,
844 &clear_value.depth);
845 } else {
846 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
847 device->meta_state.clear_depth_p_layout,
848 VK_SHADER_STAGE_VERTEX_BIT, 0, 4,
849 &clear_value.depth);
850 }
851
852 uint32_t prev_reference = cmd_buffer->state.dynamic.stencil_reference.front;
853 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
854 radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
855 clear_value.stencil);
856 }
857
858 VkPipeline pipeline = pick_depthstencil_pipeline(cmd_buffer,
859 meta_state,
860 iview,
861 samples_log2,
862 aspects,
863 ds_att->layout,
864 ds_att->in_render_loop,
865 clear_rect,
866 clear_value);
867 if (!pipeline)
868 return;
869
870 struct radv_subpass clear_subpass = {
871 .color_count = 0,
872 .color_attachments = NULL,
873 .depth_stencil_attachment = ds_att,
874 };
875
876 radv_cmd_buffer_set_subpass(cmd_buffer, &clear_subpass);
877
878 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
879 pipeline);
880
881 if (depth_view_can_fast_clear(cmd_buffer, iview, aspects,
882 ds_att->layout, ds_att->in_render_loop,
883 clear_rect, clear_value))
884 radv_update_ds_clear_metadata(cmd_buffer, iview,
885 clear_value, aspects);
886
887 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
888 .x = clear_rect->rect.offset.x,
889 .y = clear_rect->rect.offset.y,
890 .width = clear_rect->rect.extent.width,
891 .height = clear_rect->rect.extent.height,
892 .minDepth = 0.0f,
893 .maxDepth = 1.0f
894 });
895
896 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &clear_rect->rect);
897
898 if (view_mask) {
899 unsigned i;
900 for_each_bit(i, view_mask)
901 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, i);
902 } else {
903 radv_CmdDraw(cmd_buffer_h, 3, clear_rect->layerCount, 0, clear_rect->baseArrayLayer);
904 }
905
906 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
907 radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
908 prev_reference);
909 }
910
911 radv_cmd_buffer_set_subpass(cmd_buffer, subpass);
912 }
913
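/* Clear only the HTILE bits selected by htile_mask with a compute shader that
 * read-modify-writes the buffer. Each invocation rewrites 16 bytes, so one
 * 64-thread workgroup covers 1024 bytes, hence the 1024-byte block count.
 */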
914 static uint32_t
915 clear_htile_mask(struct radv_cmd_buffer *cmd_buffer,
916 struct radeon_winsys_bo *bo, uint64_t offset, uint64_t size,
917 uint32_t htile_value, uint32_t htile_mask)
918 {
919 struct radv_device *device = cmd_buffer->device;
920 struct radv_meta_state *state = &device->meta_state;
921 uint64_t block_count = round_up_u64(size, 1024);
922 struct radv_meta_saved_state saved_state;
923
924 radv_meta_save(&saved_state, cmd_buffer,
925 RADV_META_SAVE_COMPUTE_PIPELINE |
926 RADV_META_SAVE_CONSTANTS |
927 RADV_META_SAVE_DESCRIPTORS);
928
929 struct radv_buffer dst_buffer = {
930 .bo = bo,
931 .offset = offset,
932 .size = size
933 };
934
935 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
936 VK_PIPELINE_BIND_POINT_COMPUTE,
937 state->clear_htile_mask_pipeline);
938
939 radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
940 state->clear_htile_mask_p_layout,
941 0, /* set */
942 1, /* descriptorWriteCount */
943 (VkWriteDescriptorSet[]) {
944 {
945 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
946 .dstBinding = 0,
947 .dstArrayElement = 0,
948 .descriptorCount = 1,
949 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
950 .pBufferInfo = &(VkDescriptorBufferInfo) {
951 .buffer = radv_buffer_to_handle(&dst_buffer),
952 .offset = 0,
953 .range = size
954 }
955 }
956 });
957
958 const unsigned constants[2] = {
959 htile_value & htile_mask,
960 ~htile_mask,
961 };
962
963 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
964 state->clear_htile_mask_p_layout,
965 VK_SHADER_STAGE_COMPUTE_BIT, 0, 8,
966 constants);
967
968 radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);
969
970 radv_meta_restore(&saved_state, cmd_buffer);
971
972 return RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
973 RADV_CMD_FLAG_INV_VCACHE |
974 RADV_CMD_FLAG_WB_L2;
975 }
976
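/* Return the HTILE word that encodes a fast depth/stencil clear. The encoding
 * differs depending on whether the surface has a stencil plane; only the clear
 * values accepted by radv_is_fast_clear_depth_allowed()/
 * radv_is_fast_clear_stencil_allowed() are representable.
 */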
977 static uint32_t
978 radv_get_htile_fast_clear_value(const struct radv_image *image,
979 VkClearDepthStencilValue value)
980 {
981 uint32_t clear_value;
982
983 if (!image->planes[0].surface.has_stencil) {
984 clear_value = value.depth ? 0xfffffff0 : 0;
985 } else {
986 clear_value = value.depth ? 0xfffc0000 : 0;
987 }
988
989 return clear_value;
990 }
991
992 static uint32_t
993 radv_get_htile_mask(const struct radv_image *image, VkImageAspectFlags aspects)
994 {
995 uint32_t mask = 0;
996
997 if (!image->planes[0].surface.has_stencil) {
998 /* The whole HTILE buffer is used when there is no stencil. */
999 mask = UINT32_MAX;
1000 } else {
1001 if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT)
1002 mask |= 0xfffffc0f;
1003 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT)
1004 mask |= 0x000003f0;
1005 }
1006
1007 return mask;
1008 }
1009
1010 static bool
1011 radv_is_fast_clear_depth_allowed(VkClearDepthStencilValue value)
1012 {
1013 return value.depth == 1.0f || value.depth == 0.0f;
1014 }
1015
1016 static bool
1017 radv_is_fast_clear_stencil_allowed(VkClearDepthStencilValue value)
1018 {
1019 return value.stencil == 0;
1020 }
1021
1022 /**
1023 * Determine if the given image can be fast cleared.
1024 */
1025 static bool
1026 radv_image_can_fast_clear(struct radv_device *device, struct radv_image *image)
1027 {
1028 if (device->instance->debug_flags & RADV_DEBUG_NO_FAST_CLEARS)
1029 return false;
1030
1031 if (vk_format_is_color(image->vk_format)) {
1032 if (!radv_image_has_cmask(image) && !radv_image_has_dcc(image))
1033 return false;
1034
1035 /* RB+ doesn't work with CMASK fast clear on Stoney. */
1036 if (!radv_image_has_dcc(image) &&
1037 device->physical_device->rad_info.family == CHIP_STONEY)
1038 return false;
1039 } else {
1040 if (!radv_image_has_htile(image))
1041 return false;
1042 }
1043
1044 /* Do not fast clear 3D images. */
1045 if (image->type == VK_IMAGE_TYPE_3D)
1046 return false;
1047
1048 return true;
1049 }
1050
1051 /**
1052 * Determine if the given image view can be fast cleared.
1053 */
1054 static bool
1055 radv_image_view_can_fast_clear(struct radv_device *device,
1056 const struct radv_image_view *iview)
1057 {
1058 struct radv_image *image;
1059
1060 if (!iview)
1061 return false;
1062 image = iview->image;
1063
1064 /* Only fast clear if the image itself can be fast cleared. */
1065 if (!radv_image_can_fast_clear(device, image))
1066 return false;
1067
1068 /* Only fast clear if all layers are bound. */
1069 if (iview->base_layer > 0 ||
1070 iview->layer_count != image->info.array_size)
1071 return false;
1072
1073 /* Only fast clear if the view covers the whole image. */
1074 if (!radv_image_extent_compare(image, &iview->extent))
1075 return false;
1076
1077 return true;
1078 }
1079
1080 static bool
1081 radv_can_fast_clear_depth(struct radv_cmd_buffer *cmd_buffer,
1082 const struct radv_image_view *iview,
1083 VkImageLayout image_layout,
1084 bool in_render_loop,
1085 VkImageAspectFlags aspects,
1086 const VkClearRect *clear_rect,
1087 const VkClearDepthStencilValue clear_value,
1088 uint32_t view_mask)
1089 {
1090 if (!radv_image_view_can_fast_clear(cmd_buffer->device, iview))
1091 return false;
1092
1093 if (!radv_layout_is_htile_compressed(iview->image, image_layout, in_render_loop,
1094 radv_image_queue_family_mask(iview->image,
1095 cmd_buffer->queue_family_index,
1096 cmd_buffer->queue_family_index)))
1097 return false;
1098
1099 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
1100 clear_rect->rect.extent.width != iview->image->info.width ||
1101 clear_rect->rect.extent.height != iview->image->info.height)
1102 return false;
1103
1104 if (view_mask && (iview->image->info.array_size >= 32 ||
1105 (1u << iview->image->info.array_size) - 1u != view_mask))
1106 return false;
1107 if (!view_mask && clear_rect->baseArrayLayer != 0)
1108 return false;
1109 if (!view_mask && clear_rect->layerCount != iview->image->info.array_size)
1110 return false;
1111
1112 if (((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) &&
1113 !radv_is_fast_clear_depth_allowed(clear_value)) ||
1114 ((aspects & VK_IMAGE_ASPECT_STENCIL_BIT) &&
1115 !radv_is_fast_clear_stencil_allowed(clear_value)))
1116 return false;
1117
1118 return true;
1119 }
1120
1121 static void
1122 radv_fast_clear_depth(struct radv_cmd_buffer *cmd_buffer,
1123 const struct radv_image_view *iview,
1124 const VkClearAttachment *clear_att,
1125 enum radv_cmd_flush_bits *pre_flush,
1126 enum radv_cmd_flush_bits *post_flush)
1127 {
1128 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
1129 VkImageAspectFlags aspects = clear_att->aspectMask;
1130 uint32_t clear_word, flush_bits;
1131
1132 clear_word = radv_get_htile_fast_clear_value(iview->image, clear_value);
1133
1134 if (pre_flush) {
1135 cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_DB |
1136 RADV_CMD_FLAG_FLUSH_AND_INV_DB_META) & ~ *pre_flush;
1137 *pre_flush |= cmd_buffer->state.flush_bits;
1138 }
1139
1140 struct VkImageSubresourceRange range = {
1141 .aspectMask = aspects,
1142 .baseMipLevel = 0,
1143 .levelCount = VK_REMAINING_MIP_LEVELS,
1144 .baseArrayLayer = 0,
1145 .layerCount = VK_REMAINING_ARRAY_LAYERS,
1146 };
1147
1148 flush_bits = radv_clear_htile(cmd_buffer, iview->image, &range, clear_word);
1149
1150 if (iview->image->planes[0].surface.has_stencil &&
1151 !(aspects == (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
1152 /* Synchronize after performing a depth-only or a stencil-only
1153 * fast clear because the driver uses an optimized path which
1154 * performs a read-modify-write operation, and the two separate
1155 * aspects might use the same HTILE memory.
1156 */
1157 cmd_buffer->state.flush_bits |= flush_bits;
1158 }
1159
1160 radv_update_ds_clear_metadata(cmd_buffer, iview, clear_value, aspects);
1161 if (post_flush) {
1162 *post_flush |= flush_bits;
1163 }
1164 }
1165
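/* Build the compute shader used by clear_htile_mask(): for each 16-byte block
 * it computes data = (data & ~htile_mask) | (htile_value & htile_mask), with
 * both operands delivered as push constants.
 */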
1166 static nir_shader *
1167 build_clear_htile_mask_shader()
1168 {
1169 nir_builder b;
1170
1171 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
1172 b.shader->info.name = ralloc_strdup(b.shader, "meta_clear_htile_mask");
1173 b.shader->info.cs.local_size[0] = 64;
1174 b.shader->info.cs.local_size[1] = 1;
1175 b.shader->info.cs.local_size[2] = 1;
1176
1177 nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
1178 nir_ssa_def *wg_id = nir_load_work_group_id(&b, 32);
1179 nir_ssa_def *block_size = nir_imm_ivec4(&b,
1180 b.shader->info.cs.local_size[0],
1181 b.shader->info.cs.local_size[1],
1182 b.shader->info.cs.local_size[2], 0);
1183
1184 nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);
1185
1186 nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
1187 offset = nir_channel(&b, offset, 0);
1188
1189 nir_intrinsic_instr *buf =
1190 nir_intrinsic_instr_create(b.shader,
1191 nir_intrinsic_vulkan_resource_index);
1192
1193 buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
1194 buf->num_components = 1;
1195 nir_intrinsic_set_desc_set(buf, 0);
1196 nir_intrinsic_set_binding(buf, 0);
1197 nir_ssa_dest_init(&buf->instr, &buf->dest, buf->num_components, 32, NULL);
1198 nir_builder_instr_insert(&b, &buf->instr);
1199
1200 nir_intrinsic_instr *constants =
1201 nir_intrinsic_instr_create(b.shader,
1202 nir_intrinsic_load_push_constant);
1203 nir_intrinsic_set_base(constants, 0);
1204 nir_intrinsic_set_range(constants, 8);
1205 constants->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
1206 constants->num_components = 2;
1207 nir_ssa_dest_init(&constants->instr, &constants->dest, 2, 32, "constants");
1208 nir_builder_instr_insert(&b, &constants->instr);
1209
1210 nir_intrinsic_instr *load =
1211 nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_ssbo);
1212 load->src[0] = nir_src_for_ssa(&buf->dest.ssa);
1213 load->src[1] = nir_src_for_ssa(offset);
1214 nir_ssa_dest_init(&load->instr, &load->dest, 4, 32, NULL);
1215 load->num_components = 4;
1216 nir_intrinsic_set_align(load, 16, 0);
1217 nir_builder_instr_insert(&b, &load->instr);
1218
1219 /* data = (data & ~htile_mask) | (htile_value & htile_mask) */
1220 nir_ssa_def *data =
1221 nir_iand(&b, &load->dest.ssa,
1222 nir_channel(&b, &constants->dest.ssa, 1));
1223 data = nir_ior(&b, data, nir_channel(&b, &constants->dest.ssa, 0));
1224
1225 nir_intrinsic_instr *store =
1226 nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
1227 store->src[0] = nir_src_for_ssa(data);
1228 store->src[1] = nir_src_for_ssa(&buf->dest.ssa);
1229 store->src[2] = nir_src_for_ssa(offset);
1230 nir_intrinsic_set_write_mask(store, 0xf);
1231 nir_intrinsic_set_access(store, ACCESS_NON_READABLE);
1232 nir_intrinsic_set_align(store, 16, 0);
1233 store->num_components = 4;
1234 nir_builder_instr_insert(&b, &store->instr);
1235
1236 return b.shader;
1237 }
1238
1239 static VkResult
1240 init_meta_clear_htile_mask_state(struct radv_device *device)
1241 {
1242 struct radv_meta_state *state = &device->meta_state;
1243 struct radv_shader_module cs = { .nir = NULL };
1244 VkResult result;
1245
1246 cs.nir = build_clear_htile_mask_shader();
1247
1248 VkDescriptorSetLayoutCreateInfo ds_layout_info = {
1249 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
1250 .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
1251 .bindingCount = 1,
1252 .pBindings = (VkDescriptorSetLayoutBinding[]) {
1253 {
1254 .binding = 0,
1255 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
1256 .descriptorCount = 1,
1257 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
1258 .pImmutableSamplers = NULL
1259 },
1260 }
1261 };
1262
1263 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
1264 &ds_layout_info, &state->alloc,
1265 &state->clear_htile_mask_ds_layout);
1266 if (result != VK_SUCCESS)
1267 goto fail;
1268
1269 VkPipelineLayoutCreateInfo p_layout_info = {
1270 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1271 .setLayoutCount = 1,
1272 .pSetLayouts = &state->clear_htile_mask_ds_layout,
1273 .pushConstantRangeCount = 1,
1274 .pPushConstantRanges = &(VkPushConstantRange){
1275 VK_SHADER_STAGE_COMPUTE_BIT, 0, 8,
1276 },
1277 };
1278
1279 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
1280 &p_layout_info, &state->alloc,
1281 &state->clear_htile_mask_p_layout);
1282 if (result != VK_SUCCESS)
1283 goto fail;
1284
1285 VkPipelineShaderStageCreateInfo shader_stage = {
1286 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
1287 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
1288 .module = radv_shader_module_to_handle(&cs),
1289 .pName = "main",
1290 .pSpecializationInfo = NULL,
1291 };
1292
1293 VkComputePipelineCreateInfo pipeline_info = {
1294 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
1295 .stage = shader_stage,
1296 .flags = 0,
1297 .layout = state->clear_htile_mask_p_layout,
1298 };
1299
1300 result = radv_CreateComputePipelines(radv_device_to_handle(device),
1301 radv_pipeline_cache_to_handle(&state->cache),
1302 1, &pipeline_info, NULL,
1303 &state->clear_htile_mask_pipeline);
1304
1305 ralloc_free(cs.nir);
1306 return result;
1307 fail:
1308 ralloc_free(cs.nir);
1309 return result;
1310 }
1311
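/* Create the pipeline layouts and the HTILE-mask compute pipeline up front.
 * The clear render passes and graphics pipelines are created here as well,
 * unless on_demand is set, in which case they are created on first use.
 */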
1312 VkResult
1313 radv_device_init_meta_clear_state(struct radv_device *device, bool on_demand)
1314 {
1315 VkResult res;
1316 struct radv_meta_state *state = &device->meta_state;
1317
1318 VkPipelineLayoutCreateInfo pl_color_create_info = {
1319 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1320 .setLayoutCount = 0,
1321 .pushConstantRangeCount = 1,
1322 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16},
1323 };
1324
1325 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1326 &pl_color_create_info,
1327 &device->meta_state.alloc,
1328 &device->meta_state.clear_color_p_layout);
1329 if (res != VK_SUCCESS)
1330 goto fail;
1331
1332 VkPipelineLayoutCreateInfo pl_depth_create_info = {
1333 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1334 .setLayoutCount = 0,
1335 .pushConstantRangeCount = 1,
1336 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
1337 };
1338
1339 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1340 &pl_depth_create_info,
1341 &device->meta_state.alloc,
1342 &device->meta_state.clear_depth_p_layout);
1343 if (res != VK_SUCCESS)
1344 goto fail;
1345
1346 VkPipelineLayoutCreateInfo pl_depth_unrestricted_create_info = {
1347 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1348 .setLayoutCount = 0,
1349 .pushConstantRangeCount = 1,
1350 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
1351 };
1352
1353 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1354 &pl_depth_unrestricted_create_info,
1355 &device->meta_state.alloc,
1356 &device->meta_state.clear_depth_unrestricted_p_layout);
1357 if (res != VK_SUCCESS)
1358 goto fail;
1359
1360 res = init_meta_clear_htile_mask_state(device);
1361 if (res != VK_SUCCESS)
1362 goto fail;
1363
1364 if (on_demand)
1365 return VK_SUCCESS;
1366
1367 for (uint32_t i = 0; i < ARRAY_SIZE(state->clear); ++i) {
1368 uint32_t samples = 1 << i;
1369 for (uint32_t j = 0; j < NUM_META_FS_KEYS; ++j) {
1370 VkFormat format = radv_fs_key_format_exemplars[j];
1371 unsigned fs_key = radv_format_meta_fs_key(format);
1372 assert(!state->clear[i].color_pipelines[fs_key]);
1373
1374 res = create_color_renderpass(device, format, samples,
1375 &state->clear[i].render_pass[fs_key]);
1376 if (res != VK_SUCCESS)
1377 goto fail;
1378
1379 res = create_color_pipeline(device, samples, 0, &state->clear[i].color_pipelines[fs_key],
1380 state->clear[i].render_pass[fs_key]);
1381 if (res != VK_SUCCESS)
1382 goto fail;
1383
1384 }
1385
1386 res = create_depthstencil_renderpass(device,
1387 samples,
1388 &state->clear[i].depthstencil_rp);
1389 if (res != VK_SUCCESS)
1390 goto fail;
1391
1392 for (uint32_t j = 0; j < NUM_DEPTH_CLEAR_PIPELINES; j++) {
1393 res = create_depthstencil_pipeline(device,
1394 VK_IMAGE_ASPECT_DEPTH_BIT,
1395 samples,
1396 j,
1397 false,
1398 &state->clear[i].depth_only_pipeline[j],
1399 state->clear[i].depthstencil_rp);
1400 if (res != VK_SUCCESS)
1401 goto fail;
1402
1403 res = create_depthstencil_pipeline(device,
1404 VK_IMAGE_ASPECT_STENCIL_BIT,
1405 samples,
1406 j,
1407 false,
1408 &state->clear[i].stencil_only_pipeline[j],
1409 state->clear[i].depthstencil_rp);
1410 if (res != VK_SUCCESS)
1411 goto fail;
1412
1413 res = create_depthstencil_pipeline(device,
1414 VK_IMAGE_ASPECT_DEPTH_BIT |
1415 VK_IMAGE_ASPECT_STENCIL_BIT,
1416 samples,
1417 j,
1418 false,
1419 &state->clear[i].depthstencil_pipeline[j],
1420 state->clear[i].depthstencil_rp);
1421 if (res != VK_SUCCESS)
1422 goto fail;
1423
1424 res = create_depthstencil_pipeline(device,
1425 VK_IMAGE_ASPECT_DEPTH_BIT,
1426 samples,
1427 j,
1428 true,
1429 &state->clear[i].depth_only_unrestricted_pipeline[j],
1430 state->clear[i].depthstencil_rp);
1431 if (res != VK_SUCCESS)
1432 goto fail;
1433
1434 res = create_depthstencil_pipeline(device,
1435 VK_IMAGE_ASPECT_STENCIL_BIT,
1436 samples,
1437 j,
1438 true,
1439 &state->clear[i].stencil_only_unrestricted_pipeline[j],
1440 state->clear[i].depthstencil_rp);
1441 if (res != VK_SUCCESS)
1442 goto fail;
1443
1444 res = create_depthstencil_pipeline(device,
1445 VK_IMAGE_ASPECT_DEPTH_BIT |
1446 VK_IMAGE_ASPECT_STENCIL_BIT,
1447 samples,
1448 j,
1449 true,
1450 &state->clear[i].depthstencil_unrestricted_pipeline[j],
1451 state->clear[i].depthstencil_rp);
1452 if (res != VK_SUCCESS)
1453 goto fail;
1454 }
1455 }
1456 return VK_SUCCESS;
1457
1458 fail:
1459 radv_device_finish_meta_clear_state(device);
1460 return res;
1461 }
1462
1463 static uint32_t
1464 radv_get_cmask_fast_clear_value(const struct radv_image *image)
1465 {
1466 uint32_t value = 0; /* Default value when no DCC. */
1467
1468 /* The fast-clear value is different for images that have both DCC and
1469 * CMASK metadata.
1470 */
1471 if (radv_image_has_dcc(image)) {
1472 /* DCC fast clear with MSAA should clear CMASK to 0xC. */
1473 return image->info.samples > 1 ? 0xcccccccc : 0xffffffff;
1474 }
1475
1476 return value;
1477 }
1478
1479 uint32_t
1480 radv_clear_cmask(struct radv_cmd_buffer *cmd_buffer,
1481 struct radv_image *image,
1482 const VkImageSubresourceRange *range, uint32_t value)
1483 {
1484 uint64_t offset = image->offset + image->planes[0].surface.cmask_offset;
1485 uint64_t size;
1486
1487 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1488 /* TODO: clear layers. */
1489 size = image->planes[0].surface.cmask_size;
1490 } else {
1491 unsigned cmask_slice_size =
1492 image->planes[0].surface.cmask_slice_size;
1493
1494 offset += cmask_slice_size * range->baseArrayLayer;
1495 size = cmask_slice_size * radv_get_layerCount(image, range);
1496 }
1497
1498 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1499 }
1500
1501
1502 uint32_t
1503 radv_clear_fmask(struct radv_cmd_buffer *cmd_buffer,
1504 struct radv_image *image,
1505 const VkImageSubresourceRange *range, uint32_t value)
1506 {
1507 uint64_t offset = image->offset + image->planes[0].surface.fmask_offset;
1508 uint64_t size;
1509
1510 /* MSAA images do not support mipmap levels. */
1511 assert(range->baseMipLevel == 0 &&
1512 radv_get_levelCount(image, range) == 1);
1513
1514 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1515 /* TODO: clear layers. */
1516 size = image->planes[0].surface.fmask_size;
1517 } else {
1518 unsigned fmask_slice_size =
1519 image->planes[0].surface.u.legacy.fmask.slice_size;
1520
1521
1522 offset += fmask_slice_size * range->baseArrayLayer;
1523 size = fmask_slice_size * radv_get_layerCount(image, range);
1524 }
1525
1526 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1527 }
1528
1529 uint32_t
1530 radv_clear_dcc(struct radv_cmd_buffer *cmd_buffer,
1531 struct radv_image *image,
1532 const VkImageSubresourceRange *range, uint32_t value)
1533 {
1534 uint32_t level_count = radv_get_levelCount(image, range);
1535 uint32_t flush_bits = 0;
1536
1537 /* Mark the image as being compressed. */
1538 radv_update_dcc_metadata(cmd_buffer, image, range, true);
1539
1540 for (uint32_t l = 0; l < level_count; l++) {
1541 uint64_t offset = image->offset + image->planes[0].surface.dcc_offset;
1542 uint32_t level = range->baseMipLevel + l;
1543 uint64_t size;
1544
1545 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1546 /* Mipmap levels aren't implemented. */
1547 assert(level == 0);
1548 size = image->planes[0].surface.dcc_size;
1549 } else {
1550 const struct legacy_surf_level *surf_level =
1551 &image->planes[0].surface.u.legacy.level[level];
1552
1553 /* If dcc_fast_clear_size is 0 (which might happen for
1554 * mipmaps), the fill buffer operation below is a no-op.
1555 * This can only happen during initialization, as the
1556 * fast clear path falls back to slow clears if one
1557 * level can't be fast cleared.
1558 */
1559 offset += surf_level->dcc_offset +
1560 surf_level->dcc_slice_fast_clear_size * range->baseArrayLayer;
1561 size = surf_level->dcc_slice_fast_clear_size * radv_get_layerCount(image, range);
1562 }
1563
1564 flush_bits |= radv_fill_buffer(cmd_buffer, image->bo, offset,
1565 size, value);
1566 }
1567
1568 return flush_bits;
1569 }
1570
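/* Initialize the HTILE buffer for the given range. When only one aspect of a
 * depth+stencil surface is cleared, the masked compute path is used so the
 * other aspect's HTILE bits are preserved.
 */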
1571 uint32_t
1572 radv_clear_htile(struct radv_cmd_buffer *cmd_buffer,
1573 const struct radv_image *image,
1574 const VkImageSubresourceRange *range,
1575 uint32_t value)
1576 {
1577 unsigned layer_count = radv_get_layerCount(image, range);
1578 uint64_t size = image->planes[0].surface.htile_slice_size * layer_count;
1579 uint64_t offset = image->offset + image->planes[0].surface.htile_offset +
1580 image->planes[0].surface.htile_slice_size * range->baseArrayLayer;
1581 uint32_t htile_mask, flush_bits;
1582
1583 htile_mask = radv_get_htile_mask(image, range->aspectMask);
1584
1585 if (htile_mask == UINT32_MAX) {
1586 /* Clear the whole HTILE buffer. */
1587 flush_bits = radv_fill_buffer(cmd_buffer, image->bo, offset,
1588 size, value);
1589 } else {
1590 /* Only clear depth or stencil bytes in the HTILE buffer. */
1591 flush_bits = clear_htile_mask(cmd_buffer, image->bo, offset,
1592 size, value, htile_mask);
1593 }
1594
1595 return flush_bits;
1596 }
1597
1598 enum {
1599 RADV_DCC_CLEAR_REG = 0x20202020U,
1600 RADV_DCC_CLEAR_MAIN_1 = 0x80808080U,
1601 RADV_DCC_CLEAR_SECONDARY_1 = 0x40404040U
1602 };
1603
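/* Determine whether the clear color can be encoded directly in DCC (every
 * channel cleared to all-0s or all-1s). If so, *reset_value receives that
 * encoding and the fast-clear eliminate pass can be skipped.
 */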
1604 static void vi_get_fast_clear_parameters(struct radv_device *device,
1605 VkFormat image_format,
1606 VkFormat view_format,
1607 const VkClearColorValue *clear_value,
1608 uint32_t* reset_value,
1609 bool *can_avoid_fast_clear_elim)
1610 {
1611 bool values[4] = {};
1612 int extra_channel;
1613 bool main_value = false;
1614 bool extra_value = false;
1615 bool has_color = false;
1616 bool has_alpha = false;
1617 int i;
1618 *can_avoid_fast_clear_elim = false;
1619
1620 *reset_value = RADV_DCC_CLEAR_REG;
1621
1622 const struct vk_format_description *desc = vk_format_description(view_format);
1623 if (view_format == VK_FORMAT_B10G11R11_UFLOAT_PACK32 ||
1624 view_format == VK_FORMAT_R5G6B5_UNORM_PACK16 ||
1625 view_format == VK_FORMAT_B5G6R5_UNORM_PACK16)
1626 extra_channel = -1;
1627 else if (desc->layout == VK_FORMAT_LAYOUT_PLAIN) {
1628 if (vi_alpha_is_on_msb(device, view_format))
1629 extra_channel = desc->nr_channels - 1;
1630 else
1631 extra_channel = 0;
1632 } else
1633 return;
1634
1635 for (i = 0; i < 4; i++) {
1636 int index = desc->swizzle[i] - VK_SWIZZLE_X;
1637 if (desc->swizzle[i] < VK_SWIZZLE_X ||
1638 desc->swizzle[i] > VK_SWIZZLE_W)
1639 continue;
1640
1641 if (desc->channel[i].pure_integer &&
1642 desc->channel[i].type == VK_FORMAT_TYPE_SIGNED) {
1643 /* Use the maximum value for clamping the clear color. */
1644 int max = u_bit_consecutive(0, desc->channel[i].size - 1);
1645
1646 values[i] = clear_value->int32[i] != 0;
1647 if (clear_value->int32[i] != 0 && MIN2(clear_value->int32[i], max) != max)
1648 return;
1649 } else if (desc->channel[i].pure_integer &&
1650 desc->channel[i].type == VK_FORMAT_TYPE_UNSIGNED) {
1651 /* Use the maximum value for clamping the clear color. */
1652 unsigned max = u_bit_consecutive(0, desc->channel[i].size);
1653
1654 values[i] = clear_value->uint32[i] != 0U;
1655 if (clear_value->uint32[i] != 0U && MIN2(clear_value->uint32[i], max) != max)
1656 return;
1657 } else {
1658 values[i] = clear_value->float32[i] != 0.0F;
1659 if (clear_value->float32[i] != 0.0F && clear_value->float32[i] != 1.0F)
1660 return;
1661 }
1662
1663 if (index == extra_channel) {
1664 extra_value = values[i];
1665 has_alpha = true;
1666 } else {
1667 main_value = values[i];
1668 has_color = true;
1669 }
1670 }
1671
1672 /* If alpha isn't present, make it the same as color, and vice versa. */
1673 if (!has_alpha)
1674 extra_value = main_value;
1675 else if (!has_color)
1676 main_value = extra_value;
1677
1678 for (int i = 0; i < 4; ++i)
1679 if (values[i] != main_value &&
1680 desc->swizzle[i] - VK_SWIZZLE_X != extra_channel &&
1681 desc->swizzle[i] >= VK_SWIZZLE_X &&
1682 desc->swizzle[i] <= VK_SWIZZLE_W)
1683 return;
1684
1685 *can_avoid_fast_clear_elim = true;
1686 *reset_value = 0;
1687 if (main_value)
1688 *reset_value |= RADV_DCC_CLEAR_MAIN_1;
1689
1690 if (extra_value)
1691 *reset_value |= RADV_DCC_CLEAR_SECONDARY_1;
1692 return;
1693 }
1694
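/* Check whether a color attachment clear can be done as a fast (CMASK/DCC)
 * clear: the view, layout and queue must allow it, the clear rect has to
 * cover the whole image, all layers/views have to be cleared and the clear
 * color must be packable by the hardware.
 */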
1695 static bool
1696 radv_can_fast_clear_color(struct radv_cmd_buffer *cmd_buffer,
1697 const struct radv_image_view *iview,
1698 VkImageLayout image_layout,
1699 bool in_render_loop,
1700 const VkClearRect *clear_rect,
1701 VkClearColorValue clear_value,
1702 uint32_t view_mask)
1703 {
1704 uint32_t clear_color[2];
1705
1706 if (!radv_image_view_can_fast_clear(cmd_buffer->device, iview))
1707 return false;
1708
1709 if (!radv_layout_can_fast_clear(iview->image, image_layout, in_render_loop,
1710 radv_image_queue_family_mask(iview->image,
1711 cmd_buffer->queue_family_index,
1712 cmd_buffer->queue_family_index)))
1713 return false;
1714
1715 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
1716 clear_rect->rect.extent.width != iview->image->info.width ||
1717 clear_rect->rect.extent.height != iview->image->info.height)
1718 return false;
1719
1720 if (view_mask && (iview->image->info.array_size >= 32 ||
1721 (1u << iview->image->info.array_size) - 1u != view_mask))
1722 return false;
1723 if (!view_mask && clear_rect->baseArrayLayer != 0)
1724 return false;
1725 if (!view_mask && clear_rect->layerCount != iview->image->info.array_size)
1726 return false;
1727
1728 	/* The clear color must be packable into the CB clear registers. */
1729 if (!radv_format_pack_clear_color(iview->vk_format,
1730 clear_color, &clear_value))
1731 return false;
1732
1733 if (radv_dcc_enabled(iview->image, iview->base_mip)) {
1734 bool can_avoid_fast_clear_elim;
1735 uint32_t reset_value;
1736
1737 vi_get_fast_clear_parameters(cmd_buffer->device,
1738 iview->image->vk_format,
1739 iview->vk_format,
1740 &clear_value, &reset_value,
1741 &can_avoid_fast_clear_elim);
1742
1743 if (iview->image->info.samples > 1) {
1744 /* DCC fast clear with MSAA should clear CMASK. */
1745 /* FIXME: This doesn't work for now. There is a
1746 * hardware bug with fast clears and DCC for MSAA
1747 * textures. AMDVLK has a workaround but it doesn't
1748 * seem to work here. Note that we might emit useless
1749 * CB flushes but that shouldn't matter.
1750 */
1751 if (!can_avoid_fast_clear_elim)
1752 return false;
1753 }
1754
1755 if (iview->image->info.levels > 1 &&
1756 cmd_buffer->device->physical_device->rad_info.chip_class == GFX8) {
1757 for (uint32_t l = 0; l < iview->level_count; l++) {
1758 uint32_t level = iview->base_mip + l;
1759 struct legacy_surf_level *surf_level =
1760 &iview->image->planes[0].surface.u.legacy.level[level];
1761
1762 				/* Do not fast clear if one level can't be
1763 * fast cleared.
1764 */
1765 if (!surf_level->dcc_fast_clear_size)
1766 return false;
1767 }
1768 }
1769 }
1770
1771 return true;
1772 }
1773
1774
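/* Perform a fast color clear by filling the CMASK and/or DCC metadata with
 * their clear values, then update the color clear metadata and record
 * whether a fast-clear eliminate pass is still required.
 */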
1775 static void
1776 radv_fast_clear_color(struct radv_cmd_buffer *cmd_buffer,
1777 const struct radv_image_view *iview,
1778 const VkClearAttachment *clear_att,
1779 uint32_t subpass_att,
1780 enum radv_cmd_flush_bits *pre_flush,
1781 enum radv_cmd_flush_bits *post_flush)
1782 {
1783 VkClearColorValue clear_value = clear_att->clearValue.color;
1784 uint32_t clear_color[2], flush_bits = 0;
1785 uint32_t cmask_clear_value;
1786 VkImageSubresourceRange range = {
1787 .aspectMask = iview->aspect_mask,
1788 .baseMipLevel = iview->base_mip,
1789 .levelCount = iview->level_count,
1790 .baseArrayLayer = iview->base_layer,
1791 .layerCount = iview->layer_count,
1792 };
1793
1794 if (pre_flush) {
1795 cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_CB |
1796 RADV_CMD_FLAG_FLUSH_AND_INV_CB_META) & ~ *pre_flush;
1797 *pre_flush |= cmd_buffer->state.flush_bits;
1798 }
1799
1800 	/* Pack the clear color for the CB clear color registers. */
1801 radv_format_pack_clear_color(iview->vk_format, clear_color, &clear_value);
1802
1803 cmask_clear_value = radv_get_cmask_fast_clear_value(iview->image);
1804
1805 	/* Clear the CMASK and/or DCC metadata buffers. */
1806 if (radv_dcc_enabled(iview->image, iview->base_mip)) {
1807 uint32_t reset_value;
1808 bool can_avoid_fast_clear_elim;
1809 bool need_decompress_pass = false;
1810
1811 vi_get_fast_clear_parameters(cmd_buffer->device,
1812 iview->image->vk_format,
1813 iview->vk_format,
1814 &clear_value, &reset_value,
1815 &can_avoid_fast_clear_elim);
1816
1817 if (radv_image_has_cmask(iview->image)) {
1818 flush_bits = radv_clear_cmask(cmd_buffer, iview->image,
1819 &range, cmask_clear_value);
1820
1821 need_decompress_pass = true;
1822 }
1823
1824 if (!can_avoid_fast_clear_elim)
1825 need_decompress_pass = true;
1826
1827 flush_bits |= radv_clear_dcc(cmd_buffer, iview->image, &range,
1828 reset_value);
1829
1830 radv_update_fce_metadata(cmd_buffer, iview->image, &range,
1831 need_decompress_pass);
1832 } else {
1833 flush_bits = radv_clear_cmask(cmd_buffer, iview->image,
1834 &range, cmask_clear_value);
1835 }
1836
1837 if (post_flush) {
1838 *post_flush |= flush_bits;
1839 }
1840
1841 radv_update_color_clear_metadata(cmd_buffer, iview, subpass_att,
1842 clear_color);
1843 }
1844
1845 /**
1846  * The parameters mean the same as those in vkCmdClearAttachments.
1847 */
1848 static void
1849 emit_clear(struct radv_cmd_buffer *cmd_buffer,
1850 const VkClearAttachment *clear_att,
1851 const VkClearRect *clear_rect,
1852 enum radv_cmd_flush_bits *pre_flush,
1853 enum radv_cmd_flush_bits *post_flush,
1854 uint32_t view_mask,
1855 bool ds_resolve_clear)
1856 {
1857 const struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
1858 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
1859 VkImageAspectFlags aspects = clear_att->aspectMask;
1860
1861 if (aspects & VK_IMAGE_ASPECT_COLOR_BIT) {
1862 const uint32_t subpass_att = clear_att->colorAttachment;
1863 assert(subpass_att < subpass->color_count);
1864 const uint32_t pass_att = subpass->color_attachments[subpass_att].attachment;
1865 if (pass_att == VK_ATTACHMENT_UNUSED)
1866 return;
1867
1868 VkImageLayout image_layout = subpass->color_attachments[subpass_att].layout;
1869 bool in_render_loop = subpass->color_attachments[subpass_att].in_render_loop;
1870 const struct radv_image_view *iview = fb ? cmd_buffer->state.attachments[pass_att].iview : NULL;
1871 VkClearColorValue clear_value = clear_att->clearValue.color;
1872
1873 if (radv_can_fast_clear_color(cmd_buffer, iview, image_layout, in_render_loop,
1874 clear_rect, clear_value, view_mask)) {
1875 radv_fast_clear_color(cmd_buffer, iview, clear_att,
1876 subpass_att, pre_flush,
1877 post_flush);
1878 } else {
1879 emit_color_clear(cmd_buffer, clear_att, clear_rect, view_mask);
1880 }
1881 } else {
1882 struct radv_subpass_attachment *ds_att = subpass->depth_stencil_attachment;
1883
1884 if (ds_resolve_clear)
1885 ds_att = subpass->ds_resolve_attachment;
1886
1887 if (!ds_att || ds_att->attachment == VK_ATTACHMENT_UNUSED)
1888 return;
1889
1890 VkImageLayout image_layout = ds_att->layout;
1891 bool in_render_loop = ds_att->in_render_loop;
1892 const struct radv_image_view *iview = fb ? cmd_buffer->state.attachments[ds_att->attachment].iview : NULL;
1893 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
1894
1895 assert(aspects & (VK_IMAGE_ASPECT_DEPTH_BIT |
1896 VK_IMAGE_ASPECT_STENCIL_BIT));
1897
1898 if (radv_can_fast_clear_depth(cmd_buffer, iview, image_layout,
1899 in_render_loop, aspects, clear_rect,
1900 clear_value, view_mask)) {
1901 radv_fast_clear_depth(cmd_buffer, iview, clear_att,
1902 pre_flush, post_flush);
1903 } else {
1904 emit_depthstencil_clear(cmd_buffer, clear_att, clear_rect,
1905 ds_att, view_mask);
1906 }
1907 }
1908 }
1909
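/* Return whether attachment 'a' still has pending clear aspects (or, with
 * multiview, views that haven't been cleared yet) in the current subpass.
 */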
1910 static inline bool
1911 radv_attachment_needs_clear(struct radv_cmd_state *cmd_state, uint32_t a)
1912 {
1913 uint32_t view_mask = cmd_state->subpass->view_mask;
1914 return (a != VK_ATTACHMENT_UNUSED &&
1915 cmd_state->attachments[a].pending_clear_aspects &&
1916 (!view_mask || (view_mask & ~cmd_state->attachments[a].cleared_views)));
1917 }
1918
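/* Return true if any color, depth/stencil or depth/stencil resolve
 * attachment of the current subpass still needs to be cleared.
 */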
1919 static bool
1920 radv_subpass_needs_clear(struct radv_cmd_buffer *cmd_buffer)
1921 {
1922 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1923 uint32_t a;
1924
1925 if (!cmd_state->subpass)
1926 return false;
1927
1928 for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
1929 a = cmd_state->subpass->color_attachments[i].attachment;
1930 if (radv_attachment_needs_clear(cmd_state, a))
1931 return true;
1932 }
1933
1934 if (cmd_state->subpass->depth_stencil_attachment) {
1935 a = cmd_state->subpass->depth_stencil_attachment->attachment;
1936 if (radv_attachment_needs_clear(cmd_state, a))
1937 return true;
1938 }
1939
1940 if (!cmd_state->subpass->ds_resolve_attachment)
1941 return false;
1942
1943 a = cmd_state->subpass->ds_resolve_attachment->attachment;
1944 return radv_attachment_needs_clear(cmd_state, a);
1945 }
1946
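/* Emit the pending clear of one subpass attachment over the whole render
 * area, then mark the attachment (or the affected views) as cleared.
 */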
1947 static void
1948 radv_subpass_clear_attachment(struct radv_cmd_buffer *cmd_buffer,
1949 struct radv_attachment_state *attachment,
1950 const VkClearAttachment *clear_att,
1951 enum radv_cmd_flush_bits *pre_flush,
1952 enum radv_cmd_flush_bits *post_flush,
1953 bool ds_resolve_clear)
1954 {
1955 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1956 uint32_t view_mask = cmd_state->subpass->view_mask;
1957
1958 VkClearRect clear_rect = {
1959 .rect = cmd_state->render_area,
1960 .baseArrayLayer = 0,
1961 .layerCount = cmd_state->framebuffer->layers,
1962 };
1963
1964 radv_describe_begin_render_pass_clear(cmd_buffer, clear_att->aspectMask);
1965
1966 emit_clear(cmd_buffer, clear_att, &clear_rect, pre_flush, post_flush,
1967 view_mask & ~attachment->cleared_views, ds_resolve_clear);
1968 if (view_mask)
1969 attachment->cleared_views |= view_mask;
1970 else
1971 attachment->pending_clear_aspects = 0;
1972
1973 radv_describe_end_render_pass_clear(cmd_buffer);
1974 }
1975
1976 /**
1977 * Emit any pending attachment clears for the current subpass.
1978 *
1979 * @see radv_attachment_state::pending_clear_aspects
1980 */
1981 void
1982 radv_cmd_buffer_clear_subpass(struct radv_cmd_buffer *cmd_buffer)
1983 {
1984 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1985 struct radv_meta_saved_state saved_state;
1986 enum radv_cmd_flush_bits pre_flush = 0;
1987 enum radv_cmd_flush_bits post_flush = 0;
1988
1989 if (!radv_subpass_needs_clear(cmd_buffer))
1990 return;
1991
1992 radv_meta_save(&saved_state, cmd_buffer,
1993 RADV_META_SAVE_GRAPHICS_PIPELINE |
1994 RADV_META_SAVE_CONSTANTS);
1995
1996 for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
1997 uint32_t a = cmd_state->subpass->color_attachments[i].attachment;
1998
1999 if (!radv_attachment_needs_clear(cmd_state, a))
2000 continue;
2001
2002 assert(cmd_state->attachments[a].pending_clear_aspects ==
2003 VK_IMAGE_ASPECT_COLOR_BIT);
2004
2005 VkClearAttachment clear_att = {
2006 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2007 .colorAttachment = i, /* Use attachment index relative to subpass */
2008 .clearValue = cmd_state->attachments[a].clear_value,
2009 };
2010
2011 radv_subpass_clear_attachment(cmd_buffer,
2012 &cmd_state->attachments[a],
2013 &clear_att, &pre_flush,
2014 &post_flush, false);
2015 }
2016
2017 if (cmd_state->subpass->depth_stencil_attachment) {
2018 uint32_t ds = cmd_state->subpass->depth_stencil_attachment->attachment;
2019 if (radv_attachment_needs_clear(cmd_state, ds)) {
2020 VkClearAttachment clear_att = {
2021 .aspectMask = cmd_state->attachments[ds].pending_clear_aspects,
2022 .clearValue = cmd_state->attachments[ds].clear_value,
2023 };
2024
2025 radv_subpass_clear_attachment(cmd_buffer,
2026 &cmd_state->attachments[ds],
2027 &clear_att, &pre_flush,
2028 &post_flush, false);
2029 }
2030 }
2031
2032 if (cmd_state->subpass->ds_resolve_attachment) {
2033 uint32_t ds_resolve = cmd_state->subpass->ds_resolve_attachment->attachment;
2034 if (radv_attachment_needs_clear(cmd_state, ds_resolve)) {
2035 VkClearAttachment clear_att = {
2036 .aspectMask = cmd_state->attachments[ds_resolve].pending_clear_aspects,
2037 .clearValue = cmd_state->attachments[ds_resolve].clear_value,
2038 };
2039
2040 radv_subpass_clear_attachment(cmd_buffer,
2041 &cmd_state->attachments[ds_resolve],
2042 &clear_att, &pre_flush,
2043 &post_flush, true);
2044 }
2045 }
2046
2047 radv_meta_restore(&saved_state, cmd_buffer);
2048 cmd_buffer->state.flush_bits |= post_flush;
2049 }
2050
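/* Slow-path clear of a single mip level and layer: wrap the subresource in a
 * temporary image view, framebuffer and render pass, and emit a regular
 * attachment clear inside it.
 */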
2051 static void
2052 radv_clear_image_layer(struct radv_cmd_buffer *cmd_buffer,
2053 struct radv_image *image,
2054 VkImageLayout image_layout,
2055 const VkImageSubresourceRange *range,
2056 VkFormat format, int level, int layer,
2057 const VkClearValue *clear_val)
2058 {
2059 VkDevice device_h = radv_device_to_handle(cmd_buffer->device);
2060 struct radv_image_view iview;
2061 uint32_t width = radv_minify(image->info.width, range->baseMipLevel + level);
2062 uint32_t height = radv_minify(image->info.height, range->baseMipLevel + level);
2063
2064 radv_image_view_init(&iview, cmd_buffer->device,
2065 &(VkImageViewCreateInfo) {
2066 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
2067 .image = radv_image_to_handle(image),
2068 .viewType = radv_meta_get_view_type(image),
2069 .format = format,
2070 .subresourceRange = {
2071 .aspectMask = range->aspectMask,
2072 .baseMipLevel = range->baseMipLevel + level,
2073 .levelCount = 1,
2074 .baseArrayLayer = range->baseArrayLayer + layer,
2075 .layerCount = 1
2076 },
2077 }, NULL);
2078
2079 VkFramebuffer fb;
2080 radv_CreateFramebuffer(device_h,
2081 &(VkFramebufferCreateInfo) {
2082 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
2083 .attachmentCount = 1,
2084 .pAttachments = (VkImageView[]) {
2085 radv_image_view_to_handle(&iview),
2086 },
2087 .width = width,
2088 .height = height,
2089 .layers = 1
2090 },
2091 &cmd_buffer->pool->alloc,
2092 &fb);
2093
2094 VkAttachmentDescription att_desc = {
2095 .format = iview.vk_format,
2096 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
2097 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
2098 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
2099 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE,
2100 .initialLayout = image_layout,
2101 .finalLayout = image_layout,
2102 };
2103
2104 VkSubpassDescription subpass_desc = {
2105 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
2106 .inputAttachmentCount = 0,
2107 .colorAttachmentCount = 0,
2108 .pColorAttachments = NULL,
2109 .pResolveAttachments = NULL,
2110 .pDepthStencilAttachment = NULL,
2111 .preserveAttachmentCount = 0,
2112 .pPreserveAttachments = NULL,
2113 };
2114
2115 const VkAttachmentReference att_ref = {
2116 .attachment = 0,
2117 .layout = image_layout,
2118 };
2119
2120 if (range->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
2121 subpass_desc.colorAttachmentCount = 1;
2122 subpass_desc.pColorAttachments = &att_ref;
2123 } else {
2124 subpass_desc.pDepthStencilAttachment = &att_ref;
2125 }
2126
2127 VkRenderPass pass;
2128 radv_CreateRenderPass(device_h,
2129 &(VkRenderPassCreateInfo) {
2130 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2131 .attachmentCount = 1,
2132 .pAttachments = &att_desc,
2133 .subpassCount = 1,
2134 .pSubpasses = &subpass_desc,
2135 .dependencyCount = 2,
2136 .pDependencies = (VkSubpassDependency[]) {
2137 {
2138 .srcSubpass = VK_SUBPASS_EXTERNAL,
2139 .dstSubpass = 0,
2140 .srcStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2141 .dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
2142 .srcAccessMask = 0,
2143 .dstAccessMask = 0,
2144 .dependencyFlags = 0
2145 },
2146 {
2147 .srcSubpass = 0,
2148 .dstSubpass = VK_SUBPASS_EXTERNAL,
2149 .srcStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2150 .dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
2151 .srcAccessMask = 0,
2152 .dstAccessMask = 0,
2153 .dependencyFlags = 0
2154 }
2155 }
2156 },
2157 &cmd_buffer->pool->alloc,
2158 &pass);
2159
2160 radv_cmd_buffer_begin_render_pass(cmd_buffer,
2161 &(VkRenderPassBeginInfo) {
2162 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
2163 .renderArea = {
2164 .offset = { 0, 0, },
2165 .extent = {
2166 .width = width,
2167 .height = height,
2168 },
2169 },
2170 .renderPass = pass,
2171 .framebuffer = fb,
2172 .clearValueCount = 0,
2173 .pClearValues = NULL,
2174 });
2175
2176 radv_cmd_buffer_set_subpass(cmd_buffer,
2177 &cmd_buffer->state.pass->subpasses[0]);
2178
2179 VkClearAttachment clear_att = {
2180 .aspectMask = range->aspectMask,
2181 .colorAttachment = 0,
2182 .clearValue = *clear_val,
2183 };
2184
2185 VkClearRect clear_rect = {
2186 .rect = {
2187 .offset = { 0, 0 },
2188 .extent = { width, height },
2189 },
2190 .baseArrayLayer = range->baseArrayLayer,
2191 .layerCount = 1, /* FINISHME: clear multi-layer framebuffer */
2192 };
2193
2194 emit_clear(cmd_buffer, &clear_att, &clear_rect, NULL, NULL, 0, false);
2195
2196 radv_cmd_buffer_end_render_pass(cmd_buffer);
2197 radv_DestroyRenderPass(device_h, pass,
2198 &cmd_buffer->pool->alloc);
2199 radv_DestroyFramebuffer(device_h, fb,
2200 &cmd_buffer->pool->alloc);
2201 }
2202
2203 /**
2204 * Return TRUE if a fast color or depth clear has been performed.
2205 */
2206 static bool
2207 radv_fast_clear_range(struct radv_cmd_buffer *cmd_buffer,
2208 struct radv_image *image,
2209 VkFormat format,
2210 VkImageLayout image_layout,
2211 bool in_render_loop,
2212 const VkImageSubresourceRange *range,
2213 const VkClearValue *clear_val)
2214 {
2215 struct radv_image_view iview;
2216
2217 radv_image_view_init(&iview, cmd_buffer->device,
2218 &(VkImageViewCreateInfo) {
2219 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
2220 .image = radv_image_to_handle(image),
2221 .viewType = radv_meta_get_view_type(image),
2222 .format = image->vk_format,
2223 .subresourceRange = {
2224 .aspectMask = range->aspectMask,
2225 .baseMipLevel = range->baseMipLevel,
2226 .levelCount = range->levelCount,
2227 .baseArrayLayer = range->baseArrayLayer,
2228 .layerCount = range->layerCount,
2229 },
2230 }, NULL);
2231
2232 VkClearRect clear_rect = {
2233 .rect = {
2234 .offset = { 0, 0 },
2235 .extent = {
2236 radv_minify(image->info.width, range->baseMipLevel),
2237 radv_minify(image->info.height, range->baseMipLevel),
2238 },
2239 },
2240 .baseArrayLayer = range->baseArrayLayer,
2241 .layerCount = range->layerCount,
2242 };
2243
2244 VkClearAttachment clear_att = {
2245 .aspectMask = range->aspectMask,
2246 .colorAttachment = 0,
2247 .clearValue = *clear_val,
2248 };
2249
2250 if (vk_format_is_color(format)) {
2251 if (radv_can_fast_clear_color(cmd_buffer, &iview, image_layout,
2252 in_render_loop, &clear_rect,
2253 clear_att.clearValue.color, 0)) {
2254 radv_fast_clear_color(cmd_buffer, &iview, &clear_att,
2255 clear_att.colorAttachment,
2256 NULL, NULL);
2257 return true;
2258 }
2259 } else {
2260 if (radv_can_fast_clear_depth(cmd_buffer, &iview, image_layout,
2261 					      in_render_loop, range->aspectMask,
2262 &clear_rect, clear_att.clearValue.depthStencil,
2263 0)) {
2264 radv_fast_clear_depth(cmd_buffer, &iview, &clear_att,
2265 NULL, NULL);
2266 return true;
2267 }
2268 }
2269
2270 return false;
2271 }
2272
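/* Common implementation of vkCmdClearColorImage/vkCmdClearDepthStencilImage.
 * Some formats are first remapped to a clearable equivalent; each range is
 * then fast cleared when possible, otherwise cleared level by level and
 * layer by layer (through the compute path when 'cs' is set).
 */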
2273 static void
2274 radv_cmd_clear_image(struct radv_cmd_buffer *cmd_buffer,
2275 struct radv_image *image,
2276 VkImageLayout image_layout,
2277 const VkClearValue *clear_value,
2278 uint32_t range_count,
2279 const VkImageSubresourceRange *ranges,
2280 bool cs)
2281 {
2282 VkFormat format = image->vk_format;
2283 VkClearValue internal_clear_value = *clear_value;
2284
2285 if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
2286 uint32_t value;
2287 format = VK_FORMAT_R32_UINT;
2288 value = float3_to_rgb9e5(clear_value->color.float32);
2289 internal_clear_value.color.uint32[0] = value;
2290 }
2291
2292 if (format == VK_FORMAT_R4G4_UNORM_PACK8) {
2293 uint8_t r, g;
2294 format = VK_FORMAT_R8_UINT;
2295 r = float_to_ubyte(clear_value->color.float32[0]) >> 4;
2296 g = float_to_ubyte(clear_value->color.float32[1]) >> 4;
2297 internal_clear_value.color.uint32[0] = (r << 4) | (g & 0xf);
2298 }
2299
2300 if (format == VK_FORMAT_R32G32B32_UINT ||
2301 format == VK_FORMAT_R32G32B32_SINT ||
2302 format == VK_FORMAT_R32G32B32_SFLOAT)
2303 cs = true;
2304
2305 for (uint32_t r = 0; r < range_count; r++) {
2306 const VkImageSubresourceRange *range = &ranges[r];
2307
2308 		/* Try to perform a fast clear first, otherwise fall back to
2309 * the legacy path.
2310 */
2311 if (!cs &&
2312 radv_fast_clear_range(cmd_buffer, image, format,
2313 image_layout, false, range,
2314 &internal_clear_value)) {
2315 continue;
2316 }
2317
2318 for (uint32_t l = 0; l < radv_get_levelCount(image, range); ++l) {
2319 const uint32_t layer_count = image->type == VK_IMAGE_TYPE_3D ?
2320 radv_minify(image->info.depth, range->baseMipLevel + l) :
2321 radv_get_layerCount(image, range);
2322 for (uint32_t s = 0; s < layer_count; ++s) {
2323
2324 if (cs) {
2325 struct radv_meta_blit2d_surf surf;
2326 surf.format = format;
2327 surf.image = image;
2328 surf.level = range->baseMipLevel + l;
2329 surf.layer = range->baseArrayLayer + s;
2330 surf.aspect_mask = range->aspectMask;
2331 radv_meta_clear_image_cs(cmd_buffer, &surf,
2332 &internal_clear_value.color);
2333 } else {
2334 radv_clear_image_layer(cmd_buffer, image, image_layout,
2335 range, format, l, s, &internal_clear_value);
2336 }
2337 }
2338 }
2339 }
2340 }
2341
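/* On the compute queue the clear has to go through the CS path, so compute
 * state is saved/restored instead of graphics state.
 */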
2342 void radv_CmdClearColorImage(
2343 VkCommandBuffer commandBuffer,
2344 VkImage image_h,
2345 VkImageLayout imageLayout,
2346 const VkClearColorValue* pColor,
2347 uint32_t rangeCount,
2348 const VkImageSubresourceRange* pRanges)
2349 {
2350 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2351 RADV_FROM_HANDLE(radv_image, image, image_h);
2352 struct radv_meta_saved_state saved_state;
2353 bool cs = cmd_buffer->queue_family_index == RADV_QUEUE_COMPUTE;
2354
2355 if (cs) {
2356 radv_meta_save(&saved_state, cmd_buffer,
2357 RADV_META_SAVE_COMPUTE_PIPELINE |
2358 RADV_META_SAVE_CONSTANTS |
2359 RADV_META_SAVE_DESCRIPTORS);
2360 } else {
2361 radv_meta_save(&saved_state, cmd_buffer,
2362 RADV_META_SAVE_GRAPHICS_PIPELINE |
2363 RADV_META_SAVE_CONSTANTS);
2364 }
2365
2366 radv_cmd_clear_image(cmd_buffer, image, imageLayout,
2367 (const VkClearValue *) pColor,
2368 rangeCount, pRanges, cs);
2369
2370 radv_meta_restore(&saved_state, cmd_buffer);
2371 }
2372
2373 void radv_CmdClearDepthStencilImage(
2374 VkCommandBuffer commandBuffer,
2375 VkImage image_h,
2376 VkImageLayout imageLayout,
2377 const VkClearDepthStencilValue* pDepthStencil,
2378 uint32_t rangeCount,
2379 const VkImageSubresourceRange* pRanges)
2380 {
2381 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2382 RADV_FROM_HANDLE(radv_image, image, image_h);
2383 struct radv_meta_saved_state saved_state;
2384
2385 radv_meta_save(&saved_state, cmd_buffer,
2386 RADV_META_SAVE_GRAPHICS_PIPELINE |
2387 RADV_META_SAVE_CONSTANTS);
2388
2389 radv_cmd_clear_image(cmd_buffer, image, imageLayout,
2390 (const VkClearValue *) pDepthStencil,
2391 rangeCount, pRanges, false);
2392
2393 radv_meta_restore(&saved_state, cmd_buffer);
2394 }
2395
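/* Emit one clear per (attachment, rect) pair inside the current subpass. */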
2396 void radv_CmdClearAttachments(
2397 VkCommandBuffer commandBuffer,
2398 uint32_t attachmentCount,
2399 const VkClearAttachment* pAttachments,
2400 uint32_t rectCount,
2401 const VkClearRect* pRects)
2402 {
2403 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2404 struct radv_meta_saved_state saved_state;
2405 enum radv_cmd_flush_bits pre_flush = 0;
2406 enum radv_cmd_flush_bits post_flush = 0;
2407
2408 if (!cmd_buffer->state.subpass)
2409 return;
2410
2411 radv_meta_save(&saved_state, cmd_buffer,
2412 RADV_META_SAVE_GRAPHICS_PIPELINE |
2413 RADV_META_SAVE_CONSTANTS);
2414
2415 /* FINISHME: We can do better than this dumb loop. It thrashes too much
2416 * state.
2417 */
2418 for (uint32_t a = 0; a < attachmentCount; ++a) {
2419 for (uint32_t r = 0; r < rectCount; ++r) {
2420 emit_clear(cmd_buffer, &pAttachments[a], &pRects[r], &pre_flush, &post_flush,
2421 cmd_buffer->state.subpass->view_mask, false);
2422 }
2423 }
2424
2425 radv_meta_restore(&saved_state, cmd_buffer);
2426 cmd_buffer->state.flush_bits |= post_flush;
2427 }