radv: add radv_secure_compile_type enum
[mesa.git] / src / amd / vulkan / radv_meta_clear.c
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "radv_debug.h"
25 #include "radv_meta.h"
26 #include "radv_private.h"
27 #include "nir/nir_builder.h"
28
29 #include "util/format_rgb9e5.h"
30 #include "vk_format.h"
31
32 enum {
33 DEPTH_CLEAR_SLOW,
34 DEPTH_CLEAR_FAST_EXPCLEAR,
35 DEPTH_CLEAR_FAST_NO_EXPCLEAR
36 };
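/* Depth/stencil clear pipeline variants (see create_depthstencil_pipeline()
 * and pick_depthstencil_pipeline() below): SLOW performs a regular
 * depth/stencil write, the FAST variants enable the DB fast-clear path
 * (db_depth_clear/db_stencil_clear), and FAST_NO_EXPCLEAR additionally
 * disables the expclear optimization, which is used when the previous
 * clear values are unknown.
 */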
37
38 static void
39 build_color_shaders(struct nir_shader **out_vs,
40 struct nir_shader **out_fs,
41 uint32_t frag_output)
42 {
43 nir_builder vs_b;
44 nir_builder fs_b;
45
46 nir_builder_init_simple_shader(&vs_b, NULL, MESA_SHADER_VERTEX, NULL);
47 nir_builder_init_simple_shader(&fs_b, NULL, MESA_SHADER_FRAGMENT, NULL);
48
49 vs_b.shader->info.name = ralloc_strdup(vs_b.shader, "meta_clear_color_vs");
50 fs_b.shader->info.name = ralloc_strdup(fs_b.shader, "meta_clear_color_fs");
51
52 const struct glsl_type *position_type = glsl_vec4_type();
53 const struct glsl_type *color_type = glsl_vec4_type();
54
55 nir_variable *vs_out_pos =
56 nir_variable_create(vs_b.shader, nir_var_shader_out, position_type,
57 "gl_Position");
58 vs_out_pos->data.location = VARYING_SLOT_POS;
59
60 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(fs_b.shader, nir_intrinsic_load_push_constant);
61 nir_intrinsic_set_base(in_color_load, 0);
62 nir_intrinsic_set_range(in_color_load, 16);
63 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&fs_b, 0));
64 in_color_load->num_components = 4;
65 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 4, 32, "clear color");
66 nir_builder_instr_insert(&fs_b, &in_color_load->instr);
67
68 nir_variable *fs_out_color =
69 nir_variable_create(fs_b.shader, nir_var_shader_out, color_type,
70 "f_color");
71 fs_out_color->data.location = FRAG_RESULT_DATA0 + frag_output;
72
73 nir_store_var(&fs_b, fs_out_color, &in_color_load->dest.ssa, 0xf);
74
75 nir_ssa_def *outvec = radv_meta_gen_rect_vertices(&vs_b);
76 nir_store_var(&vs_b, vs_out_pos, outvec, 0xf);
77
78 const struct glsl_type *layer_type = glsl_int_type();
79 nir_variable *vs_out_layer =
80 nir_variable_create(vs_b.shader, nir_var_shader_out, layer_type,
81 "v_layer");
82 vs_out_layer->data.location = VARYING_SLOT_LAYER;
83 vs_out_layer->data.interpolation = INTERP_MODE_FLAT;
84 nir_ssa_def *inst_id = nir_load_instance_id(&vs_b);
85 nir_ssa_def *base_instance = nir_load_base_instance(&vs_b);
86
87 nir_ssa_def *layer_id = nir_iadd(&vs_b, inst_id, base_instance);
88 nir_store_var(&vs_b, vs_out_layer, layer_id, 0x1);
89
90 *out_vs = vs_b.shader;
91 *out_fs = fs_b.shader;
92 }
93
94 static VkResult
95 create_pipeline(struct radv_device *device,
96 struct radv_render_pass *render_pass,
97 uint32_t samples,
98 struct nir_shader *vs_nir,
99 struct nir_shader *fs_nir,
100 const VkPipelineVertexInputStateCreateInfo *vi_state,
101 const VkPipelineDepthStencilStateCreateInfo *ds_state,
102 const VkPipelineColorBlendStateCreateInfo *cb_state,
103 const VkPipelineLayout layout,
104 const struct radv_graphics_pipeline_create_info *extra,
105 const VkAllocationCallbacks *alloc,
106 VkPipeline *pipeline)
107 {
108 VkDevice device_h = radv_device_to_handle(device);
109 VkResult result;
110
111 struct radv_shader_module vs_m = { .nir = vs_nir };
112 struct radv_shader_module fs_m = { .nir = fs_nir };
113
114 result = radv_graphics_pipeline_create(device_h,
115 radv_pipeline_cache_to_handle(&device->meta_state.cache),
116 &(VkGraphicsPipelineCreateInfo) {
117 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
118 .stageCount = fs_nir ? 2 : 1,
119 .pStages = (VkPipelineShaderStageCreateInfo[]) {
120 {
121 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
122 .stage = VK_SHADER_STAGE_VERTEX_BIT,
123 .module = radv_shader_module_to_handle(&vs_m),
124 .pName = "main",
125 },
126 {
127 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
128 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
129 .module = radv_shader_module_to_handle(&fs_m),
130 .pName = "main",
131 },
132 },
133 .pVertexInputState = vi_state,
134 .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
135 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
136 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
137 .primitiveRestartEnable = false,
138 },
139 .pViewportState = &(VkPipelineViewportStateCreateInfo) {
140 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
141 .viewportCount = 1,
142 .scissorCount = 1,
143 },
144 .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
145 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
146 .rasterizerDiscardEnable = false,
147 .polygonMode = VK_POLYGON_MODE_FILL,
148 .cullMode = VK_CULL_MODE_NONE,
149 .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
150 .depthBiasEnable = false,
151 },
152 .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
153 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
154 .rasterizationSamples = samples,
155 .sampleShadingEnable = false,
156 .pSampleMask = NULL,
157 .alphaToCoverageEnable = false,
158 .alphaToOneEnable = false,
159 },
160 .pDepthStencilState = ds_state,
161 .pColorBlendState = cb_state,
162 .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
163 /* The meta clear pipeline declares all state as dynamic.
164 * As a consequence, vkCmdBindPipeline writes no dynamic state
165 * to the cmd buffer. Therefore, at the end of the meta clear,
166 						 * we need only restore the dynamic state that was set with vkCmdSet*.
167 */
168 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
169 .dynamicStateCount = 8,
170 .pDynamicStates = (VkDynamicState[]) {
171 /* Everything except stencil write mask */
172 VK_DYNAMIC_STATE_VIEWPORT,
173 VK_DYNAMIC_STATE_SCISSOR,
174 VK_DYNAMIC_STATE_LINE_WIDTH,
175 VK_DYNAMIC_STATE_DEPTH_BIAS,
176 VK_DYNAMIC_STATE_BLEND_CONSTANTS,
177 VK_DYNAMIC_STATE_DEPTH_BOUNDS,
178 VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
179 VK_DYNAMIC_STATE_STENCIL_REFERENCE,
180 },
181 },
182 .layout = layout,
183 .flags = 0,
184 .renderPass = radv_render_pass_to_handle(render_pass),
185 .subpass = 0,
186 },
187 extra,
188 alloc,
189 pipeline);
190
191 ralloc_free(vs_nir);
192 ralloc_free(fs_nir);
193
194 return result;
195 }
196
197 static VkResult
198 create_color_renderpass(struct radv_device *device,
199 VkFormat vk_format,
200 uint32_t samples,
201 VkRenderPass *pass)
202 {
203 mtx_lock(&device->meta_state.mtx);
204 if (*pass) {
205 		mtx_unlock(&device->meta_state.mtx);
206 return VK_SUCCESS;
207 }
208
209 VkResult result = radv_CreateRenderPass(radv_device_to_handle(device),
210 &(VkRenderPassCreateInfo) {
211 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
212 .attachmentCount = 1,
213 .pAttachments = &(VkAttachmentDescription) {
214 .format = vk_format,
215 .samples = samples,
216 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
217 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
218 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
219 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
220 },
221 .subpassCount = 1,
222 .pSubpasses = &(VkSubpassDescription) {
223 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
224 .inputAttachmentCount = 0,
225 .colorAttachmentCount = 1,
226 .pColorAttachments = &(VkAttachmentReference) {
227 .attachment = 0,
228 .layout = VK_IMAGE_LAYOUT_GENERAL,
229 },
230 .pResolveAttachments = NULL,
231 .pDepthStencilAttachment = &(VkAttachmentReference) {
232 .attachment = VK_ATTACHMENT_UNUSED,
233 .layout = VK_IMAGE_LAYOUT_GENERAL,
234 },
235 .preserveAttachmentCount = 0,
236 .pPreserveAttachments = NULL,
237 },
238 .dependencyCount = 0,
239 }, &device->meta_state.alloc, pass);
240 mtx_unlock(&device->meta_state.mtx);
241 return result;
242 }
243
244 static VkResult
245 create_color_pipeline(struct radv_device *device,
246 uint32_t samples,
247 uint32_t frag_output,
248 VkPipeline *pipeline,
249 VkRenderPass pass)
250 {
251 struct nir_shader *vs_nir;
252 struct nir_shader *fs_nir;
253 VkResult result;
254
255 mtx_lock(&device->meta_state.mtx);
256 if (*pipeline) {
257 mtx_unlock(&device->meta_state.mtx);
258 return VK_SUCCESS;
259 }
260
261 build_color_shaders(&vs_nir, &fs_nir, frag_output);
262
263 const VkPipelineVertexInputStateCreateInfo vi_state = {
264 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
265 .vertexBindingDescriptionCount = 0,
266 .vertexAttributeDescriptionCount = 0,
267 };
268
269 const VkPipelineDepthStencilStateCreateInfo ds_state = {
270 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
271 .depthTestEnable = false,
272 .depthWriteEnable = false,
273 .depthBoundsTestEnable = false,
274 .stencilTestEnable = false,
275 };
276
277 VkPipelineColorBlendAttachmentState blend_attachment_state[MAX_RTS] = { 0 };
278 blend_attachment_state[frag_output] = (VkPipelineColorBlendAttachmentState) {
279 .blendEnable = false,
280 .colorWriteMask = VK_COLOR_COMPONENT_A_BIT |
281 VK_COLOR_COMPONENT_R_BIT |
282 VK_COLOR_COMPONENT_G_BIT |
283 VK_COLOR_COMPONENT_B_BIT,
284 };
285
286 const VkPipelineColorBlendStateCreateInfo cb_state = {
287 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
288 .logicOpEnable = false,
289 .attachmentCount = MAX_RTS,
290 .pAttachments = blend_attachment_state
291 };
292
293
294 struct radv_graphics_pipeline_create_info extra = {
295 .use_rectlist = true,
296 };
297 result = create_pipeline(device, radv_render_pass_from_handle(pass),
298 samples, vs_nir, fs_nir, &vi_state, &ds_state, &cb_state,
299 device->meta_state.clear_color_p_layout,
300 &extra, &device->meta_state.alloc, pipeline);
301
302 mtx_unlock(&device->meta_state.mtx);
303 return result;
304 }
305
306 static void
307 finish_meta_clear_htile_mask_state(struct radv_device *device)
308 {
309 struct radv_meta_state *state = &device->meta_state;
310
311 radv_DestroyPipeline(radv_device_to_handle(device),
312 state->clear_htile_mask_pipeline,
313 &state->alloc);
314 radv_DestroyPipelineLayout(radv_device_to_handle(device),
315 state->clear_htile_mask_p_layout,
316 &state->alloc);
317 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
318 state->clear_htile_mask_ds_layout,
319 &state->alloc);
320 }
321
322 void
323 radv_device_finish_meta_clear_state(struct radv_device *device)
324 {
325 struct radv_meta_state *state = &device->meta_state;
326
327 for (uint32_t i = 0; i < ARRAY_SIZE(state->clear); ++i) {
328 for (uint32_t j = 0; j < ARRAY_SIZE(state->clear[i].color_pipelines); ++j) {
329 radv_DestroyPipeline(radv_device_to_handle(device),
330 state->clear[i].color_pipelines[j],
331 &state->alloc);
332 radv_DestroyRenderPass(radv_device_to_handle(device),
333 state->clear[i].render_pass[j],
334 &state->alloc);
335 }
336
337 for (uint32_t j = 0; j < NUM_DEPTH_CLEAR_PIPELINES; j++) {
338 radv_DestroyPipeline(radv_device_to_handle(device),
339 state->clear[i].depth_only_pipeline[j],
340 &state->alloc);
341 radv_DestroyPipeline(radv_device_to_handle(device),
342 state->clear[i].stencil_only_pipeline[j],
343 &state->alloc);
344 radv_DestroyPipeline(radv_device_to_handle(device),
345 state->clear[i].depthstencil_pipeline[j],
346 &state->alloc);
347
348 radv_DestroyPipeline(radv_device_to_handle(device),
349 state->clear[i].depth_only_unrestricted_pipeline[j],
350 &state->alloc);
351 radv_DestroyPipeline(radv_device_to_handle(device),
352 state->clear[i].stencil_only_unrestricted_pipeline[j],
353 &state->alloc);
354 radv_DestroyPipeline(radv_device_to_handle(device),
355 state->clear[i].depthstencil_unrestricted_pipeline[j],
356 &state->alloc);
357 }
358 radv_DestroyRenderPass(radv_device_to_handle(device),
359 state->clear[i].depthstencil_rp,
360 &state->alloc);
361 }
362 radv_DestroyPipelineLayout(radv_device_to_handle(device),
363 state->clear_color_p_layout,
364 &state->alloc);
365 radv_DestroyPipelineLayout(radv_device_to_handle(device),
366 state->clear_depth_p_layout,
367 &state->alloc);
368 radv_DestroyPipelineLayout(radv_device_to_handle(device),
369 state->clear_depth_unrestricted_p_layout,
370 &state->alloc);
371
372 finish_meta_clear_htile_mask_state(device);
373 }
374
375 static void
376 emit_color_clear(struct radv_cmd_buffer *cmd_buffer,
377 const VkClearAttachment *clear_att,
378 const VkClearRect *clear_rect,
379 uint32_t view_mask)
380 {
381 struct radv_device *device = cmd_buffer->device;
382 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
383 const uint32_t subpass_att = clear_att->colorAttachment;
384 const uint32_t pass_att = subpass->color_attachments[subpass_att].attachment;
385 const struct radv_image_view *iview = cmd_buffer->state.attachments ?
386 cmd_buffer->state.attachments[pass_att].iview : NULL;
387 uint32_t samples, samples_log2;
388 VkFormat format;
389 unsigned fs_key;
390 VkClearColorValue clear_value = clear_att->clearValue.color;
391 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
392 VkPipeline pipeline;
393
394 /* When a framebuffer is bound to the current command buffer, get the
395 * number of samples from it. Otherwise, get the number of samples from
396 * the render pass because it's likely a secondary command buffer.
397 */
398 if (iview) {
399 samples = iview->image->info.samples;
400 format = iview->vk_format;
401 } else {
402 samples = cmd_buffer->state.pass->attachments[pass_att].samples;
403 format = cmd_buffer->state.pass->attachments[pass_att].format;
404 }
405
406 samples_log2 = ffs(samples) - 1;
407 fs_key = radv_format_meta_fs_key(format);
408
409 if (fs_key == -1) {
410 radv_finishme("color clears incomplete");
411 return;
412 }
413
414 if (device->meta_state.clear[samples_log2].render_pass[fs_key] == VK_NULL_HANDLE) {
415 VkResult ret = create_color_renderpass(device, radv_fs_key_format_exemplars[fs_key],
416 samples,
417 &device->meta_state.clear[samples_log2].render_pass[fs_key]);
418 if (ret != VK_SUCCESS) {
419 cmd_buffer->record_result = ret;
420 return;
421 }
422 }
423
424 if (device->meta_state.clear[samples_log2].color_pipelines[fs_key] == VK_NULL_HANDLE) {
425 VkResult ret = create_color_pipeline(device, samples, 0,
426 &device->meta_state.clear[samples_log2].color_pipelines[fs_key],
427 device->meta_state.clear[samples_log2].render_pass[fs_key]);
428 if (ret != VK_SUCCESS) {
429 cmd_buffer->record_result = ret;
430 return;
431 }
432 }
433
434 pipeline = device->meta_state.clear[samples_log2].color_pipelines[fs_key];
435 if (!pipeline) {
436 radv_finishme("color clears incomplete");
437 return;
438 }
439 assert(samples_log2 < ARRAY_SIZE(device->meta_state.clear));
440 assert(pipeline);
441 assert(clear_att->aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
442 assert(clear_att->colorAttachment < subpass->color_count);
443
444 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
445 device->meta_state.clear_color_p_layout,
446 VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16,
447 &clear_value);
448
449 struct radv_subpass clear_subpass = {
450 .color_count = 1,
451 .color_attachments = (struct radv_subpass_attachment[]) {
452 subpass->color_attachments[clear_att->colorAttachment]
453 },
454 .depth_stencil_attachment = NULL,
455 };
456
457 radv_cmd_buffer_set_subpass(cmd_buffer, &clear_subpass);
458
459 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
460 pipeline);
461
462 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
463 .x = clear_rect->rect.offset.x,
464 .y = clear_rect->rect.offset.y,
465 .width = clear_rect->rect.extent.width,
466 .height = clear_rect->rect.extent.height,
467 .minDepth = 0.0f,
468 .maxDepth = 1.0f
469 });
470
471 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &clear_rect->rect);
472
473 if (view_mask) {
474 unsigned i;
475 for_each_bit(i, view_mask)
476 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, i);
477 } else {
478 radv_CmdDraw(cmd_buffer_h, 3, clear_rect->layerCount, 0, clear_rect->baseArrayLayer);
479 }
480
481 radv_cmd_buffer_set_subpass(cmd_buffer, subpass);
482 }
483
484
485 static void
486 build_depthstencil_shader(struct nir_shader **out_vs,
487 struct nir_shader **out_fs,
488 bool unrestricted)
489 {
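	/* Two variants are built below: with VK_EXT_depth_range_unrestricted the
	 * clear depth may lie outside [0.0, 1.0], so the value is read from a
	 * push constant in the fragment shader and exported via FRAG_RESULT_DEPTH;
	 * otherwise the vertex shader reads it and emits it as the position Z.
	 */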
490 nir_builder vs_b, fs_b;
491
492 nir_builder_init_simple_shader(&vs_b, NULL, MESA_SHADER_VERTEX, NULL);
493 nir_builder_init_simple_shader(&fs_b, NULL, MESA_SHADER_FRAGMENT, NULL);
494
495 vs_b.shader->info.name = ralloc_strdup(vs_b.shader, "meta_clear_depthstencil_vs");
496 fs_b.shader->info.name = ralloc_strdup(fs_b.shader, "meta_clear_depthstencil_fs");
497 const struct glsl_type *position_out_type = glsl_vec4_type();
498
499 nir_variable *vs_out_pos =
500 nir_variable_create(vs_b.shader, nir_var_shader_out, position_out_type,
501 "gl_Position");
502 vs_out_pos->data.location = VARYING_SLOT_POS;
503
504 nir_ssa_def *z;
505 if (unrestricted) {
506 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(fs_b.shader, nir_intrinsic_load_push_constant);
507 nir_intrinsic_set_base(in_color_load, 0);
508 nir_intrinsic_set_range(in_color_load, 4);
509 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&fs_b, 0));
510 in_color_load->num_components = 1;
511 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 1, 32, "depth value");
512 nir_builder_instr_insert(&fs_b, &in_color_load->instr);
513
514 nir_variable *fs_out_depth =
515 nir_variable_create(fs_b.shader, nir_var_shader_out,
516 glsl_int_type(), "f_depth");
517 fs_out_depth->data.location = FRAG_RESULT_DEPTH;
518 nir_store_var(&fs_b, fs_out_depth, &in_color_load->dest.ssa, 0x1);
519
520 z = nir_imm_float(&vs_b, 0.0);
521 } else {
522 nir_intrinsic_instr *in_color_load = nir_intrinsic_instr_create(vs_b.shader, nir_intrinsic_load_push_constant);
523 nir_intrinsic_set_base(in_color_load, 0);
524 nir_intrinsic_set_range(in_color_load, 4);
525 in_color_load->src[0] = nir_src_for_ssa(nir_imm_int(&vs_b, 0));
526 in_color_load->num_components = 1;
527 nir_ssa_dest_init(&in_color_load->instr, &in_color_load->dest, 1, 32, "depth value");
528 nir_builder_instr_insert(&vs_b, &in_color_load->instr);
529
530 z = &in_color_load->dest.ssa;
531 }
532
533 nir_ssa_def *outvec = radv_meta_gen_rect_vertices_comp2(&vs_b, z);
534 nir_store_var(&vs_b, vs_out_pos, outvec, 0xf);
535
536 const struct glsl_type *layer_type = glsl_int_type();
537 nir_variable *vs_out_layer =
538 nir_variable_create(vs_b.shader, nir_var_shader_out, layer_type,
539 "v_layer");
540 vs_out_layer->data.location = VARYING_SLOT_LAYER;
541 vs_out_layer->data.interpolation = INTERP_MODE_FLAT;
542 nir_ssa_def *inst_id = nir_load_instance_id(&vs_b);
543 nir_ssa_def *base_instance = nir_load_base_instance(&vs_b);
544
545 nir_ssa_def *layer_id = nir_iadd(&vs_b, inst_id, base_instance);
546 nir_store_var(&vs_b, vs_out_layer, layer_id, 0x1);
547
548 *out_vs = vs_b.shader;
549 *out_fs = fs_b.shader;
550 }
551
552 static VkResult
553 create_depthstencil_renderpass(struct radv_device *device,
554 uint32_t samples,
555 VkRenderPass *render_pass)
556 {
557 mtx_lock(&device->meta_state.mtx);
558 if (*render_pass) {
559 mtx_unlock(&device->meta_state.mtx);
560 return VK_SUCCESS;
561 }
562
563 VkResult result = radv_CreateRenderPass(radv_device_to_handle(device),
564 &(VkRenderPassCreateInfo) {
565 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
566 .attachmentCount = 1,
567 .pAttachments = &(VkAttachmentDescription) {
568 .format = VK_FORMAT_D32_SFLOAT_S8_UINT,
569 .samples = samples,
570 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
571 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
572 .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
573 .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
574 },
575 .subpassCount = 1,
576 .pSubpasses = &(VkSubpassDescription) {
577 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
578 .inputAttachmentCount = 0,
579 .colorAttachmentCount = 0,
580 .pColorAttachments = NULL,
581 .pResolveAttachments = NULL,
582 .pDepthStencilAttachment = &(VkAttachmentReference) {
583 .attachment = 0,
584 .layout = VK_IMAGE_LAYOUT_GENERAL,
585 },
586 .preserveAttachmentCount = 0,
587 .pPreserveAttachments = NULL,
588 },
589 .dependencyCount = 0,
590 }, &device->meta_state.alloc, render_pass);
591 mtx_unlock(&device->meta_state.mtx);
592 return result;
593 }
594
595 static VkResult
596 create_depthstencil_pipeline(struct radv_device *device,
597 VkImageAspectFlags aspects,
598 uint32_t samples,
599 int index,
600 bool unrestricted,
601 VkPipeline *pipeline,
602 VkRenderPass render_pass)
603 {
604 struct nir_shader *vs_nir, *fs_nir;
605 VkResult result;
606
607 mtx_lock(&device->meta_state.mtx);
608 if (*pipeline) {
609 mtx_unlock(&device->meta_state.mtx);
610 return VK_SUCCESS;
611 }
612
613 build_depthstencil_shader(&vs_nir, &fs_nir, unrestricted);
614
615 const VkPipelineVertexInputStateCreateInfo vi_state = {
616 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
617 .vertexBindingDescriptionCount = 0,
618 .vertexAttributeDescriptionCount = 0,
619 };
620
621 const VkPipelineDepthStencilStateCreateInfo ds_state = {
622 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
623 .depthTestEnable = (aspects & VK_IMAGE_ASPECT_DEPTH_BIT),
624 .depthCompareOp = VK_COMPARE_OP_ALWAYS,
625 .depthWriteEnable = (aspects & VK_IMAGE_ASPECT_DEPTH_BIT),
626 .depthBoundsTestEnable = false,
627 .stencilTestEnable = (aspects & VK_IMAGE_ASPECT_STENCIL_BIT),
628 .front = {
629 .passOp = VK_STENCIL_OP_REPLACE,
630 .compareOp = VK_COMPARE_OP_ALWAYS,
631 .writeMask = UINT32_MAX,
632 .reference = 0, /* dynamic */
633 },
634 		.back = { 0 /* don't care */ },
635 };
636
637 const VkPipelineColorBlendStateCreateInfo cb_state = {
638 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
639 .logicOpEnable = false,
640 .attachmentCount = 0,
641 .pAttachments = NULL,
642 };
643
644 struct radv_graphics_pipeline_create_info extra = {
645 .use_rectlist = true,
646 };
647
648 if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT) {
649 		extra.db_depth_clear = index != DEPTH_CLEAR_SLOW;
650 		extra.db_depth_disable_expclear = index == DEPTH_CLEAR_FAST_NO_EXPCLEAR;
651 }
652 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
653 		extra.db_stencil_clear = index != DEPTH_CLEAR_SLOW;
654 		extra.db_stencil_disable_expclear = index == DEPTH_CLEAR_FAST_NO_EXPCLEAR;
655 }
656 result = create_pipeline(device, radv_render_pass_from_handle(render_pass),
657 samples, vs_nir, fs_nir, &vi_state, &ds_state, &cb_state,
658 device->meta_state.clear_depth_p_layout,
659 &extra, &device->meta_state.alloc, pipeline);
660
661 mtx_unlock(&device->meta_state.mtx);
662 return result;
663 }
664
665 static bool depth_view_can_fast_clear(struct radv_cmd_buffer *cmd_buffer,
666 const struct radv_image_view *iview,
667 VkImageAspectFlags aspects,
668 VkImageLayout layout,
669 bool in_render_loop,
670 const VkClearRect *clear_rect,
671 VkClearDepthStencilValue clear_value)
672 {
673 if (!iview)
674 return false;
675
676 uint32_t queue_mask = radv_image_queue_family_mask(iview->image,
677 cmd_buffer->queue_family_index,
678 cmd_buffer->queue_family_index);
679 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
680 clear_rect->rect.extent.width != iview->extent.width ||
681 clear_rect->rect.extent.height != iview->extent.height)
682 return false;
683 if (radv_image_is_tc_compat_htile(iview->image) &&
684 (((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) && clear_value.depth != 0.0 &&
685 clear_value.depth != 1.0) ||
686 ((aspects & VK_IMAGE_ASPECT_STENCIL_BIT) && clear_value.stencil != 0)))
687 return false;
688 if (radv_image_has_htile(iview->image) &&
689 iview->base_mip == 0 &&
690 iview->base_layer == 0 &&
691 iview->layer_count == iview->image->info.array_size &&
692 radv_layout_is_htile_compressed(iview->image, layout, in_render_loop, queue_mask) &&
693 radv_image_extent_compare(iview->image, &iview->extent))
694 return true;
695 return false;
696 }
697
698 static VkPipeline
699 pick_depthstencil_pipeline(struct radv_cmd_buffer *cmd_buffer,
700 struct radv_meta_state *meta_state,
701 const struct radv_image_view *iview,
702 int samples_log2,
703 VkImageAspectFlags aspects,
704 VkImageLayout layout,
705 bool in_render_loop,
706 const VkClearRect *clear_rect,
707 VkClearDepthStencilValue clear_value)
708 {
709 bool fast = depth_view_can_fast_clear(cmd_buffer, iview, aspects, layout,
710 in_render_loop, clear_rect, clear_value);
711 bool unrestricted = cmd_buffer->device->enabled_extensions.EXT_depth_range_unrestricted;
712 int index = DEPTH_CLEAR_SLOW;
713 VkPipeline *pipeline;
714
715 if (fast) {
716 		/* We don't know the previous clear values, so we always use
717 		 * the NO_EXPCLEAR path. */
718 index = DEPTH_CLEAR_FAST_NO_EXPCLEAR;
719 }
720
721 switch (aspects) {
722 case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
723 pipeline = unrestricted ?
724 &meta_state->clear[samples_log2].depthstencil_unrestricted_pipeline[index] :
725 &meta_state->clear[samples_log2].depthstencil_pipeline[index];
726 break;
727 case VK_IMAGE_ASPECT_DEPTH_BIT:
728 pipeline = unrestricted ?
729 &meta_state->clear[samples_log2].depth_only_unrestricted_pipeline[index] :
730 &meta_state->clear[samples_log2].depth_only_pipeline[index];
731 break;
732 case VK_IMAGE_ASPECT_STENCIL_BIT:
733 pipeline = unrestricted ?
734 &meta_state->clear[samples_log2].stencil_only_unrestricted_pipeline[index] :
735 &meta_state->clear[samples_log2].stencil_only_pipeline[index];
736 break;
737 default:
738 unreachable("expected depth or stencil aspect");
739 }
740
741 if (cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp == VK_NULL_HANDLE) {
742 VkResult ret = create_depthstencil_renderpass(cmd_buffer->device, 1u << samples_log2,
743 &cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp);
744 if (ret != VK_SUCCESS) {
745 cmd_buffer->record_result = ret;
746 return VK_NULL_HANDLE;
747 }
748 }
749
750 if (*pipeline == VK_NULL_HANDLE) {
751 VkResult ret = create_depthstencil_pipeline(cmd_buffer->device, aspects, 1u << samples_log2, index, unrestricted,
752 pipeline, cmd_buffer->device->meta_state.clear[samples_log2].depthstencil_rp);
753 if (ret != VK_SUCCESS) {
754 cmd_buffer->record_result = ret;
755 return VK_NULL_HANDLE;
756 }
757 }
758 return *pipeline;
759 }
760
761 static void
762 emit_depthstencil_clear(struct radv_cmd_buffer *cmd_buffer,
763 const VkClearAttachment *clear_att,
764 const VkClearRect *clear_rect,
765 struct radv_subpass_attachment *ds_att,
766 uint32_t view_mask)
767 {
768 struct radv_device *device = cmd_buffer->device;
769 struct radv_meta_state *meta_state = &device->meta_state;
770 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
771 const uint32_t pass_att = ds_att->attachment;
772 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
773 VkImageAspectFlags aspects = clear_att->aspectMask;
774 const struct radv_image_view *iview = cmd_buffer->state.attachments ?
775 cmd_buffer->state.attachments[pass_att].iview : NULL;
776 uint32_t samples, samples_log2;
777 VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
778
779 /* When a framebuffer is bound to the current command buffer, get the
780 * number of samples from it. Otherwise, get the number of samples from
781 * the render pass because it's likely a secondary command buffer.
782 */
783 if (iview) {
784 samples = iview->image->info.samples;
785 } else {
786 samples = cmd_buffer->state.pass->attachments[pass_att].samples;
787 }
788
789 samples_log2 = ffs(samples) - 1;
790
791 assert(pass_att != VK_ATTACHMENT_UNUSED);
792
793 if (!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT))
794 clear_value.depth = 1.0f;
795
796 if (cmd_buffer->device->enabled_extensions.EXT_depth_range_unrestricted) {
797 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
798 device->meta_state.clear_depth_unrestricted_p_layout,
799 VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4,
800 &clear_value.depth);
801 } else {
802 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
803 device->meta_state.clear_depth_p_layout,
804 VK_SHADER_STAGE_VERTEX_BIT, 0, 4,
805 &clear_value.depth);
806 }
807
808 uint32_t prev_reference = cmd_buffer->state.dynamic.stencil_reference.front;
809 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
810 radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
811 clear_value.stencil);
812 }
813
814 VkPipeline pipeline = pick_depthstencil_pipeline(cmd_buffer,
815 meta_state,
816 iview,
817 samples_log2,
818 aspects,
819 ds_att->layout,
820 ds_att->in_render_loop,
821 clear_rect,
822 clear_value);
823 if (!pipeline)
824 return;
825
826 struct radv_subpass clear_subpass = {
827 .color_count = 0,
828 .color_attachments = NULL,
829 .depth_stencil_attachment = ds_att,
830 };
831
832 radv_cmd_buffer_set_subpass(cmd_buffer, &clear_subpass);
833
834 radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
835 pipeline);
836
837 if (depth_view_can_fast_clear(cmd_buffer, iview, aspects,
838 ds_att->layout, ds_att->in_render_loop,
839 clear_rect, clear_value))
840 radv_update_ds_clear_metadata(cmd_buffer, iview,
841 clear_value, aspects);
842
843 radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
844 .x = clear_rect->rect.offset.x,
845 .y = clear_rect->rect.offset.y,
846 .width = clear_rect->rect.extent.width,
847 .height = clear_rect->rect.extent.height,
848 .minDepth = 0.0f,
849 .maxDepth = 1.0f
850 });
851
852 radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &clear_rect->rect);
853
854 if (view_mask) {
855 unsigned i;
856 for_each_bit(i, view_mask)
857 radv_CmdDraw(cmd_buffer_h, 3, 1, 0, i);
858 } else {
859 radv_CmdDraw(cmd_buffer_h, 3, clear_rect->layerCount, 0, clear_rect->baseArrayLayer);
860 }
861
862 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
863 radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
864 prev_reference);
865 }
866
867 radv_cmd_buffer_set_subpass(cmd_buffer, subpass);
868 }
869
870 static uint32_t
871 clear_htile_mask(struct radv_cmd_buffer *cmd_buffer,
872 struct radeon_winsys_bo *bo, uint64_t offset, uint64_t size,
873 uint32_t htile_value, uint32_t htile_mask)
874 {
875 struct radv_device *device = cmd_buffer->device;
876 struct radv_meta_state *state = &device->meta_state;
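	/* round_up_u64(size, 1024) gives the number of workgroups to dispatch:
	 * each workgroup of the HTILE-mask clear shader has 64 invocations and
	 * each invocation rewrites 16 bytes, i.e. 1024 bytes per workgroup
	 * (see build_clear_htile_mask_shader() below).
	 */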
877 uint64_t block_count = round_up_u64(size, 1024);
878 struct radv_meta_saved_state saved_state;
879
880 radv_meta_save(&saved_state, cmd_buffer,
881 RADV_META_SAVE_COMPUTE_PIPELINE |
882 RADV_META_SAVE_CONSTANTS |
883 RADV_META_SAVE_DESCRIPTORS);
884
885 struct radv_buffer dst_buffer = {
886 .bo = bo,
887 .offset = offset,
888 .size = size
889 };
890
891 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
892 VK_PIPELINE_BIND_POINT_COMPUTE,
893 state->clear_htile_mask_pipeline);
894
895 radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
896 state->clear_htile_mask_p_layout,
897 0, /* set */
898 1, /* descriptorWriteCount */
899 (VkWriteDescriptorSet[]) {
900 {
901 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
902 .dstBinding = 0,
903 .dstArrayElement = 0,
904 .descriptorCount = 1,
905 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
906 .pBufferInfo = &(VkDescriptorBufferInfo) {
907 .buffer = radv_buffer_to_handle(&dst_buffer),
908 .offset = 0,
909 .range = size
910 }
911 }
912 });
913
914 const unsigned constants[2] = {
915 htile_value & htile_mask,
916 ~htile_mask,
917 };
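	/* Worked example with representative values: a depth-only clear to 1.0
	 * on a depth+stencil surface has htile_value = 0xfffc0000 and
	 * htile_mask = 0xfffffc0f, so constants[0] = 0xfffc0000 and
	 * constants[1] = 0x000003f0; the shader then computes
	 * new = (old & 0x000003f0) | 0xfffc0000, preserving only the stencil bits.
	 */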
918
919 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
920 state->clear_htile_mask_p_layout,
921 VK_SHADER_STAGE_COMPUTE_BIT, 0, 8,
922 constants);
923
924 radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);
925
926 radv_meta_restore(&saved_state, cmd_buffer);
927
928 return RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
929 RADV_CMD_FLAG_INV_VCACHE |
930 RADV_CMD_FLAG_WB_L2;
931 }
932
933 static uint32_t
934 radv_get_htile_fast_clear_value(const struct radv_image *image,
935 VkClearDepthStencilValue value)
936 {
937 uint32_t clear_value;
938
939 if (!image->planes[0].surface.has_stencil) {
940 clear_value = value.depth ? 0xfffffff0 : 0;
941 } else {
942 clear_value = value.depth ? 0xfffc0000 : 0;
943 }
944
945 return clear_value;
946 }
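/* Only clear depth values of 0.0 and 1.0 can be expressed directly with the
 * two words above, which is why radv_is_fast_clear_depth_allowed() below
 * rejects everything else for the HTILE fast-clear path.
 */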
947
948 static uint32_t
949 radv_get_htile_mask(const struct radv_image *image, VkImageAspectFlags aspects)
950 {
951 uint32_t mask = 0;
952
953 if (!image->planes[0].surface.has_stencil) {
954 		/* The whole HTILE buffer is used for depth when there is no stencil. */
955 mask = UINT32_MAX;
956 } else {
957 if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT)
958 mask |= 0xfffffc0f;
959 if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT)
960 mask |= 0x000003f0;
961 }
962
963 return mask;
964 }
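/* As encoded by the masks above: when stencil is present, the stencil-related
 * HTILE fields live in bits [9:4] and the depth-related fields in the
 * remaining bits (this is only a summary of the constants used here, not a
 * full description of the HTILE layout).
 */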
965
966 static bool
967 radv_is_fast_clear_depth_allowed(VkClearDepthStencilValue value)
968 {
969 return value.depth == 1.0f || value.depth == 0.0f;
970 }
971
972 static bool
973 radv_is_fast_clear_stencil_allowed(VkClearDepthStencilValue value)
974 {
975 return value.stencil == 0;
976 }
977
978 /**
979 * Determine if the given image can be fast cleared.
980 */
981 static bool
982 radv_image_can_fast_clear(struct radv_device *device, struct radv_image *image)
983 {
984 if (device->instance->debug_flags & RADV_DEBUG_NO_FAST_CLEARS)
985 return false;
986
987 if (vk_format_is_color(image->vk_format)) {
988 if (!radv_image_has_cmask(image) && !radv_image_has_dcc(image))
989 return false;
990
991 /* RB+ doesn't work with CMASK fast clear on Stoney. */
992 if (!radv_image_has_dcc(image) &&
993 device->physical_device->rad_info.family == CHIP_STONEY)
994 return false;
995 } else {
996 if (!radv_image_has_htile(image))
997 return false;
998 }
999
1000 	/* Do not fast clear 3D images. */
1001 if (image->type == VK_IMAGE_TYPE_3D)
1002 return false;
1003
1004 return true;
1005 }
1006
1007 /**
1008 * Determine if the given image view can be fast cleared.
1009 */
1010 static bool
1011 radv_image_view_can_fast_clear(struct radv_device *device,
1012 const struct radv_image_view *iview)
1013 {
1014 struct radv_image *image;
1015
1016 if (!iview)
1017 return false;
1018 image = iview->image;
1019
1020 /* Only fast clear if the image itself can be fast cleared. */
1021 if (!radv_image_can_fast_clear(device, image))
1022 return false;
1023
1024 /* Only fast clear if all layers are bound. */
1025 if (iview->base_layer > 0 ||
1026 iview->layer_count != image->info.array_size)
1027 return false;
1028
1029 /* Only fast clear if the view covers the whole image. */
1030 if (!radv_image_extent_compare(image, &iview->extent))
1031 return false;
1032
1033 return true;
1034 }
1035
1036 static bool
1037 radv_can_fast_clear_depth(struct radv_cmd_buffer *cmd_buffer,
1038 const struct radv_image_view *iview,
1039 VkImageLayout image_layout,
1040 bool in_render_loop,
1041 VkImageAspectFlags aspects,
1042 const VkClearRect *clear_rect,
1043 const VkClearDepthStencilValue clear_value,
1044 uint32_t view_mask)
1045 {
1046 if (!radv_image_view_can_fast_clear(cmd_buffer->device, iview))
1047 return false;
1048
1049 if (!radv_layout_is_htile_compressed(iview->image, image_layout, in_render_loop,
1050 radv_image_queue_family_mask(iview->image,
1051 cmd_buffer->queue_family_index,
1052 cmd_buffer->queue_family_index)))
1053 return false;
1054
1055 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
1056 clear_rect->rect.extent.width != iview->image->info.width ||
1057 clear_rect->rect.extent.height != iview->image->info.height)
1058 return false;
1059
1060 if (view_mask && (iview->image->info.array_size >= 32 ||
1061 (1u << iview->image->info.array_size) - 1u != view_mask))
1062 return false;
1063 if (!view_mask && clear_rect->baseArrayLayer != 0)
1064 return false;
1065 if (!view_mask && clear_rect->layerCount != iview->image->info.array_size)
1066 return false;
1067
1068 if (cmd_buffer->device->physical_device->rad_info.chip_class < GFX9 &&
1069 (!(aspects & VK_IMAGE_ASPECT_DEPTH_BIT) ||
1070 ((vk_format_aspects(iview->image->vk_format) & VK_IMAGE_ASPECT_STENCIL_BIT) &&
1071 !(aspects & VK_IMAGE_ASPECT_STENCIL_BIT))))
1072 return false;
1073
1074 if (((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) &&
1075 !radv_is_fast_clear_depth_allowed(clear_value)) ||
1076 ((aspects & VK_IMAGE_ASPECT_STENCIL_BIT) &&
1077 !radv_is_fast_clear_stencil_allowed(clear_value)))
1078 return false;
1079
1080 return true;
1081 }
1082
1083 static void
1084 radv_fast_clear_depth(struct radv_cmd_buffer *cmd_buffer,
1085 const struct radv_image_view *iview,
1086 const VkClearAttachment *clear_att,
1087 enum radv_cmd_flush_bits *pre_flush,
1088 enum radv_cmd_flush_bits *post_flush)
1089 {
1090 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
1091 VkImageAspectFlags aspects = clear_att->aspectMask;
1092 uint32_t clear_word, flush_bits;
1093 uint32_t htile_mask;
1094
1095 clear_word = radv_get_htile_fast_clear_value(iview->image, clear_value);
1096 htile_mask = radv_get_htile_mask(iview->image, aspects);
1097
1098 if (pre_flush) {
1099 cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_DB |
1100 RADV_CMD_FLAG_FLUSH_AND_INV_DB_META) & ~ *pre_flush;
1101 *pre_flush |= cmd_buffer->state.flush_bits;
1102 }
1103
1104 	if (htile_mask == UINT32_MAX) {
1105 /* Clear the whole HTILE buffer. */
1106 flush_bits = radv_fill_buffer(cmd_buffer, iview->image->bo,
1107 iview->image->offset + iview->image->htile_offset,
1108 iview->image->planes[0].surface.htile_size, clear_word);
1109 } else {
1110 /* Only clear depth or stencil bytes in the HTILE buffer. */
1111 assert(cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9);
1112 flush_bits = clear_htile_mask(cmd_buffer, iview->image->bo,
1113 iview->image->offset + iview->image->htile_offset,
1114 iview->image->planes[0].surface.htile_size, clear_word,
1115 htile_mask);
1116 }
1117
1118 radv_update_ds_clear_metadata(cmd_buffer, iview, clear_value, aspects);
1119 if (post_flush) {
1120 *post_flush |= flush_bits;
1121 }
1122 }
1123
1124 static nir_shader *
1125 build_clear_htile_mask_shader(void)
1126 {
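	/* The NIR built below corresponds roughly to the following compute
	 * shader (an illustrative GLSL sketch, not what is actually compiled):
	 *
	 *    layout(local_size_x = 64) in;
	 *    layout(set = 0, binding = 0) buffer Htile { uvec4 data[]; };
	 *    layout(push_constant) uniform Constants {
	 *            uint clear_word; // htile_value & htile_mask
	 *            uint keep_mask;  // ~htile_mask
	 *    };
	 *
	 *    void main()
	 *    {
	 *            uint i = gl_GlobalInvocationID.x;
	 *            data[i] = (data[i] & keep_mask) | clear_word;
	 *    }
	 */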
1127 nir_builder b;
1128
1129 nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
1130 b.shader->info.name = ralloc_strdup(b.shader, "meta_clear_htile_mask");
1131 b.shader->info.cs.local_size[0] = 64;
1132 b.shader->info.cs.local_size[1] = 1;
1133 b.shader->info.cs.local_size[2] = 1;
1134
1135 nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
1136 nir_ssa_def *wg_id = nir_load_work_group_id(&b);
1137 nir_ssa_def *block_size = nir_imm_ivec4(&b,
1138 b.shader->info.cs.local_size[0],
1139 b.shader->info.cs.local_size[1],
1140 b.shader->info.cs.local_size[2], 0);
1141
1142 nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);
1143
1144 nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
1145 offset = nir_channel(&b, offset, 0);
1146
1147 nir_intrinsic_instr *buf =
1148 nir_intrinsic_instr_create(b.shader,
1149 nir_intrinsic_vulkan_resource_index);
1150
1151 buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
1152 buf->num_components = 1;
1153 nir_intrinsic_set_desc_set(buf, 0);
1154 nir_intrinsic_set_binding(buf, 0);
1155 nir_ssa_dest_init(&buf->instr, &buf->dest, buf->num_components, 32, NULL);
1156 nir_builder_instr_insert(&b, &buf->instr);
1157
1158 nir_intrinsic_instr *constants =
1159 nir_intrinsic_instr_create(b.shader,
1160 nir_intrinsic_load_push_constant);
1161 nir_intrinsic_set_base(constants, 0);
1162 nir_intrinsic_set_range(constants, 8);
1163 constants->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
1164 constants->num_components = 2;
1165 nir_ssa_dest_init(&constants->instr, &constants->dest, 2, 32, "constants");
1166 nir_builder_instr_insert(&b, &constants->instr);
1167
1168 nir_intrinsic_instr *load =
1169 nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_ssbo);
1170 load->src[0] = nir_src_for_ssa(&buf->dest.ssa);
1171 load->src[1] = nir_src_for_ssa(offset);
1172 nir_ssa_dest_init(&load->instr, &load->dest, 4, 32, NULL);
1173 load->num_components = 4;
1174 nir_builder_instr_insert(&b, &load->instr);
1175
1176 /* data = (data & ~htile_mask) | (htile_value & htile_mask) */
1177 nir_ssa_def *data =
1178 nir_iand(&b, &load->dest.ssa,
1179 nir_channel(&b, &constants->dest.ssa, 1));
1180 data = nir_ior(&b, data, nir_channel(&b, &constants->dest.ssa, 0));
1181
1182 nir_intrinsic_instr *store =
1183 nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
1184 store->src[0] = nir_src_for_ssa(data);
1185 store->src[1] = nir_src_for_ssa(&buf->dest.ssa);
1186 store->src[2] = nir_src_for_ssa(offset);
1187 nir_intrinsic_set_write_mask(store, 0xf);
1188 nir_intrinsic_set_access(store, ACCESS_NON_READABLE);
1189 store->num_components = 4;
1190 nir_builder_instr_insert(&b, &store->instr);
1191
1192 return b.shader;
1193 }
1194
1195 static VkResult
1196 init_meta_clear_htile_mask_state(struct radv_device *device)
1197 {
1198 struct radv_meta_state *state = &device->meta_state;
1199 struct radv_shader_module cs = { .nir = NULL };
1200 VkResult result;
1201
1202 cs.nir = build_clear_htile_mask_shader();
1203
1204 VkDescriptorSetLayoutCreateInfo ds_layout_info = {
1205 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
1206 .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
1207 .bindingCount = 1,
1208 .pBindings = (VkDescriptorSetLayoutBinding[]) {
1209 {
1210 .binding = 0,
1211 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
1212 .descriptorCount = 1,
1213 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
1214 .pImmutableSamplers = NULL
1215 },
1216 }
1217 };
1218
1219 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
1220 &ds_layout_info, &state->alloc,
1221 &state->clear_htile_mask_ds_layout);
1222 if (result != VK_SUCCESS)
1223 goto fail;
1224
1225 VkPipelineLayoutCreateInfo p_layout_info = {
1226 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1227 .setLayoutCount = 1,
1228 .pSetLayouts = &state->clear_htile_mask_ds_layout,
1229 .pushConstantRangeCount = 1,
1230 .pPushConstantRanges = &(VkPushConstantRange){
1231 VK_SHADER_STAGE_COMPUTE_BIT, 0, 8,
1232 },
1233 };
1234
1235 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
1236 &p_layout_info, &state->alloc,
1237 &state->clear_htile_mask_p_layout);
1238 if (result != VK_SUCCESS)
1239 goto fail;
1240
1241 VkPipelineShaderStageCreateInfo shader_stage = {
1242 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
1243 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
1244 .module = radv_shader_module_to_handle(&cs),
1245 .pName = "main",
1246 .pSpecializationInfo = NULL,
1247 };
1248
1249 VkComputePipelineCreateInfo pipeline_info = {
1250 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
1251 .stage = shader_stage,
1252 .flags = 0,
1253 .layout = state->clear_htile_mask_p_layout,
1254 };
1255
1256 result = radv_CreateComputePipelines(radv_device_to_handle(device),
1257 radv_pipeline_cache_to_handle(&state->cache),
1258 1, &pipeline_info, NULL,
1259 &state->clear_htile_mask_pipeline);
1260
1261 ralloc_free(cs.nir);
1262 return result;
1263 fail:
1264 ralloc_free(cs.nir);
1265 return result;
1266 }
1267
1268 VkResult
1269 radv_device_init_meta_clear_state(struct radv_device *device, bool on_demand)
1270 {
1271 VkResult res;
1272 struct radv_meta_state *state = &device->meta_state;
1273
1274 VkPipelineLayoutCreateInfo pl_color_create_info = {
1275 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1276 .setLayoutCount = 0,
1277 .pushConstantRangeCount = 1,
1278 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16},
1279 };
1280
1281 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1282 &pl_color_create_info,
1283 &device->meta_state.alloc,
1284 &device->meta_state.clear_color_p_layout);
1285 if (res != VK_SUCCESS)
1286 goto fail;
1287
1288 VkPipelineLayoutCreateInfo pl_depth_create_info = {
1289 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1290 .setLayoutCount = 0,
1291 .pushConstantRangeCount = 1,
1292 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
1293 };
1294
1295 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1296 &pl_depth_create_info,
1297 &device->meta_state.alloc,
1298 &device->meta_state.clear_depth_p_layout);
1299 if (res != VK_SUCCESS)
1300 goto fail;
1301
1302 VkPipelineLayoutCreateInfo pl_depth_unrestricted_create_info = {
1303 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1304 .setLayoutCount = 0,
1305 .pushConstantRangeCount = 1,
1306 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
1307 };
1308
1309 res = radv_CreatePipelineLayout(radv_device_to_handle(device),
1310 &pl_depth_unrestricted_create_info,
1311 &device->meta_state.alloc,
1312 &device->meta_state.clear_depth_unrestricted_p_layout);
1313 if (res != VK_SUCCESS)
1314 goto fail;
1315
1316 res = init_meta_clear_htile_mask_state(device);
1317 if (res != VK_SUCCESS)
1318 goto fail;
1319
1320 if (on_demand)
1321 return VK_SUCCESS;
1322
1323 for (uint32_t i = 0; i < ARRAY_SIZE(state->clear); ++i) {
1324 uint32_t samples = 1 << i;
1325 for (uint32_t j = 0; j < NUM_META_FS_KEYS; ++j) {
1326 VkFormat format = radv_fs_key_format_exemplars[j];
1327 unsigned fs_key = radv_format_meta_fs_key(format);
1328 assert(!state->clear[i].color_pipelines[fs_key]);
1329
1330 res = create_color_renderpass(device, format, samples,
1331 &state->clear[i].render_pass[fs_key]);
1332 if (res != VK_SUCCESS)
1333 goto fail;
1334
1335 res = create_color_pipeline(device, samples, 0, &state->clear[i].color_pipelines[fs_key],
1336 state->clear[i].render_pass[fs_key]);
1337 if (res != VK_SUCCESS)
1338 goto fail;
1339
1340 }
1341
1342 res = create_depthstencil_renderpass(device,
1343 samples,
1344 &state->clear[i].depthstencil_rp);
1345 if (res != VK_SUCCESS)
1346 goto fail;
1347
1348 for (uint32_t j = 0; j < NUM_DEPTH_CLEAR_PIPELINES; j++) {
1349 res = create_depthstencil_pipeline(device,
1350 VK_IMAGE_ASPECT_DEPTH_BIT,
1351 samples,
1352 j,
1353 false,
1354 &state->clear[i].depth_only_pipeline[j],
1355 state->clear[i].depthstencil_rp);
1356 if (res != VK_SUCCESS)
1357 goto fail;
1358
1359 res = create_depthstencil_pipeline(device,
1360 VK_IMAGE_ASPECT_STENCIL_BIT,
1361 samples,
1362 j,
1363 false,
1364 &state->clear[i].stencil_only_pipeline[j],
1365 state->clear[i].depthstencil_rp);
1366 if (res != VK_SUCCESS)
1367 goto fail;
1368
1369 res = create_depthstencil_pipeline(device,
1370 VK_IMAGE_ASPECT_DEPTH_BIT |
1371 VK_IMAGE_ASPECT_STENCIL_BIT,
1372 samples,
1373 j,
1374 false,
1375 &state->clear[i].depthstencil_pipeline[j],
1376 state->clear[i].depthstencil_rp);
1377 if (res != VK_SUCCESS)
1378 goto fail;
1379
1380 res = create_depthstencil_pipeline(device,
1381 VK_IMAGE_ASPECT_DEPTH_BIT,
1382 samples,
1383 j,
1384 true,
1385 &state->clear[i].depth_only_unrestricted_pipeline[j],
1386 state->clear[i].depthstencil_rp);
1387 if (res != VK_SUCCESS)
1388 goto fail;
1389
1390 res = create_depthstencil_pipeline(device,
1391 VK_IMAGE_ASPECT_STENCIL_BIT,
1392 samples,
1393 j,
1394 true,
1395 &state->clear[i].stencil_only_unrestricted_pipeline[j],
1396 state->clear[i].depthstencil_rp);
1397 if (res != VK_SUCCESS)
1398 goto fail;
1399
1400 res = create_depthstencil_pipeline(device,
1401 VK_IMAGE_ASPECT_DEPTH_BIT |
1402 VK_IMAGE_ASPECT_STENCIL_BIT,
1403 samples,
1404 j,
1405 true,
1406 &state->clear[i].depthstencil_unrestricted_pipeline[j],
1407 state->clear[i].depthstencil_rp);
1408 if (res != VK_SUCCESS)
1409 goto fail;
1410 }
1411 }
1412 return VK_SUCCESS;
1413
1414 fail:
1415 radv_device_finish_meta_clear_state(device);
1416 return res;
1417 }
1418
1419 static uint32_t
1420 radv_get_cmask_fast_clear_value(const struct radv_image *image)
1421 {
1422 uint32_t value = 0; /* Default value when no DCC. */
1423
1424 /* The fast-clear value is different for images that have both DCC and
1425 * CMASK metadata.
1426 */
1427 if (radv_image_has_dcc(image)) {
1428 /* DCC fast clear with MSAA should clear CMASK to 0xC. */
1429 return image->info.samples > 1 ? 0xcccccccc : 0xffffffff;
1430 }
1431
1432 return value;
1433 }
1434
1435 uint32_t
1436 radv_clear_cmask(struct radv_cmd_buffer *cmd_buffer,
1437 struct radv_image *image,
1438 const VkImageSubresourceRange *range, uint32_t value)
1439 {
1440 uint64_t offset = image->offset + image->cmask_offset;
1441 uint64_t size;
1442
1443 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1444 /* TODO: clear layers. */
1445 size = image->planes[0].surface.cmask_size;
1446 } else {
1447 unsigned cmask_slice_size =
1448 image->planes[0].surface.cmask_slice_size;
1449
1450 offset += cmask_slice_size * range->baseArrayLayer;
1451 size = cmask_slice_size * radv_get_layerCount(image, range);
1452 }
1453
1454 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1455 }
1456
1457
1458 uint32_t
1459 radv_clear_fmask(struct radv_cmd_buffer *cmd_buffer,
1460 struct radv_image *image,
1461 const VkImageSubresourceRange *range, uint32_t value)
1462 {
1463 uint64_t offset = image->offset + image->fmask_offset;
1464 uint64_t size;
1465
1466 /* MSAA images do not support mipmap levels. */
1467 assert(range->baseMipLevel == 0 &&
1468 radv_get_levelCount(image, range) == 1);
1469
1470 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1471 /* TODO: clear layers. */
1472 size = image->planes[0].surface.fmask_size;
1473 } else {
1474 unsigned fmask_slice_size =
1475 image->planes[0].surface.u.legacy.fmask.slice_size;
1476
1477
1478 offset += fmask_slice_size * range->baseArrayLayer;
1479 size = fmask_slice_size * radv_get_layerCount(image, range);
1480 }
1481
1482 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1483 }
1484
1485 uint32_t
1486 radv_clear_dcc(struct radv_cmd_buffer *cmd_buffer,
1487 struct radv_image *image,
1488 const VkImageSubresourceRange *range, uint32_t value)
1489 {
1490 uint32_t level_count = radv_get_levelCount(image, range);
1491 uint32_t flush_bits = 0;
1492
1493 /* Mark the image as being compressed. */
1494 radv_update_dcc_metadata(cmd_buffer, image, range, true);
1495
1496 for (uint32_t l = 0; l < level_count; l++) {
1497 uint64_t offset = image->offset + image->dcc_offset;
1498 uint32_t level = range->baseMipLevel + l;
1499 uint64_t size;
1500
1501 if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX9) {
1502 /* Mipmap levels aren't implemented. */
1503 assert(level == 0);
1504 size = image->planes[0].surface.dcc_size;
1505 } else {
1506 const struct legacy_surf_level *surf_level =
1507 &image->planes[0].surface.u.legacy.level[level];
1508
1509 			/* If dcc_fast_clear_size is 0 (which might happen for
1510 			 * mipmaps), the fill buffer operation below is a no-op.
1511 			 * This can only happen during initialization because the
1512 			 * fast clear path falls back to slow clears if one
1513 			 * level can't be fast cleared.
1514 */
1515 offset += surf_level->dcc_offset +
1516 surf_level->dcc_slice_fast_clear_size * range->baseArrayLayer;
1517 size = surf_level->dcc_slice_fast_clear_size * radv_get_layerCount(image, range);
1518 }
1519
1520 flush_bits |= radv_fill_buffer(cmd_buffer, image->bo, offset,
1521 size, value);
1522 }
1523
1524 return flush_bits;
1525 }
1526
1527 uint32_t
1528 radv_clear_htile(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
1529 const VkImageSubresourceRange *range, uint32_t value)
1530 {
1531 unsigned layer_count = radv_get_layerCount(image, range);
1532 uint64_t size = image->planes[0].surface.htile_slice_size * layer_count;
1533 uint64_t offset = image->offset + image->htile_offset +
1534 image->planes[0].surface.htile_slice_size * range->baseArrayLayer;
1535
1536 return radv_fill_buffer(cmd_buffer, image->bo, offset, size, value);
1537 }
1538
1539 enum {
1540 RADV_DCC_CLEAR_REG = 0x20202020U,
1541 RADV_DCC_CLEAR_MAIN_1 = 0x80808080U,
1542 RADV_DCC_CLEAR_SECONDARY_1 = 0x40404040U
1543 };
1544
1545 static void vi_get_fast_clear_parameters(struct radv_device *device,
1546 VkFormat image_format,
1547 VkFormat view_format,
1548 const VkClearColorValue *clear_value,
1549 uint32_t* reset_value,
1550 bool *can_avoid_fast_clear_elim)
1551 {
1552 bool values[4] = {};
1553 int extra_channel;
1554 bool main_value = false;
1555 bool extra_value = false;
1556 bool has_color = false;
1557 bool has_alpha = false;
1558 int i;
1559 *can_avoid_fast_clear_elim = false;
1560
1561 *reset_value = RADV_DCC_CLEAR_REG;
1562
1563 const struct vk_format_description *desc = vk_format_description(view_format);
1564 if (view_format == VK_FORMAT_B10G11R11_UFLOAT_PACK32 ||
1565 view_format == VK_FORMAT_R5G6B5_UNORM_PACK16 ||
1566 view_format == VK_FORMAT_B5G6R5_UNORM_PACK16)
1567 extra_channel = -1;
1568 else if (desc->layout == VK_FORMAT_LAYOUT_PLAIN) {
1569 if (vi_alpha_is_on_msb(device, view_format))
1570 extra_channel = desc->nr_channels - 1;
1571 else
1572 extra_channel = 0;
1573 } else
1574 return;
1575
1576 for (i = 0; i < 4; i++) {
1577 int index = desc->swizzle[i] - VK_SWIZZLE_X;
1578 if (desc->swizzle[i] < VK_SWIZZLE_X ||
1579 desc->swizzle[i] > VK_SWIZZLE_W)
1580 continue;
1581
1582 if (desc->channel[i].pure_integer &&
1583 desc->channel[i].type == VK_FORMAT_TYPE_SIGNED) {
1584 /* Use the maximum value for clamping the clear color. */
1585 int max = u_bit_consecutive(0, desc->channel[i].size - 1);
1586
1587 values[i] = clear_value->int32[i] != 0;
1588 if (clear_value->int32[i] != 0 && MIN2(clear_value->int32[i], max) != max)
1589 return;
1590 } else if (desc->channel[i].pure_integer &&
1591 desc->channel[i].type == VK_FORMAT_TYPE_UNSIGNED) {
1592 /* Use the maximum value for clamping the clear color. */
1593 unsigned max = u_bit_consecutive(0, desc->channel[i].size);
1594
1595 values[i] = clear_value->uint32[i] != 0U;
1596 if (clear_value->uint32[i] != 0U && MIN2(clear_value->uint32[i], max) != max)
1597 return;
1598 } else {
1599 values[i] = clear_value->float32[i] != 0.0F;
1600 if (clear_value->float32[i] != 0.0F && clear_value->float32[i] != 1.0F)
1601 return;
1602 }
1603
1604 if (index == extra_channel) {
1605 extra_value = values[i];
1606 has_alpha = true;
1607 } else {
1608 main_value = values[i];
1609 has_color = true;
1610 }
1611 }
1612
1613 /* If alpha isn't present, make it the same as color, and vice versa. */
1614 if (!has_alpha)
1615 extra_value = main_value;
1616 else if (!has_color)
1617 main_value = extra_value;
1618
1619 for (int i = 0; i < 4; ++i)
1620 if (values[i] != main_value &&
1621 desc->swizzle[i] - VK_SWIZZLE_X != extra_channel &&
1622 desc->swizzle[i] >= VK_SWIZZLE_X &&
1623 desc->swizzle[i] <= VK_SWIZZLE_W)
1624 return;
1625
1626 *can_avoid_fast_clear_elim = true;
1627 *reset_value = 0;
1628 if (main_value)
1629 *reset_value |= RADV_DCC_CLEAR_MAIN_1;
1630
1631 if (extra_value)
1632 *reset_value |= RADV_DCC_CLEAR_SECONDARY_1;
1633 return;
1634 }
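/* Illustration of the logic above: clearing every channel to 1.0 (or to the
 * channel's maximum integer value) yields reset_value = RADV_DCC_CLEAR_MAIN_1 |
 * RADV_DCC_CLEAR_SECONDARY_1 = 0xc0c0c0c0, clearing everything to 0 yields
 * reset_value = 0, and in both cases can_avoid_fast_clear_elim is set. Clear
 * colors that cannot be expressed this way keep the generic
 * RADV_DCC_CLEAR_REG value and need a fast-clear eliminate pass later
 * (see radv_fast_clear_color()).
 */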
1635
1636 static bool
1637 radv_can_fast_clear_color(struct radv_cmd_buffer *cmd_buffer,
1638 const struct radv_image_view *iview,
1639 VkImageLayout image_layout,
1640 bool in_render_loop,
1641 const VkClearRect *clear_rect,
1642 VkClearColorValue clear_value,
1643 uint32_t view_mask)
1644 {
1645 uint32_t clear_color[2];
1646
1647 if (!radv_image_view_can_fast_clear(cmd_buffer->device, iview))
1648 return false;
1649
1650 if (!radv_layout_can_fast_clear(iview->image, image_layout, in_render_loop,
1651 radv_image_queue_family_mask(iview->image,
1652 cmd_buffer->queue_family_index,
1653 cmd_buffer->queue_family_index)))
1654 return false;
1655
1656 if (clear_rect->rect.offset.x || clear_rect->rect.offset.y ||
1657 clear_rect->rect.extent.width != iview->image->info.width ||
1658 clear_rect->rect.extent.height != iview->image->info.height)
1659 return false;
1660
1661 if (view_mask && (iview->image->info.array_size >= 32 ||
1662 (1u << iview->image->info.array_size) - 1u != view_mask))
1663 return false;
1664 if (!view_mask && clear_rect->baseArrayLayer != 0)
1665 return false;
1666 if (!view_mask && clear_rect->layerCount != iview->image->info.array_size)
1667 return false;
1668
1669 /* The clear color must be packable into the hardware clear registers. */
1670 if (!radv_format_pack_clear_color(iview->vk_format,
1671 clear_color, &clear_value))
1672 return false;
1673
1674 if (radv_dcc_enabled(iview->image, iview->base_mip)) {
1675 bool can_avoid_fast_clear_elim;
1676 uint32_t reset_value;
1677
1678 vi_get_fast_clear_parameters(cmd_buffer->device,
1679 iview->image->vk_format,
1680 iview->vk_format,
1681 &clear_value, &reset_value,
1682 &can_avoid_fast_clear_elim);
1683
1684 if (iview->image->info.samples > 1) {
1685 /* DCC fast clear with MSAA should clear CMASK. */
1686 /* FIXME: This doesn't work for now. There is a
1687 * hardware bug with fast clears and DCC for MSAA
1688 * textures. AMDVLK has a workaround but it doesn't
1689 * seem to work here. Note that we might emit useless
1690 * CB flushes but that shouldn't matter.
1691 */
1692 if (!can_avoid_fast_clear_elim)
1693 return false;
1694 }
1695
1696 if (iview->image->info.levels > 1 &&
1697 cmd_buffer->device->physical_device->rad_info.chip_class == GFX8) {
1698 for (uint32_t l = 0; l < iview->level_count; l++) {
1699 uint32_t level = iview->base_mip + l;
1700 struct legacy_surf_level *surf_level =
1701 &iview->image->planes[0].surface.u.legacy.level[level];
1702
1703 /* Do not fast clear if one level can't be
1704 * fast cleared.
1705 */
1706 if (!surf_level->dcc_fast_clear_size)
1707 return false;
1708 }
1709 }
1710 }
1711
1712 return true;
1713 }
1714
1715
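/* Perform a color fast clear: clear the CMASK and/or DCC metadata to the
 * computed clear codes, record whether a fast-clear eliminate pass will be
 * needed later, and store the packed clear color in the image metadata so
 * that subsequent draws can use it.
 */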
1716 static void
1717 radv_fast_clear_color(struct radv_cmd_buffer *cmd_buffer,
1718 const struct radv_image_view *iview,
1719 const VkClearAttachment *clear_att,
1720 uint32_t subpass_att,
1721 enum radv_cmd_flush_bits *pre_flush,
1722 enum radv_cmd_flush_bits *post_flush)
1723 {
1724 VkClearColorValue clear_value = clear_att->clearValue.color;
1725 uint32_t clear_color[2], flush_bits = 0;
1726 uint32_t cmask_clear_value;
1727 VkImageSubresourceRange range = {
1728 .aspectMask = iview->aspect_mask,
1729 .baseMipLevel = iview->base_mip,
1730 .levelCount = iview->level_count,
1731 .baseArrayLayer = iview->base_layer,
1732 .layerCount = iview->layer_count,
1733 };
1734
1735 if (pre_flush) {
1736 cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_CB |
1737 RADV_CMD_FLAG_FLUSH_AND_INV_CB_META) & ~*pre_flush;
1738 *pre_flush |= cmd_buffer->state.flush_bits;
1739 }
1740
1741 /* Pack the clear color into the hardware clear color registers. */
1742 radv_format_pack_clear_color(iview->vk_format, clear_color, &clear_value);
1743
1744 cmask_clear_value = radv_get_cmask_fast_clear_value(iview->image);
1745
1746 /* Clear the DCC and/or CMASK metadata. */
1747 if (radv_dcc_enabled(iview->image, iview->base_mip)) {
1748 uint32_t reset_value;
1749 bool can_avoid_fast_clear_elim;
1750 bool need_decompress_pass = false;
1751
1752 vi_get_fast_clear_parameters(cmd_buffer->device,
1753 iview->image->vk_format,
1754 iview->vk_format,
1755 &clear_value, &reset_value,
1756 &can_avoid_fast_clear_elim);
1757
1758 if (radv_image_has_cmask(iview->image)) {
1759 flush_bits = radv_clear_cmask(cmd_buffer, iview->image,
1760 &range, cmask_clear_value);
1761
1762 need_decompress_pass = true;
1763 }
1764
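/* If the clear value can't be expressed directly by the DCC clear code,
 * a fast-clear eliminate pass is required before the image can be read
 * normally.
 */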
1765 if (!can_avoid_fast_clear_elim)
1766 need_decompress_pass = true;
1767
1768 flush_bits |= radv_clear_dcc(cmd_buffer, iview->image, &range,
1769 reset_value);
1770
1771 radv_update_fce_metadata(cmd_buffer, iview->image, &range,
1772 need_decompress_pass);
1773 } else {
1774 flush_bits = radv_clear_cmask(cmd_buffer, iview->image,
1775 &range, cmask_clear_value);
1776 }
1777
1778 if (post_flush) {
1779 *post_flush |= flush_bits;
1780 }
1781
1782 radv_update_color_clear_metadata(cmd_buffer, iview, subpass_att,
1783 clear_color);
1784 }
1785
1786 /**
1787 * The parameters mean the same as those in vkCmdClearAttachments.
1788 */
1789 static void
1790 emit_clear(struct radv_cmd_buffer *cmd_buffer,
1791 const VkClearAttachment *clear_att,
1792 const VkClearRect *clear_rect,
1793 enum radv_cmd_flush_bits *pre_flush,
1794 enum radv_cmd_flush_bits *post_flush,
1795 uint32_t view_mask,
1796 bool ds_resolve_clear)
1797 {
1798 const struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
1799 const struct radv_subpass *subpass = cmd_buffer->state.subpass;
1800 VkImageAspectFlags aspects = clear_att->aspectMask;
1801
1802 if (aspects & VK_IMAGE_ASPECT_COLOR_BIT) {
1803 const uint32_t subpass_att = clear_att->colorAttachment;
1804 assert(subpass_att < subpass->color_count);
1805 const uint32_t pass_att = subpass->color_attachments[subpass_att].attachment;
1806 if (pass_att == VK_ATTACHMENT_UNUSED)
1807 return;
1808
1809 VkImageLayout image_layout = subpass->color_attachments[subpass_att].layout;
1810 bool in_render_loop = subpass->color_attachments[subpass_att].in_render_loop;
1811 const struct radv_image_view *iview = fb ? cmd_buffer->state.attachments[pass_att].iview : NULL;
1812 VkClearColorValue clear_value = clear_att->clearValue.color;
1813
1814 if (radv_can_fast_clear_color(cmd_buffer, iview, image_layout, in_render_loop,
1815 clear_rect, clear_value, view_mask)) {
1816 radv_fast_clear_color(cmd_buffer, iview, clear_att,
1817 subpass_att, pre_flush,
1818 post_flush);
1819 } else {
1820 emit_color_clear(cmd_buffer, clear_att, clear_rect, view_mask);
1821 }
1822 } else {
1823 struct radv_subpass_attachment *ds_att = subpass->depth_stencil_attachment;
1824
1825 if (ds_resolve_clear)
1826 ds_att = subpass->ds_resolve_attachment;
1827
1828 if (!ds_att || ds_att->attachment == VK_ATTACHMENT_UNUSED)
1829 return;
1830
1831 VkImageLayout image_layout = ds_att->layout;
1832 bool in_render_loop = ds_att->in_render_loop;
1833 const struct radv_image_view *iview = fb ? cmd_buffer->state.attachments[ds_att->attachment].iview : NULL;
1834 VkClearDepthStencilValue clear_value = clear_att->clearValue.depthStencil;
1835
1836 assert(aspects & (VK_IMAGE_ASPECT_DEPTH_BIT |
1837 VK_IMAGE_ASPECT_STENCIL_BIT));
1838
1839 if (radv_can_fast_clear_depth(cmd_buffer, iview, image_layout,
1840 in_render_loop, aspects, clear_rect,
1841 clear_value, view_mask)) {
1842 radv_fast_clear_depth(cmd_buffer, iview, clear_att,
1843 pre_flush, post_flush);
1844 } else {
1845 emit_depthstencil_clear(cmd_buffer, clear_att, clear_rect,
1846 ds_att, view_mask);
1847 }
1848 }
1849 }
1850
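/* Return true if attachment 'a' still has pending clear aspects, taking the
 * per-view clear tracking into account when multiview is enabled.
 */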
1851 static inline bool
1852 radv_attachment_needs_clear(struct radv_cmd_state *cmd_state, uint32_t a)
1853 {
1854 uint32_t view_mask = cmd_state->subpass->view_mask;
1855 return (a != VK_ATTACHMENT_UNUSED &&
1856 cmd_state->attachments[a].pending_clear_aspects &&
1857 (!view_mask || (view_mask & ~cmd_state->attachments[a].cleared_views)));
1858 }
1859
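/* Return true if any color, depth/stencil or depth/stencil resolve attachment
 * of the current subpass still needs to be cleared.
 */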
1860 static bool
1861 radv_subpass_needs_clear(struct radv_cmd_buffer *cmd_buffer)
1862 {
1863 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1864 uint32_t a;
1865
1866 if (!cmd_state->subpass)
1867 return false;
1868
1869 for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
1870 a = cmd_state->subpass->color_attachments[i].attachment;
1871 if (radv_attachment_needs_clear(cmd_state, a))
1872 return true;
1873 }
1874
1875 if (cmd_state->subpass->depth_stencil_attachment) {
1876 a = cmd_state->subpass->depth_stencil_attachment->attachment;
1877 if (radv_attachment_needs_clear(cmd_state, a))
1878 return true;
1879 }
1880
1881 if (!cmd_state->subpass->ds_resolve_attachment)
1882 return false;
1883
1884 a = cmd_state->subpass->ds_resolve_attachment->attachment;
1885 return radv_attachment_needs_clear(cmd_state, a);
1886 }
1887
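/* Clear a single attachment over the whole render area, restricted to the
 * views that have not been cleared yet, and update the attachment's pending
 * clear state accordingly.
 */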
1888 static void
1889 radv_subpass_clear_attachment(struct radv_cmd_buffer *cmd_buffer,
1890 struct radv_attachment_state *attachment,
1891 const VkClearAttachment *clear_att,
1892 enum radv_cmd_flush_bits *pre_flush,
1893 enum radv_cmd_flush_bits *post_flush,
1894 bool ds_resolve_clear)
1895 {
1896 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1897 uint32_t view_mask = cmd_state->subpass->view_mask;
1898
1899 VkClearRect clear_rect = {
1900 .rect = cmd_state->render_area,
1901 .baseArrayLayer = 0,
1902 .layerCount = cmd_state->framebuffer->layers,
1903 };
1904
1905 emit_clear(cmd_buffer, clear_att, &clear_rect, pre_flush, post_flush,
1906 view_mask & ~attachment->cleared_views, ds_resolve_clear);
1907 if (view_mask)
1908 attachment->cleared_views |= view_mask;
1909 else
1910 attachment->pending_clear_aspects = 0;
1911 }
1912
1913 /**
1914 * Emit any pending attachment clears for the current subpass.
1915 *
1916 * @see radv_attachment_state::pending_clear_aspects
1917 */
1918 void
1919 radv_cmd_buffer_clear_subpass(struct radv_cmd_buffer *cmd_buffer)
1920 {
1921 struct radv_cmd_state *cmd_state = &cmd_buffer->state;
1922 struct radv_meta_saved_state saved_state;
1923 enum radv_cmd_flush_bits pre_flush = 0;
1924 enum radv_cmd_flush_bits post_flush = 0;
1925
1926 if (!radv_subpass_needs_clear(cmd_buffer))
1927 return;
1928
1929 radv_meta_save(&saved_state, cmd_buffer,
1930 RADV_META_SAVE_GRAPHICS_PIPELINE |
1931 RADV_META_SAVE_CONSTANTS);
1932
1933 for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
1934 uint32_t a = cmd_state->subpass->color_attachments[i].attachment;
1935
1936 if (!radv_attachment_needs_clear(cmd_state, a))
1937 continue;
1938
1939 assert(cmd_state->attachments[a].pending_clear_aspects ==
1940 VK_IMAGE_ASPECT_COLOR_BIT);
1941
1942 VkClearAttachment clear_att = {
1943 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1944 .colorAttachment = i, /* Use attachment index relative to subpass */
1945 .clearValue = cmd_state->attachments[a].clear_value,
1946 };
1947
1948 radv_subpass_clear_attachment(cmd_buffer,
1949 &cmd_state->attachments[a],
1950 &clear_att, &pre_flush,
1951 &post_flush, false);
1952 }
1953
1954 if (cmd_state->subpass->depth_stencil_attachment) {
1955 uint32_t ds = cmd_state->subpass->depth_stencil_attachment->attachment;
1956 if (radv_attachment_needs_clear(cmd_state, ds)) {
1957 VkClearAttachment clear_att = {
1958 .aspectMask = cmd_state->attachments[ds].pending_clear_aspects,
1959 .clearValue = cmd_state->attachments[ds].clear_value,
1960 };
1961
1962 radv_subpass_clear_attachment(cmd_buffer,
1963 &cmd_state->attachments[ds],
1964 &clear_att, &pre_flush,
1965 &post_flush, false);
1966 }
1967 }
1968
1969 if (cmd_state->subpass->ds_resolve_attachment) {
1970 uint32_t ds_resolve = cmd_state->subpass->ds_resolve_attachment->attachment;
1971 if (radv_attachment_needs_clear(cmd_state, ds_resolve)) {
1972 VkClearAttachment clear_att = {
1973 .aspectMask = cmd_state->attachments[ds_resolve].pending_clear_aspects,
1974 .clearValue = cmd_state->attachments[ds_resolve].clear_value,
1975 };
1976
1977 radv_subpass_clear_attachment(cmd_buffer,
1978 &cmd_state->attachments[ds_resolve],
1979 &clear_att, &pre_flush,
1980 &post_flush, true);
1981 }
1982 }
1983
1984 radv_meta_restore(&saved_state, cmd_buffer);
1985 cmd_buffer->state.flush_bits |= post_flush;
1986 }
1987
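/* Slow-path clear of a single mip level / layer: wrap the subresource in a
 * temporary image view, framebuffer and single-subpass render pass, then emit
 * a draw-based clear inside that render pass.
 */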
1988 static void
1989 radv_clear_image_layer(struct radv_cmd_buffer *cmd_buffer,
1990 struct radv_image *image,
1991 VkImageLayout image_layout,
1992 const VkImageSubresourceRange *range,
1993 VkFormat format, int level, int layer,
1994 const VkClearValue *clear_val)
1995 {
1996 VkDevice device_h = radv_device_to_handle(cmd_buffer->device);
1997 struct radv_image_view iview;
1998 uint32_t width = radv_minify(image->info.width, range->baseMipLevel + level);
1999 uint32_t height = radv_minify(image->info.height, range->baseMipLevel + level);
2000
2001 radv_image_view_init(&iview, cmd_buffer->device,
2002 &(VkImageViewCreateInfo) {
2003 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
2004 .image = radv_image_to_handle(image),
2005 .viewType = radv_meta_get_view_type(image),
2006 .format = format,
2007 .subresourceRange = {
2008 .aspectMask = range->aspectMask,
2009 .baseMipLevel = range->baseMipLevel + level,
2010 .levelCount = 1,
2011 .baseArrayLayer = range->baseArrayLayer + layer,
2012 .layerCount = 1
2013 },
2014 }, NULL);
2015
2016 VkFramebuffer fb;
2017 radv_CreateFramebuffer(device_h,
2018 &(VkFramebufferCreateInfo) {
2019 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
2020 .attachmentCount = 1,
2021 .pAttachments = (VkImageView[]) {
2022 radv_image_view_to_handle(&iview),
2023 },
2024 .width = width,
2025 .height = height,
2026 .layers = 1
2027 },
2028 &cmd_buffer->pool->alloc,
2029 &fb);
2030
2031 VkAttachmentDescription att_desc = {
2032 .format = iview.vk_format,
2033 .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
2034 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
2035 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
2036 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE,
2037 .initialLayout = image_layout,
2038 .finalLayout = image_layout,
2039 };
2040
2041 VkSubpassDescription subpass_desc = {
2042 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
2043 .inputAttachmentCount = 0,
2044 .colorAttachmentCount = 0,
2045 .pColorAttachments = NULL,
2046 .pResolveAttachments = NULL,
2047 .pDepthStencilAttachment = NULL,
2048 .preserveAttachmentCount = 0,
2049 .pPreserveAttachments = NULL,
2050 };
2051
2052 const VkAttachmentReference att_ref = {
2053 .attachment = 0,
2054 .layout = image_layout,
2055 };
2056
2057 if (range->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
2058 subpass_desc.colorAttachmentCount = 1;
2059 subpass_desc.pColorAttachments = &att_ref;
2060 } else {
2061 subpass_desc.pDepthStencilAttachment = &att_ref;
2062 }
2063
2064 VkRenderPass pass;
2065 radv_CreateRenderPass(device_h,
2066 &(VkRenderPassCreateInfo) {
2067 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2068 .attachmentCount = 1,
2069 .pAttachments = &att_desc,
2070 .subpassCount = 1,
2071 .pSubpasses = &subpass_desc,
2072 },
2073 &cmd_buffer->pool->alloc,
2074 &pass);
2075
2076 radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
2077 &(VkRenderPassBeginInfo) {
2078 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
2079 .renderArea = {
2080 .offset = { 0, 0, },
2081 .extent = {
2082 .width = width,
2083 .height = height,
2084 },
2085 },
2086 .renderPass = pass,
2087 .framebuffer = fb,
2088 .clearValueCount = 0,
2089 .pClearValues = NULL,
2090 },
2091 VK_SUBPASS_CONTENTS_INLINE);
2092
2093 VkClearAttachment clear_att = {
2094 .aspectMask = range->aspectMask,
2095 .colorAttachment = 0,
2096 .clearValue = *clear_val,
2097 };
2098
2099 VkClearRect clear_rect = {
2100 .rect = {
2101 .offset = { 0, 0 },
2102 .extent = { width, height },
2103 },
2104 .baseArrayLayer = range->baseArrayLayer,
2105 .layerCount = 1, /* FINISHME: clear multi-layer framebuffer */
2106 };
2107
2108 emit_clear(cmd_buffer, &clear_att, &clear_rect, NULL, NULL, 0, false);
2109
2110 radv_CmdEndRenderPass(radv_cmd_buffer_to_handle(cmd_buffer));
2111 radv_DestroyRenderPass(device_h, pass,
2112 &cmd_buffer->pool->alloc);
2113 radv_DestroyFramebuffer(device_h, fb,
2114 &cmd_buffer->pool->alloc);
2115 }
2116
2117 /**
2118 * Return TRUE if a fast color or depth clear has been performed.
2119 */
2120 static bool
2121 radv_fast_clear_range(struct radv_cmd_buffer *cmd_buffer,
2122 struct radv_image *image,
2123 VkFormat format,
2124 VkImageLayout image_layout,
2125 bool in_render_loop,
2126 const VkImageSubresourceRange *range,
2127 const VkClearValue *clear_val)
2128 {
2129 struct radv_image_view iview;
2130
2131 radv_image_view_init(&iview, cmd_buffer->device,
2132 &(VkImageViewCreateInfo) {
2133 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
2134 .image = radv_image_to_handle(image),
2135 .viewType = radv_meta_get_view_type(image),
2136 .format = image->vk_format,
2137 .subresourceRange = {
2138 .aspectMask = range->aspectMask,
2139 .baseMipLevel = range->baseMipLevel,
2140 .levelCount = range->levelCount,
2141 .baseArrayLayer = range->baseArrayLayer,
2142 .layerCount = range->layerCount,
2143 },
2144 }, NULL);
2145
2146 VkClearRect clear_rect = {
2147 .rect = {
2148 .offset = { 0, 0 },
2149 .extent = {
2150 radv_minify(image->info.width, range->baseMipLevel),
2151 radv_minify(image->info.height, range->baseMipLevel),
2152 },
2153 },
2154 .baseArrayLayer = range->baseArrayLayer,
2155 .layerCount = range->layerCount,
2156 };
2157
2158 VkClearAttachment clear_att = {
2159 .aspectMask = range->aspectMask,
2160 .colorAttachment = 0,
2161 .clearValue = *clear_val,
2162 };
2163
2164 if (vk_format_is_color(format)) {
2165 if (radv_can_fast_clear_color(cmd_buffer, &iview, image_layout,
2166 in_render_loop, &clear_rect,
2167 clear_att.clearValue.color, 0)) {
2168 radv_fast_clear_color(cmd_buffer, &iview, &clear_att,
2169 clear_att.colorAttachment,
2170 NULL, NULL);
2171 return true;
2172 }
2173 } else {
2174 if (radv_can_fast_clear_depth(cmd_buffer, &iview, image_layout,
2175 in_render_loop, range->aspectMask,
2176 &clear_rect, clear_att.clearValue.depthStencil,
2177 0)) {
2178 radv_fast_clear_depth(cmd_buffer, &iview, &clear_att,
2179 NULL, NULL);
2180 return true;
2181 }
2182 }
2183
2184 return false;
2185 }
2186
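/* Common implementation for vkCmdClearColorImage/vkCmdClearDepthStencilImage:
 * rewrite formats the clear paths can't handle natively, then clear each
 * subresource range, preferring a fast (metadata-only) clear and falling back
 * to per-level/per-layer graphics or compute clears.
 */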
2187 static void
2188 radv_cmd_clear_image(struct radv_cmd_buffer *cmd_buffer,
2189 struct radv_image *image,
2190 VkImageLayout image_layout,
2191 const VkClearValue *clear_value,
2192 uint32_t range_count,
2193 const VkImageSubresourceRange *ranges,
2194 bool cs)
2195 {
2196 VkFormat format = image->vk_format;
2197 VkClearValue internal_clear_value = *clear_value;
2198
2199 if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
2200 uint32_t value;
2201 format = VK_FORMAT_R32_UINT;
2202 value = float3_to_rgb9e5(clear_value->color.float32);
2203 internal_clear_value.color.uint32[0] = value;
2204 }
2205
2206 if (format == VK_FORMAT_R4G4_UNORM_PACK8) {
2207 uint8_t r, g;
2208 format = VK_FORMAT_R8_UINT;
2209 r = float_to_ubyte(clear_value->color.float32[0]) >> 4;
2210 g = float_to_ubyte(clear_value->color.float32[1]) >> 4;
2211 internal_clear_value.color.uint32[0] = (r << 4) | (g & 0xf);
2212 }
2213
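/* 96-bit (3x32) formats can't be used as color attachments, so force the
 * compute clear path for them.
 */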
2214 if (format == VK_FORMAT_R32G32B32_UINT ||
2215 format == VK_FORMAT_R32G32B32_SINT ||
2216 format == VK_FORMAT_R32G32B32_SFLOAT)
2217 cs = true;
2218
2219 for (uint32_t r = 0; r < range_count; r++) {
2220 const VkImageSubresourceRange *range = &ranges[r];
2221
2222 /* Try to perform a fast clear first, otherwise fall back to
2223 * the legacy path.
2224 */
2225 if (!cs &&
2226 radv_fast_clear_range(cmd_buffer, image, format,
2227 image_layout, false, range,
2228 &internal_clear_value)) {
2229 continue;
2230 }
2231
2232 for (uint32_t l = 0; l < radv_get_levelCount(image, range); ++l) {
2233 const uint32_t layer_count = image->type == VK_IMAGE_TYPE_3D ?
2234 radv_minify(image->info.depth, range->baseMipLevel + l) :
2235 radv_get_layerCount(image, range);
2236 for (uint32_t s = 0; s < layer_count; ++s) {
2237
2238 if (cs) {
2239 struct radv_meta_blit2d_surf surf;
2240 surf.format = format;
2241 surf.image = image;
2242 surf.level = range->baseMipLevel + l;
2243 surf.layer = range->baseArrayLayer + s;
2244 surf.aspect_mask = range->aspectMask;
2245 radv_meta_clear_image_cs(cmd_buffer, &surf,
2246 &internal_clear_value.color);
2247 } else {
2248 radv_clear_image_layer(cmd_buffer, image, image_layout,
2249 range, format, l, s, &internal_clear_value);
2250 }
2251 }
2252 }
2253 }
2254 }
2255
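/* On compute-only queues the graphics-based meta clear path is unavailable,
 * so the compute clear path is used instead.
 */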
2256 void radv_CmdClearColorImage(
2257 VkCommandBuffer commandBuffer,
2258 VkImage image_h,
2259 VkImageLayout imageLayout,
2260 const VkClearColorValue* pColor,
2261 uint32_t rangeCount,
2262 const VkImageSubresourceRange* pRanges)
2263 {
2264 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2265 RADV_FROM_HANDLE(radv_image, image, image_h);
2266 struct radv_meta_saved_state saved_state;
2267 bool cs = cmd_buffer->queue_family_index == RADV_QUEUE_COMPUTE;
2268
2269 if (cs) {
2270 radv_meta_save(&saved_state, cmd_buffer,
2271 RADV_META_SAVE_COMPUTE_PIPELINE |
2272 RADV_META_SAVE_CONSTANTS |
2273 RADV_META_SAVE_DESCRIPTORS);
2274 } else {
2275 radv_meta_save(&saved_state, cmd_buffer,
2276 RADV_META_SAVE_GRAPHICS_PIPELINE |
2277 RADV_META_SAVE_CONSTANTS);
2278 }
2279
2280 radv_cmd_clear_image(cmd_buffer, image, imageLayout,
2281 (const VkClearValue *) pColor,
2282 rangeCount, pRanges, cs);
2283
2284 radv_meta_restore(&saved_state, cmd_buffer);
2285 }
2286
2287 void radv_CmdClearDepthStencilImage(
2288 VkCommandBuffer commandBuffer,
2289 VkImage image_h,
2290 VkImageLayout imageLayout,
2291 const VkClearDepthStencilValue* pDepthStencil,
2292 uint32_t rangeCount,
2293 const VkImageSubresourceRange* pRanges)
2294 {
2295 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2296 RADV_FROM_HANDLE(radv_image, image, image_h);
2297 struct radv_meta_saved_state saved_state;
2298
2299 radv_meta_save(&saved_state, cmd_buffer,
2300 RADV_META_SAVE_GRAPHICS_PIPELINE |
2301 RADV_META_SAVE_CONSTANTS);
2302
2303 radv_cmd_clear_image(cmd_buffer, image, imageLayout,
2304 (const VkClearValue *) pDepthStencil,
2305 rangeCount, pRanges, false);
2306
2307 radv_meta_restore(&saved_state, cmd_buffer);
2308 }
2309
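/* vkCmdClearAttachments: clear regions of attachments inside the current
 * subpass by emitting one clear per (attachment, rect) pair.
 */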
2310 void radv_CmdClearAttachments(
2311 VkCommandBuffer commandBuffer,
2312 uint32_t attachmentCount,
2313 const VkClearAttachment* pAttachments,
2314 uint32_t rectCount,
2315 const VkClearRect* pRects)
2316 {
2317 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
2318 struct radv_meta_saved_state saved_state;
2319 enum radv_cmd_flush_bits pre_flush = 0;
2320 enum radv_cmd_flush_bits post_flush = 0;
2321
2322 if (!cmd_buffer->state.subpass)
2323 return;
2324
2325 radv_meta_save(&saved_state, cmd_buffer,
2326 RADV_META_SAVE_GRAPHICS_PIPELINE |
2327 RADV_META_SAVE_CONSTANTS);
2328
2329 /* FINISHME: We can do better than this dumb loop. It thrashes too much
2330 * state.
2331 */
2332 for (uint32_t a = 0; a < attachmentCount; ++a) {
2333 for (uint32_t r = 0; r < rectCount; ++r) {
2334 emit_clear(cmd_buffer, &pAttachments[a], &pRects[r], &pre_flush, &post_flush,
2335 cmd_buffer->state.subpass->view_mask, false);
2336 }
2337 }
2338
2339 radv_meta_restore(&saved_state, cmd_buffer);
2340 cmd_buffer->state.flush_bits |= post_flush;
2341 }