2 * Copyright © 2016 Red Hat.
3 * Copyright © 2016 Bas Nieuwenhuizen
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
24 #include "radv_meta.h"
25 #include "nir/nir_builder.h"
28 * Compute shader implementation of image->buffer copy.
32 build_nir_itob_compute_shader(struct radv_device
*dev
)
35 const struct glsl_type
*sampler_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
39 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
43 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
44 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itob_cs");
45 b
.shader
->info
->cs
.local_size
[0] = 16;
46 b
.shader
->info
->cs
.local_size
[1] = 16;
47 b
.shader
->info
->cs
.local_size
[2] = 1;
48 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
49 sampler_type
, "s_tex");
50 input_img
->data
.descriptor_set
= 0;
51 input_img
->data
.binding
= 0;
53 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
55 output_img
->data
.descriptor_set
= 0;
56 output_img
->data
.binding
= 1;
58 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
59 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
60 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
61 b
.shader
->info
->cs
.local_size
[0],
62 b
.shader
->info
->cs
.local_size
[1],
63 b
.shader
->info
->cs
.local_size
[2], 0);
65 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
69 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
70 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
71 offset
->num_components
= 2;
72 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
73 nir_builder_instr_insert(&b
, &offset
->instr
);
75 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
76 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
77 stride
->num_components
= 1;
78 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
79 nir_builder_instr_insert(&b
, &stride
->instr
);
81 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
83 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
84 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
85 tex
->op
= nir_texop_txf
;
86 tex
->src
[0].src_type
= nir_tex_src_coord
;
87 tex
->src
[0].src
= nir_src_for_ssa(img_coord
);
88 tex
->src
[1].src_type
= nir_tex_src_lod
;
89 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
90 tex
->dest_type
= nir_type_float
;
91 tex
->is_array
= false;
92 tex
->coord_components
= 2;
93 tex
->texture
= nir_deref_var_create(tex
, input_img
);
96 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
97 nir_builder_instr_insert(&b
, &tex
->instr
);
99 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
100 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
102 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
103 tmp
= nir_iadd(&b
, tmp
, pos_x
);
105 nir_ssa_def
*coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
107 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
108 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
109 store
->src
[0] = nir_src_for_ssa(coord
);
110 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
111 store
->src
[2] = nir_src_for_ssa(outval
);
112 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
114 nir_builder_instr_insert(&b
, &store
->instr
);
118 /* Image to buffer - don't write use image accessors */
120 radv_device_init_meta_itob_state(struct radv_device
*device
)
123 struct radv_shader_module cs
= { .nir
= NULL
};
125 zero(device
->meta_state
.itob
);
127 cs
.nir
= build_nir_itob_compute_shader(device
);
130 * two descriptors one for the image being sampled
131 * one for the buffer being written.
133 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
134 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
136 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
139 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
140 .descriptorCount
= 1,
141 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
142 .pImmutableSamplers
= NULL
146 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
147 .descriptorCount
= 1,
148 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
149 .pImmutableSamplers
= NULL
154 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
156 &device
->meta_state
.alloc
,
157 &device
->meta_state
.itob
.img_ds_layout
);
158 if (result
!= VK_SUCCESS
)
162 VkPipelineLayoutCreateInfo pl_create_info
= {
163 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
165 .pSetLayouts
= &device
->meta_state
.itob
.img_ds_layout
,
166 .pushConstantRangeCount
= 1,
167 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
170 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
172 &device
->meta_state
.alloc
,
173 &device
->meta_state
.itob
.img_p_layout
);
174 if (result
!= VK_SUCCESS
)
179 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
180 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
181 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
182 .module
= radv_shader_module_to_handle(&cs
),
184 .pSpecializationInfo
= NULL
,
187 VkComputePipelineCreateInfo vk_pipeline_info
= {
188 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
189 .stage
= pipeline_shader_stage
,
191 .layout
= device
->meta_state
.itob
.img_p_layout
,
194 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
195 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
196 1, &vk_pipeline_info
, NULL
,
197 &device
->meta_state
.itob
.pipeline
);
198 if (result
!= VK_SUCCESS
)
209 radv_device_finish_meta_itob_state(struct radv_device
*device
)
211 if (device
->meta_state
.itob
.img_p_layout
) {
212 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
213 device
->meta_state
.itob
.img_p_layout
,
214 &device
->meta_state
.alloc
);
216 if (device
->meta_state
.itob
.img_ds_layout
) {
217 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
218 device
->meta_state
.itob
.img_ds_layout
,
219 &device
->meta_state
.alloc
);
221 if (device
->meta_state
.itob
.pipeline
) {
222 radv_DestroyPipeline(radv_device_to_handle(device
),
223 device
->meta_state
.itob
.pipeline
,
224 &device
->meta_state
.alloc
);
229 build_nir_btoi_compute_shader(struct radv_device
*dev
)
232 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
236 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
240 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
241 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_btoi_cs");
242 b
.shader
->info
->cs
.local_size
[0] = 16;
243 b
.shader
->info
->cs
.local_size
[1] = 16;
244 b
.shader
->info
->cs
.local_size
[2] = 1;
245 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
247 input_img
->data
.descriptor_set
= 0;
248 input_img
->data
.binding
= 0;
250 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
251 img_type
, "out_img");
252 output_img
->data
.descriptor_set
= 0;
253 output_img
->data
.binding
= 1;
255 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
256 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
257 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
258 b
.shader
->info
->cs
.local_size
[0],
259 b
.shader
->info
->cs
.local_size
[1],
260 b
.shader
->info
->cs
.local_size
[2], 0);
262 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
264 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
265 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
266 offset
->num_components
= 2;
267 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
268 nir_builder_instr_insert(&b
, &offset
->instr
);
270 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
271 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
272 stride
->num_components
= 1;
273 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
274 nir_builder_instr_insert(&b
, &stride
->instr
);
276 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
277 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
279 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
280 tmp
= nir_iadd(&b
, tmp
, pos_x
);
282 nir_ssa_def
*buf_coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
284 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
286 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
287 tex
->sampler_dim
= GLSL_SAMPLER_DIM_BUF
;
288 tex
->op
= nir_texop_txf
;
289 tex
->src
[0].src_type
= nir_tex_src_coord
;
290 tex
->src
[0].src
= nir_src_for_ssa(buf_coord
);
291 tex
->src
[1].src_type
= nir_tex_src_lod
;
292 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
293 tex
->dest_type
= nir_type_float
;
294 tex
->is_array
= false;
295 tex
->coord_components
= 1;
296 tex
->texture
= nir_deref_var_create(tex
, input_img
);
299 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
300 nir_builder_instr_insert(&b
, &tex
->instr
);
302 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
303 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
304 store
->src
[0] = nir_src_for_ssa(img_coord
);
305 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
306 store
->src
[2] = nir_src_for_ssa(outval
);
307 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
309 nir_builder_instr_insert(&b
, &store
->instr
);
313 /* Buffer to image - don't write use image accessors */
315 radv_device_init_meta_btoi_state(struct radv_device
*device
)
318 struct radv_shader_module cs
= { .nir
= NULL
};
320 zero(device
->meta_state
.btoi
);
322 cs
.nir
= build_nir_btoi_compute_shader(device
);
325 * two descriptors one for the image being sampled
326 * one for the buffer being written.
328 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
329 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
331 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
334 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
335 .descriptorCount
= 1,
336 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
337 .pImmutableSamplers
= NULL
341 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
342 .descriptorCount
= 1,
343 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
344 .pImmutableSamplers
= NULL
349 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
351 &device
->meta_state
.alloc
,
352 &device
->meta_state
.btoi
.img_ds_layout
);
353 if (result
!= VK_SUCCESS
)
357 VkPipelineLayoutCreateInfo pl_create_info
= {
358 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
360 .pSetLayouts
= &device
->meta_state
.btoi
.img_ds_layout
,
361 .pushConstantRangeCount
= 1,
362 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
365 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
367 &device
->meta_state
.alloc
,
368 &device
->meta_state
.btoi
.img_p_layout
);
369 if (result
!= VK_SUCCESS
)
374 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
375 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
376 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
377 .module
= radv_shader_module_to_handle(&cs
),
379 .pSpecializationInfo
= NULL
,
382 VkComputePipelineCreateInfo vk_pipeline_info
= {
383 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
384 .stage
= pipeline_shader_stage
,
386 .layout
= device
->meta_state
.btoi
.img_p_layout
,
389 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
390 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
391 1, &vk_pipeline_info
, NULL
,
392 &device
->meta_state
.btoi
.pipeline
);
393 if (result
!= VK_SUCCESS
)
404 radv_device_finish_meta_btoi_state(struct radv_device
*device
)
406 if (device
->meta_state
.btoi
.img_p_layout
) {
407 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
408 device
->meta_state
.btoi
.img_p_layout
,
409 &device
->meta_state
.alloc
);
411 if (device
->meta_state
.btoi
.img_ds_layout
) {
412 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
413 device
->meta_state
.btoi
.img_ds_layout
,
414 &device
->meta_state
.alloc
);
416 if (device
->meta_state
.btoi
.pipeline
) {
417 radv_DestroyPipeline(radv_device_to_handle(device
),
418 device
->meta_state
.btoi
.pipeline
,
419 &device
->meta_state
.alloc
);
424 build_nir_itoi_compute_shader(struct radv_device
*dev
)
427 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
431 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
435 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
436 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itoi_cs");
437 b
.shader
->info
->cs
.local_size
[0] = 16;
438 b
.shader
->info
->cs
.local_size
[1] = 16;
439 b
.shader
->info
->cs
.local_size
[2] = 1;
440 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
442 input_img
->data
.descriptor_set
= 0;
443 input_img
->data
.binding
= 0;
445 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
446 img_type
, "out_img");
447 output_img
->data
.descriptor_set
= 0;
448 output_img
->data
.binding
= 1;
450 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
451 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
452 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
453 b
.shader
->info
->cs
.local_size
[0],
454 b
.shader
->info
->cs
.local_size
[1],
455 b
.shader
->info
->cs
.local_size
[2], 0);
457 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
459 nir_intrinsic_instr
*src_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
460 src_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
461 src_offset
->num_components
= 2;
462 nir_ssa_dest_init(&src_offset
->instr
, &src_offset
->dest
, 2, 32, "src_offset");
463 nir_builder_instr_insert(&b
, &src_offset
->instr
);
465 nir_intrinsic_instr
*dst_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
466 dst_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
467 dst_offset
->num_components
= 2;
468 nir_ssa_dest_init(&dst_offset
->instr
, &dst_offset
->dest
, 2, 32, "dst_offset");
469 nir_builder_instr_insert(&b
, &dst_offset
->instr
);
471 nir_ssa_def
*src_coord
= nir_iadd(&b
, global_id
, &src_offset
->dest
.ssa
);
473 nir_ssa_def
*dst_coord
= nir_iadd(&b
, global_id
, &dst_offset
->dest
.ssa
);
475 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
476 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
477 tex
->op
= nir_texop_txf
;
478 tex
->src
[0].src_type
= nir_tex_src_coord
;
479 tex
->src
[0].src
= nir_src_for_ssa(src_coord
);
480 tex
->src
[1].src_type
= nir_tex_src_lod
;
481 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
482 tex
->dest_type
= nir_type_float
;
483 tex
->is_array
= false;
484 tex
->coord_components
= 2;
485 tex
->texture
= nir_deref_var_create(tex
, input_img
);
488 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
489 nir_builder_instr_insert(&b
, &tex
->instr
);
491 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
492 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
493 store
->src
[0] = nir_src_for_ssa(dst_coord
);
494 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
495 store
->src
[2] = nir_src_for_ssa(outval
);
496 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
498 nir_builder_instr_insert(&b
, &store
->instr
);
502 /* image to image - don't write use image accessors */
504 radv_device_init_meta_itoi_state(struct radv_device
*device
)
507 struct radv_shader_module cs
= { .nir
= NULL
};
509 zero(device
->meta_state
.itoi
);
511 cs
.nir
= build_nir_itoi_compute_shader(device
);
514 * two descriptors one for the image being sampled
515 * one for the buffer being written.
517 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
518 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
520 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
523 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
524 .descriptorCount
= 1,
525 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
526 .pImmutableSamplers
= NULL
530 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
531 .descriptorCount
= 1,
532 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
533 .pImmutableSamplers
= NULL
538 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
540 &device
->meta_state
.alloc
,
541 &device
->meta_state
.itoi
.img_ds_layout
);
542 if (result
!= VK_SUCCESS
)
546 VkPipelineLayoutCreateInfo pl_create_info
= {
547 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
549 .pSetLayouts
= &device
->meta_state
.itoi
.img_ds_layout
,
550 .pushConstantRangeCount
= 1,
551 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
554 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
556 &device
->meta_state
.alloc
,
557 &device
->meta_state
.itoi
.img_p_layout
);
558 if (result
!= VK_SUCCESS
)
563 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
564 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
565 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
566 .module
= radv_shader_module_to_handle(&cs
),
568 .pSpecializationInfo
= NULL
,
571 VkComputePipelineCreateInfo vk_pipeline_info
= {
572 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
573 .stage
= pipeline_shader_stage
,
575 .layout
= device
->meta_state
.itoi
.img_p_layout
,
578 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
579 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
580 1, &vk_pipeline_info
, NULL
,
581 &device
->meta_state
.itoi
.pipeline
);
582 if (result
!= VK_SUCCESS
)
593 radv_device_finish_meta_itoi_state(struct radv_device
*device
)
595 if (device
->meta_state
.itoi
.img_p_layout
) {
596 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
597 device
->meta_state
.itoi
.img_p_layout
,
598 &device
->meta_state
.alloc
);
600 if (device
->meta_state
.itoi
.img_ds_layout
) {
601 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
602 device
->meta_state
.itoi
.img_ds_layout
,
603 &device
->meta_state
.alloc
);
605 if (device
->meta_state
.itoi
.pipeline
) {
606 radv_DestroyPipeline(radv_device_to_handle(device
),
607 device
->meta_state
.itoi
.pipeline
,
608 &device
->meta_state
.alloc
);
613 build_nir_cleari_compute_shader(struct radv_device
*dev
)
616 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
620 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
621 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_cleari_cs");
622 b
.shader
->info
->cs
.local_size
[0] = 16;
623 b
.shader
->info
->cs
.local_size
[1] = 16;
624 b
.shader
->info
->cs
.local_size
[2] = 1;
626 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
627 img_type
, "out_img");
628 output_img
->data
.descriptor_set
= 0;
629 output_img
->data
.binding
= 0;
631 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
632 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
633 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
634 b
.shader
->info
->cs
.local_size
[0],
635 b
.shader
->info
->cs
.local_size
[1],
636 b
.shader
->info
->cs
.local_size
[2], 0);
638 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
640 nir_intrinsic_instr
*clear_val
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
641 clear_val
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
642 clear_val
->num_components
= 4;
643 nir_ssa_dest_init(&clear_val
->instr
, &clear_val
->dest
, 4, 32, "clear_value");
644 nir_builder_instr_insert(&b
, &clear_val
->instr
);
646 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
647 store
->src
[0] = nir_src_for_ssa(global_id
);
648 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
649 store
->src
[2] = nir_src_for_ssa(&clear_val
->dest
.ssa
);
650 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
652 nir_builder_instr_insert(&b
, &store
->instr
);
657 radv_device_init_meta_cleari_state(struct radv_device
*device
)
660 struct radv_shader_module cs
= { .nir
= NULL
};
662 zero(device
->meta_state
.cleari
);
664 cs
.nir
= build_nir_cleari_compute_shader(device
);
667 * two descriptors one for the image being sampled
668 * one for the buffer being written.
670 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
671 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
673 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
676 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
677 .descriptorCount
= 1,
678 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
679 .pImmutableSamplers
= NULL
684 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
686 &device
->meta_state
.alloc
,
687 &device
->meta_state
.cleari
.img_ds_layout
);
688 if (result
!= VK_SUCCESS
)
692 VkPipelineLayoutCreateInfo pl_create_info
= {
693 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
695 .pSetLayouts
= &device
->meta_state
.cleari
.img_ds_layout
,
696 .pushConstantRangeCount
= 1,
697 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
700 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
702 &device
->meta_state
.alloc
,
703 &device
->meta_state
.cleari
.img_p_layout
);
704 if (result
!= VK_SUCCESS
)
709 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
710 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
711 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
712 .module
= radv_shader_module_to_handle(&cs
),
714 .pSpecializationInfo
= NULL
,
717 VkComputePipelineCreateInfo vk_pipeline_info
= {
718 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
719 .stage
= pipeline_shader_stage
,
721 .layout
= device
->meta_state
.cleari
.img_p_layout
,
724 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
725 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
726 1, &vk_pipeline_info
, NULL
,
727 &device
->meta_state
.cleari
.pipeline
);
728 if (result
!= VK_SUCCESS
)
739 radv_device_finish_meta_cleari_state(struct radv_device
*device
)
741 if (device
->meta_state
.cleari
.img_p_layout
) {
742 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
743 device
->meta_state
.cleari
.img_p_layout
,
744 &device
->meta_state
.alloc
);
746 if (device
->meta_state
.cleari
.img_ds_layout
) {
747 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
748 device
->meta_state
.cleari
.img_ds_layout
,
749 &device
->meta_state
.alloc
);
751 if (device
->meta_state
.cleari
.pipeline
) {
752 radv_DestroyPipeline(radv_device_to_handle(device
),
753 device
->meta_state
.cleari
.pipeline
,
754 &device
->meta_state
.alloc
);
/* Tear down all buffer<->image meta state (itob, btoi, itoi, cleari). */
void
radv_device_finish_meta_bufimage_state(struct radv_device *device)
{
	radv_device_finish_meta_itob_state(device);
	radv_device_finish_meta_btoi_state(device);
	radv_device_finish_meta_itoi_state(device);
	radv_device_finish_meta_cleari_state(device);
}
768 radv_device_init_meta_bufimage_state(struct radv_device
*device
)
772 result
= radv_device_init_meta_itob_state(device
);
773 if (result
!= VK_SUCCESS
)
776 result
= radv_device_init_meta_btoi_state(device
);
777 if (result
!= VK_SUCCESS
)
780 result
= radv_device_init_meta_itoi_state(device
);
781 if (result
!= VK_SUCCESS
)
784 result
= radv_device_init_meta_cleari_state(device
);
785 if (result
!= VK_SUCCESS
)
790 radv_device_finish_meta_itoi_state(device
);
792 radv_device_finish_meta_btoi_state(device
);
794 radv_device_finish_meta_itob_state(device
);
/* Save compute state (incl. 16 bytes of push constants) before an itoi op. */
void
radv_meta_begin_itoi(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore compute state (incl. 16 bytes of push constants) after an itoi op. */
void
radv_meta_end_itoi(struct radv_cmd_buffer *cmd_buffer,
		   struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
/* Save compute state (incl. 12 bytes of push constants) before itob/btoi. */
void
radv_meta_begin_bufimage(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 12);
}
/* Restore compute state (incl. 12 bytes of push constants) after itob/btoi. */
void
radv_meta_end_bufimage(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 12);
}
/* Save compute state (incl. 16 bytes of push constants) before a cleari op. */
void
radv_meta_begin_cleari(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore compute state (incl. 16 bytes of push constants) after a cleari op. */
void
radv_meta_end_cleari(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
841 create_iview(struct radv_cmd_buffer
*cmd_buffer
,
842 struct radv_meta_blit2d_surf
*surf
,
843 VkImageUsageFlags usage
,
844 struct radv_image_view
*iview
)
847 radv_image_view_init(iview
, cmd_buffer
->device
,
848 &(VkImageViewCreateInfo
) {
849 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
850 .image
= radv_image_to_handle(surf
->image
),
851 .viewType
= VK_IMAGE_VIEW_TYPE_2D
,
852 .format
= surf
->format
,
853 .subresourceRange
= {
854 .aspectMask
= surf
->aspect_mask
,
855 .baseMipLevel
= surf
->level
,
857 .baseArrayLayer
= surf
->layer
,
860 }, cmd_buffer
, usage
);
864 create_bview(struct radv_cmd_buffer
*cmd_buffer
,
865 struct radv_buffer
*buffer
,
868 struct radv_buffer_view
*bview
)
870 radv_buffer_view_init(bview
, cmd_buffer
->device
,
871 &(VkBufferViewCreateInfo
) {
872 .sType
= VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
,
874 .buffer
= radv_buffer_to_handle(buffer
),
877 .range
= VK_WHOLE_SIZE
,
883 struct radv_image_view src_iview
;
884 struct radv_buffer_view dst_bview
;
889 itob_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
890 struct itob_temps
*tmp
)
892 struct radv_device
*device
= cmd_buffer
->device
;
893 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
895 radv_temp_descriptor_set_create(device
, cmd_buffer
,
896 device
->meta_state
.itob
.img_ds_layout
,
899 radv_UpdateDescriptorSets(vk_device
,
901 (VkWriteDescriptorSet
[]) {
903 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
906 .dstArrayElement
= 0,
907 .descriptorCount
= 1,
908 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
909 .pImageInfo
= (VkDescriptorImageInfo
[]) {
911 .sampler
= VK_NULL_HANDLE
,
912 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
913 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
918 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
921 .dstArrayElement
= 0,
922 .descriptorCount
= 1,
923 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
924 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->dst_bview
) },
928 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
929 VK_PIPELINE_BIND_POINT_COMPUTE
,
930 device
->meta_state
.itob
.img_p_layout
, 0, 1,
935 itob_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
937 VkPipeline pipeline
=
938 cmd_buffer
->device
->meta_state
.itob
.pipeline
;
940 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
941 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
942 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
947 radv_meta_image_to_buffer(struct radv_cmd_buffer
*cmd_buffer
,
948 struct radv_meta_blit2d_surf
*src
,
949 struct radv_meta_blit2d_buffer
*dst
,
951 struct radv_meta_blit2d_rect
*rects
)
953 struct radv_device
*device
= cmd_buffer
->device
;
954 struct itob_temps temps
;
956 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
957 create_bview(cmd_buffer
, dst
->buffer
, dst
->offset
, dst
->format
, &temps
.dst_bview
);
958 itob_bind_descriptors(cmd_buffer
, &temps
);
960 itob_bind_pipeline(cmd_buffer
);
962 for (unsigned r
= 0; r
< num_rects
; ++r
) {
963 unsigned push_constants
[3] = {
968 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
969 device
->meta_state
.itob
.img_p_layout
,
970 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
973 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
975 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);
979 struct radv_buffer_view src_bview
;
980 struct radv_image_view dst_iview
;
985 btoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
986 struct btoi_temps
*tmp
)
988 struct radv_device
*device
= cmd_buffer
->device
;
989 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
991 radv_temp_descriptor_set_create(device
, cmd_buffer
,
992 device
->meta_state
.btoi
.img_ds_layout
,
995 radv_UpdateDescriptorSets(vk_device
,
997 (VkWriteDescriptorSet
[]) {
999 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1002 .dstArrayElement
= 0,
1003 .descriptorCount
= 1,
1004 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
1005 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->src_bview
) },
1008 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1011 .dstArrayElement
= 0,
1012 .descriptorCount
= 1,
1013 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1014 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1017 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1018 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1024 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
1025 VK_PIPELINE_BIND_POINT_COMPUTE
,
1026 device
->meta_state
.btoi
.img_p_layout
, 0, 1,
1027 &tmp
->set
, 0, NULL
);
1031 btoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1033 VkPipeline pipeline
=
1034 cmd_buffer
->device
->meta_state
.btoi
.pipeline
;
1036 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1037 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1038 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
/**
 * Copy buffer data into an image with the btoi compute path.
 *
 * Creates a texel-buffer view of @src and a storage image view of @dst,
 * binds them through a temporary descriptor set, then dispatches one
 * unaligned compute grid per rectangle in @rects. The temporary descriptor
 * set is destroyed afterwards; the views are transient per-copy objects.
 */
void
radv_meta_buffer_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
			     struct radv_meta_blit2d_buffer *src,
			     struct radv_meta_blit2d_surf *dst,
			     unsigned num_rects,
			     struct radv_meta_blit2d_rect *rects)
{
	struct radv_device *device = cmd_buffer->device;
	struct btoi_temps temps;

	/* Source is read through a buffer view; destination is written as a
	 * storage image (matches the descriptor types bound in
	 * btoi_bind_descriptors). */
	create_bview(cmd_buffer, src->buffer, src->offset, src->format, &temps.src_bview);
	create_iview(cmd_buffer, dst, VK_IMAGE_USAGE_STORAGE_BIT, &temps.dst_iview);
	btoi_bind_descriptors(cmd_buffer, &temps);

	btoi_bind_pipeline(cmd_buffer);

	for (unsigned r = 0; r < num_rects; ++r) {
		/* NOTE(review): push-constant contents reconstructed from the
		 * 12-byte size below — presumably dst offset plus buffer
		 * pitch; confirm against the btoi shader layout. */
		unsigned push_constants[3] = {
			rects[r].dst_x,
			rects[r].dst_y,
			src->pitch
		};

		/* 12 bytes = three 32-bit push constants. */
		radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
				      device->meta_state.btoi.img_p_layout,
				      VK_SHADER_STAGE_COMPUTE_BIT, 0, 12,
				      push_constants);

		/* One thread per pixel; handles sizes not divisible by the
		 * workgroup dimensions. */
		radv_unaligned_dispatch(cmd_buffer, rects[r].width, rects[r].height, 1);
	}

	radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
}
/* Transient objects that live for the duration of one image->image copy. */
struct itoi_temps {
	struct radv_image_view src_iview;	/* sampled view of the source image */
	struct radv_image_view dst_iview;	/* storage view of the destination image */
	VkDescriptorSet set;			/* temporary descriptor set holding both views */
};
/* Allocate a temporary descriptor set for the image->image copy shader and
 * bind it: binding 0 samples the source image, binding 1 is the writable
 * destination storage image. tmp->set is filled in and later freed by the
 * caller via radv_temp_descriptor_set_destroy(). */
static void
itoi_bind_descriptors(struct radv_cmd_buffer *cmd_buffer,
		      struct itoi_temps *tmp)
{
	struct radv_device *device = cmd_buffer->device;
	VkDevice vk_device = radv_device_to_handle(cmd_buffer->device);

	radv_temp_descriptor_set_create(device, cmd_buffer,
					device->meta_state.itoi.img_ds_layout,
					&tmp->set);

	/* Two writes: sampled source (binding 0) and storage destination
	 * (binding 1). NOTE(review): dstSet/dstBinding/sampler fields were
	 * lost in extraction and reconstructed — confirm against upstream. */
	radv_UpdateDescriptorSets(vk_device,
				  2, /* writeCount */
				  (VkWriteDescriptorSet[]) {
					  {
						  .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						  .dstSet = tmp->set,
						  .dstBinding = 0,
						  .dstArrayElement = 0,
						  .descriptorCount = 1,
						  .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
						  .pImageInfo = (VkDescriptorImageInfo[]) {
							  {
								  .sampler = NULL,
								  .imageView = radv_image_view_to_handle(&tmp->src_iview),
								  .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
							  },
						  }
					  },
					  {
						  .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						  .dstSet = tmp->set,
						  .dstBinding = 1,
						  .dstArrayElement = 0,
						  .descriptorCount = 1,
						  .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
						  .pImageInfo = (VkDescriptorImageInfo[]) {
							  {
								  .sampler = NULL,
								  .imageView = radv_image_view_to_handle(&tmp->dst_iview),
								  .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
							  },
						  }
					  }
				  }, 0, NULL);

	radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer),
				   VK_PIPELINE_BIND_POINT_COMPUTE,
				   device->meta_state.itoi.img_p_layout, 0, 1,
				   &tmp->set, 0, NULL);
}
1133 itoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1135 VkPipeline pipeline
=
1136 cmd_buffer
->device
->meta_state
.itoi
.pipeline
;
1138 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1139 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1140 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
/**
 * Copy between two images with the itoi compute path.
 *
 * Creates a sampled view of @src and a storage view of @dst, binds them via
 * a temporary descriptor set, then dispatches one unaligned compute grid per
 * rectangle in @rects. The temporary descriptor set is destroyed at the end.
 */
void
radv_meta_image_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
			    struct radv_meta_blit2d_surf *src,
			    struct radv_meta_blit2d_surf *dst,
			    unsigned num_rects,
			    struct radv_meta_blit2d_rect *rects)
{
	struct radv_device *device = cmd_buffer->device;
	struct itoi_temps temps;

	/* Usage flags must match the descriptor types bound in
	 * itoi_bind_descriptors (sampled src, storage dst). */
	create_iview(cmd_buffer, src, VK_IMAGE_USAGE_SAMPLED_BIT, &temps.src_iview);
	create_iview(cmd_buffer, dst, VK_IMAGE_USAGE_STORAGE_BIT, &temps.dst_iview);

	itoi_bind_descriptors(cmd_buffer, &temps);

	itoi_bind_pipeline(cmd_buffer);

	for (unsigned r = 0; r < num_rects; ++r) {
		/* NOTE(review): push-constant contents reconstructed from the
		 * 16-byte size below — presumably src and dst rectangle
		 * origins; confirm against the itoi shader layout. */
		unsigned push_constants[4] = {
			rects[r].src_x,
			rects[r].src_y,
			rects[r].dst_x,
			rects[r].dst_y,
		};

		/* 16 bytes = four 32-bit push constants. */
		radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
				      device->meta_state.itoi.img_p_layout,
				      VK_SHADER_STAGE_COMPUTE_BIT, 0, 16,
				      push_constants);

		/* One thread per pixel; handles sizes not divisible by the
		 * workgroup dimensions. */
		radv_unaligned_dispatch(cmd_buffer, rects[r].width, rects[r].height, 1);
	}

	radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
}
/* Transient objects that live for the duration of one compute image clear. */
struct cleari_temps {
	struct radv_image_view dst_iview;	/* storage view of the image being cleared */
	VkDescriptorSet set;			/* temporary descriptor set holding the view */
};
/* Allocate a temporary descriptor set for the image-clear shader and bind
 * it: a single storage-image descriptor (binding 0) pointing at the
 * destination view. tmp->set is filled in and later freed by the caller via
 * radv_temp_descriptor_set_destroy(). */
static void
cleari_bind_descriptors(struct radv_cmd_buffer *cmd_buffer,
			struct cleari_temps *tmp)
{
	struct radv_device *device = cmd_buffer->device;
	VkDevice vk_device = radv_device_to_handle(cmd_buffer->device);

	radv_temp_descriptor_set_create(device, cmd_buffer,
					device->meta_state.cleari.img_ds_layout,
					&tmp->set);

	/* NOTE(review): dstSet/dstBinding/sampler fields were lost in
	 * extraction and reconstructed — confirm against upstream. */
	radv_UpdateDescriptorSets(vk_device,
				  1, /* writeCount */
				  (VkWriteDescriptorSet[]) {
					  {
						  .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						  .dstSet = tmp->set,
						  .dstBinding = 0,
						  .dstArrayElement = 0,
						  .descriptorCount = 1,
						  .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
						  .pImageInfo = (VkDescriptorImageInfo[]) {
							  {
								  .sampler = NULL,
								  .imageView = radv_image_view_to_handle(&tmp->dst_iview),
								  .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
							  },
						  }
					  },
				  }, 0, NULL);

	radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer),
				   VK_PIPELINE_BIND_POINT_COMPUTE,
				   device->meta_state.cleari.img_p_layout, 0, 1,
				   &tmp->set, 0, NULL);
}
1221 cleari_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1223 VkPipeline pipeline
=
1224 cmd_buffer
->device
->meta_state
.cleari
.pipeline
;
1226 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1227 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1228 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
/**
 * Clear an image to @clear_color with the cleari compute path.
 *
 * Creates a storage view of @dst, binds it through a temporary descriptor
 * set, pushes the four 32-bit clear-color words as push constants, and
 * dispatches one thread per pixel over the whole image extent. The temporary
 * descriptor set is destroyed afterwards.
 */
void
radv_meta_clear_image_cs(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_blit2d_surf *dst,
			 const VkClearColorValue *clear_color)
{
	struct radv_device *device = cmd_buffer->device;
	struct cleari_temps temps;

	create_iview(cmd_buffer, dst, VK_IMAGE_USAGE_STORAGE_BIT, &temps.dst_iview);
	cleari_bind_descriptors(cmd_buffer, &temps);

	cleari_bind_pipeline(cmd_buffer);

	/* The raw uint32 aliasing of the clear color is passed regardless of
	 * the destination format; the shader writes the words as-is. */
	unsigned push_constants[4] = {
		clear_color->uint32[0],
		clear_color->uint32[1],
		clear_color->uint32[2],
		clear_color->uint32[3],
	};

	/* 16 bytes = four 32-bit push constants. */
	radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
			      device->meta_state.cleari.img_p_layout,
			      VK_SHADER_STAGE_COMPUTE_BIT, 0, 16,
			      push_constants);

	/* Clear covers the full image extent; the unaligned dispatch handles
	 * sizes not divisible by the workgroup dimensions. */
	radv_unaligned_dispatch(cmd_buffer, dst->image->extent.width, dst->image->extent.height, 1);

	radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
}