/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
24 #include "radv_meta.h"
25 #include "nir/nir_builder.h"
/*
 * GFX queue: compute-shader implementation of image->buffer copy.
 * Compute queue: also implements buffer->image, image->image, and image clear.
 */
33 build_nir_itob_compute_shader(struct radv_device
*dev
)
36 const struct glsl_type
*sampler_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
40 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
44 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
45 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itob_cs");
46 b
.shader
->info
->cs
.local_size
[0] = 16;
47 b
.shader
->info
->cs
.local_size
[1] = 16;
48 b
.shader
->info
->cs
.local_size
[2] = 1;
49 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
50 sampler_type
, "s_tex");
51 input_img
->data
.descriptor_set
= 0;
52 input_img
->data
.binding
= 0;
54 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
56 output_img
->data
.descriptor_set
= 0;
57 output_img
->data
.binding
= 1;
59 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
60 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
61 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
62 b
.shader
->info
->cs
.local_size
[0],
63 b
.shader
->info
->cs
.local_size
[1],
64 b
.shader
->info
->cs
.local_size
[2], 0);
66 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
70 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
71 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
72 offset
->num_components
= 2;
73 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
74 nir_builder_instr_insert(&b
, &offset
->instr
);
76 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
77 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
78 stride
->num_components
= 1;
79 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
80 nir_builder_instr_insert(&b
, &stride
->instr
);
82 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
84 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
85 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
86 tex
->op
= nir_texop_txf
;
87 tex
->src
[0].src_type
= nir_tex_src_coord
;
88 tex
->src
[0].src
= nir_src_for_ssa(img_coord
);
89 tex
->src
[1].src_type
= nir_tex_src_lod
;
90 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
91 tex
->dest_type
= nir_type_float
;
92 tex
->is_array
= false;
93 tex
->coord_components
= 2;
94 tex
->texture
= nir_deref_var_create(tex
, input_img
);
97 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
98 nir_builder_instr_insert(&b
, &tex
->instr
);
100 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
101 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
103 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
104 tmp
= nir_iadd(&b
, tmp
, pos_x
);
106 nir_ssa_def
*coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
108 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
109 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
110 store
->src
[0] = nir_src_for_ssa(coord
);
111 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
112 store
->src
[2] = nir_src_for_ssa(outval
);
113 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
115 nir_builder_instr_insert(&b
, &store
->instr
);
119 /* Image to buffer - don't write use image accessors */
121 radv_device_init_meta_itob_state(struct radv_device
*device
)
124 struct radv_shader_module cs
= { .nir
= NULL
};
126 zero(device
->meta_state
.itob
);
128 cs
.nir
= build_nir_itob_compute_shader(device
);
131 * two descriptors one for the image being sampled
132 * one for the buffer being written.
134 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
135 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
137 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
140 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
141 .descriptorCount
= 1,
142 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
143 .pImmutableSamplers
= NULL
147 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
148 .descriptorCount
= 1,
149 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
150 .pImmutableSamplers
= NULL
155 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
157 &device
->meta_state
.alloc
,
158 &device
->meta_state
.itob
.img_ds_layout
);
159 if (result
!= VK_SUCCESS
)
163 VkPipelineLayoutCreateInfo pl_create_info
= {
164 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
166 .pSetLayouts
= &device
->meta_state
.itob
.img_ds_layout
,
167 .pushConstantRangeCount
= 1,
168 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
171 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
173 &device
->meta_state
.alloc
,
174 &device
->meta_state
.itob
.img_p_layout
);
175 if (result
!= VK_SUCCESS
)
180 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
181 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
182 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
183 .module
= radv_shader_module_to_handle(&cs
),
185 .pSpecializationInfo
= NULL
,
188 VkComputePipelineCreateInfo vk_pipeline_info
= {
189 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
190 .stage
= pipeline_shader_stage
,
192 .layout
= device
->meta_state
.itob
.img_p_layout
,
195 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
196 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
197 1, &vk_pipeline_info
, NULL
,
198 &device
->meta_state
.itob
.pipeline
);
199 if (result
!= VK_SUCCESS
)
210 radv_device_finish_meta_itob_state(struct radv_device
*device
)
212 if (device
->meta_state
.itob
.img_p_layout
) {
213 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
214 device
->meta_state
.itob
.img_p_layout
,
215 &device
->meta_state
.alloc
);
217 if (device
->meta_state
.itob
.img_ds_layout
) {
218 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
219 device
->meta_state
.itob
.img_ds_layout
,
220 &device
->meta_state
.alloc
);
222 if (device
->meta_state
.itob
.pipeline
) {
223 radv_DestroyPipeline(radv_device_to_handle(device
),
224 device
->meta_state
.itob
.pipeline
,
225 &device
->meta_state
.alloc
);
230 build_nir_btoi_compute_shader(struct radv_device
*dev
)
233 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
237 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
241 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
242 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_btoi_cs");
243 b
.shader
->info
->cs
.local_size
[0] = 16;
244 b
.shader
->info
->cs
.local_size
[1] = 16;
245 b
.shader
->info
->cs
.local_size
[2] = 1;
246 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
248 input_img
->data
.descriptor_set
= 0;
249 input_img
->data
.binding
= 0;
251 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
252 img_type
, "out_img");
253 output_img
->data
.descriptor_set
= 0;
254 output_img
->data
.binding
= 1;
256 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
257 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
258 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
259 b
.shader
->info
->cs
.local_size
[0],
260 b
.shader
->info
->cs
.local_size
[1],
261 b
.shader
->info
->cs
.local_size
[2], 0);
263 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
265 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
266 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
267 offset
->num_components
= 2;
268 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
269 nir_builder_instr_insert(&b
, &offset
->instr
);
271 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
272 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
273 stride
->num_components
= 1;
274 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
275 nir_builder_instr_insert(&b
, &stride
->instr
);
277 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
278 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
280 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
281 tmp
= nir_iadd(&b
, tmp
, pos_x
);
283 nir_ssa_def
*buf_coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
285 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
287 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
288 tex
->sampler_dim
= GLSL_SAMPLER_DIM_BUF
;
289 tex
->op
= nir_texop_txf
;
290 tex
->src
[0].src_type
= nir_tex_src_coord
;
291 tex
->src
[0].src
= nir_src_for_ssa(buf_coord
);
292 tex
->src
[1].src_type
= nir_tex_src_lod
;
293 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
294 tex
->dest_type
= nir_type_float
;
295 tex
->is_array
= false;
296 tex
->coord_components
= 1;
297 tex
->texture
= nir_deref_var_create(tex
, input_img
);
300 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
301 nir_builder_instr_insert(&b
, &tex
->instr
);
303 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
304 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
305 store
->src
[0] = nir_src_for_ssa(img_coord
);
306 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
307 store
->src
[2] = nir_src_for_ssa(outval
);
308 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
310 nir_builder_instr_insert(&b
, &store
->instr
);
314 /* Buffer to image - don't write use image accessors */
316 radv_device_init_meta_btoi_state(struct radv_device
*device
)
319 struct radv_shader_module cs
= { .nir
= NULL
};
321 zero(device
->meta_state
.btoi
);
323 cs
.nir
= build_nir_btoi_compute_shader(device
);
326 * two descriptors one for the image being sampled
327 * one for the buffer being written.
329 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
330 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
332 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
335 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
336 .descriptorCount
= 1,
337 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
338 .pImmutableSamplers
= NULL
342 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
343 .descriptorCount
= 1,
344 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
345 .pImmutableSamplers
= NULL
350 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
352 &device
->meta_state
.alloc
,
353 &device
->meta_state
.btoi
.img_ds_layout
);
354 if (result
!= VK_SUCCESS
)
358 VkPipelineLayoutCreateInfo pl_create_info
= {
359 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
361 .pSetLayouts
= &device
->meta_state
.btoi
.img_ds_layout
,
362 .pushConstantRangeCount
= 1,
363 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
366 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
368 &device
->meta_state
.alloc
,
369 &device
->meta_state
.btoi
.img_p_layout
);
370 if (result
!= VK_SUCCESS
)
375 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
376 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
377 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
378 .module
= radv_shader_module_to_handle(&cs
),
380 .pSpecializationInfo
= NULL
,
383 VkComputePipelineCreateInfo vk_pipeline_info
= {
384 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
385 .stage
= pipeline_shader_stage
,
387 .layout
= device
->meta_state
.btoi
.img_p_layout
,
390 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
391 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
392 1, &vk_pipeline_info
, NULL
,
393 &device
->meta_state
.btoi
.pipeline
);
394 if (result
!= VK_SUCCESS
)
405 radv_device_finish_meta_btoi_state(struct radv_device
*device
)
407 if (device
->meta_state
.btoi
.img_p_layout
) {
408 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
409 device
->meta_state
.btoi
.img_p_layout
,
410 &device
->meta_state
.alloc
);
412 if (device
->meta_state
.btoi
.img_ds_layout
) {
413 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
414 device
->meta_state
.btoi
.img_ds_layout
,
415 &device
->meta_state
.alloc
);
417 if (device
->meta_state
.btoi
.pipeline
) {
418 radv_DestroyPipeline(radv_device_to_handle(device
),
419 device
->meta_state
.btoi
.pipeline
,
420 &device
->meta_state
.alloc
);
425 build_nir_itoi_compute_shader(struct radv_device
*dev
)
428 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
432 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
436 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
437 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itoi_cs");
438 b
.shader
->info
->cs
.local_size
[0] = 16;
439 b
.shader
->info
->cs
.local_size
[1] = 16;
440 b
.shader
->info
->cs
.local_size
[2] = 1;
441 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
443 input_img
->data
.descriptor_set
= 0;
444 input_img
->data
.binding
= 0;
446 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
447 img_type
, "out_img");
448 output_img
->data
.descriptor_set
= 0;
449 output_img
->data
.binding
= 1;
451 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
452 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
453 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
454 b
.shader
->info
->cs
.local_size
[0],
455 b
.shader
->info
->cs
.local_size
[1],
456 b
.shader
->info
->cs
.local_size
[2], 0);
458 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
460 nir_intrinsic_instr
*src_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
461 src_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
462 src_offset
->num_components
= 2;
463 nir_ssa_dest_init(&src_offset
->instr
, &src_offset
->dest
, 2, 32, "src_offset");
464 nir_builder_instr_insert(&b
, &src_offset
->instr
);
466 nir_intrinsic_instr
*dst_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
467 dst_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
468 dst_offset
->num_components
= 2;
469 nir_ssa_dest_init(&dst_offset
->instr
, &dst_offset
->dest
, 2, 32, "dst_offset");
470 nir_builder_instr_insert(&b
, &dst_offset
->instr
);
472 nir_ssa_def
*src_coord
= nir_iadd(&b
, global_id
, &src_offset
->dest
.ssa
);
474 nir_ssa_def
*dst_coord
= nir_iadd(&b
, global_id
, &dst_offset
->dest
.ssa
);
476 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
477 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
478 tex
->op
= nir_texop_txf
;
479 tex
->src
[0].src_type
= nir_tex_src_coord
;
480 tex
->src
[0].src
= nir_src_for_ssa(src_coord
);
481 tex
->src
[1].src_type
= nir_tex_src_lod
;
482 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
483 tex
->dest_type
= nir_type_float
;
484 tex
->is_array
= false;
485 tex
->coord_components
= 2;
486 tex
->texture
= nir_deref_var_create(tex
, input_img
);
489 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
490 nir_builder_instr_insert(&b
, &tex
->instr
);
492 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
493 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
494 store
->src
[0] = nir_src_for_ssa(dst_coord
);
495 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
496 store
->src
[2] = nir_src_for_ssa(outval
);
497 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
499 nir_builder_instr_insert(&b
, &store
->instr
);
503 /* image to image - don't write use image accessors */
505 radv_device_init_meta_itoi_state(struct radv_device
*device
)
508 struct radv_shader_module cs
= { .nir
= NULL
};
510 zero(device
->meta_state
.itoi
);
512 cs
.nir
= build_nir_itoi_compute_shader(device
);
515 * two descriptors one for the image being sampled
516 * one for the buffer being written.
518 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
519 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
521 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
524 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
525 .descriptorCount
= 1,
526 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
527 .pImmutableSamplers
= NULL
531 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
532 .descriptorCount
= 1,
533 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
534 .pImmutableSamplers
= NULL
539 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
541 &device
->meta_state
.alloc
,
542 &device
->meta_state
.itoi
.img_ds_layout
);
543 if (result
!= VK_SUCCESS
)
547 VkPipelineLayoutCreateInfo pl_create_info
= {
548 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
550 .pSetLayouts
= &device
->meta_state
.itoi
.img_ds_layout
,
551 .pushConstantRangeCount
= 1,
552 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
555 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
557 &device
->meta_state
.alloc
,
558 &device
->meta_state
.itoi
.img_p_layout
);
559 if (result
!= VK_SUCCESS
)
564 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
565 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
566 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
567 .module
= radv_shader_module_to_handle(&cs
),
569 .pSpecializationInfo
= NULL
,
572 VkComputePipelineCreateInfo vk_pipeline_info
= {
573 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
574 .stage
= pipeline_shader_stage
,
576 .layout
= device
->meta_state
.itoi
.img_p_layout
,
579 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
580 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
581 1, &vk_pipeline_info
, NULL
,
582 &device
->meta_state
.itoi
.pipeline
);
583 if (result
!= VK_SUCCESS
)
594 radv_device_finish_meta_itoi_state(struct radv_device
*device
)
596 if (device
->meta_state
.itoi
.img_p_layout
) {
597 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
598 device
->meta_state
.itoi
.img_p_layout
,
599 &device
->meta_state
.alloc
);
601 if (device
->meta_state
.itoi
.img_ds_layout
) {
602 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
603 device
->meta_state
.itoi
.img_ds_layout
,
604 &device
->meta_state
.alloc
);
606 if (device
->meta_state
.itoi
.pipeline
) {
607 radv_DestroyPipeline(radv_device_to_handle(device
),
608 device
->meta_state
.itoi
.pipeline
,
609 &device
->meta_state
.alloc
);
614 build_nir_cleari_compute_shader(struct radv_device
*dev
)
617 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
621 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
622 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_cleari_cs");
623 b
.shader
->info
->cs
.local_size
[0] = 16;
624 b
.shader
->info
->cs
.local_size
[1] = 16;
625 b
.shader
->info
->cs
.local_size
[2] = 1;
627 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
628 img_type
, "out_img");
629 output_img
->data
.descriptor_set
= 0;
630 output_img
->data
.binding
= 0;
632 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
633 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
634 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
635 b
.shader
->info
->cs
.local_size
[0],
636 b
.shader
->info
->cs
.local_size
[1],
637 b
.shader
->info
->cs
.local_size
[2], 0);
639 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
641 nir_intrinsic_instr
*clear_val
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
642 clear_val
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
643 clear_val
->num_components
= 4;
644 nir_ssa_dest_init(&clear_val
->instr
, &clear_val
->dest
, 4, 32, "clear_value");
645 nir_builder_instr_insert(&b
, &clear_val
->instr
);
647 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
648 store
->src
[0] = nir_src_for_ssa(global_id
);
649 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
650 store
->src
[2] = nir_src_for_ssa(&clear_val
->dest
.ssa
);
651 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
653 nir_builder_instr_insert(&b
, &store
->instr
);
658 radv_device_init_meta_cleari_state(struct radv_device
*device
)
661 struct radv_shader_module cs
= { .nir
= NULL
};
663 zero(device
->meta_state
.cleari
);
665 cs
.nir
= build_nir_cleari_compute_shader(device
);
668 * two descriptors one for the image being sampled
669 * one for the buffer being written.
671 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
672 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
674 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
677 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
678 .descriptorCount
= 1,
679 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
680 .pImmutableSamplers
= NULL
685 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
687 &device
->meta_state
.alloc
,
688 &device
->meta_state
.cleari
.img_ds_layout
);
689 if (result
!= VK_SUCCESS
)
693 VkPipelineLayoutCreateInfo pl_create_info
= {
694 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
696 .pSetLayouts
= &device
->meta_state
.cleari
.img_ds_layout
,
697 .pushConstantRangeCount
= 1,
698 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
701 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
703 &device
->meta_state
.alloc
,
704 &device
->meta_state
.cleari
.img_p_layout
);
705 if (result
!= VK_SUCCESS
)
710 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
711 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
712 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
713 .module
= radv_shader_module_to_handle(&cs
),
715 .pSpecializationInfo
= NULL
,
718 VkComputePipelineCreateInfo vk_pipeline_info
= {
719 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
720 .stage
= pipeline_shader_stage
,
722 .layout
= device
->meta_state
.cleari
.img_p_layout
,
725 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
726 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
727 1, &vk_pipeline_info
, NULL
,
728 &device
->meta_state
.cleari
.pipeline
);
729 if (result
!= VK_SUCCESS
)
740 radv_device_finish_meta_cleari_state(struct radv_device
*device
)
742 if (device
->meta_state
.cleari
.img_p_layout
) {
743 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
744 device
->meta_state
.cleari
.img_p_layout
,
745 &device
->meta_state
.alloc
);
747 if (device
->meta_state
.cleari
.img_ds_layout
) {
748 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
749 device
->meta_state
.cleari
.img_ds_layout
,
750 &device
->meta_state
.alloc
);
752 if (device
->meta_state
.cleari
.pipeline
) {
753 radv_DestroyPipeline(radv_device_to_handle(device
),
754 device
->meta_state
.cleari
.pipeline
,
755 &device
->meta_state
.alloc
);
/* Tear down all four buffer/image meta sub-states. */
void
radv_device_finish_meta_bufimage_state(struct radv_device *device)
{
	radv_device_finish_meta_itob_state(device);
	radv_device_finish_meta_btoi_state(device);
	radv_device_finish_meta_itoi_state(device);
	radv_device_finish_meta_cleari_state(device);
}
769 radv_device_init_meta_bufimage_state(struct radv_device
*device
)
773 result
= radv_device_init_meta_itob_state(device
);
774 if (result
!= VK_SUCCESS
)
777 result
= radv_device_init_meta_btoi_state(device
);
778 if (result
!= VK_SUCCESS
)
781 result
= radv_device_init_meta_itoi_state(device
);
782 if (result
!= VK_SUCCESS
)
785 result
= radv_device_init_meta_cleari_state(device
);
786 if (result
!= VK_SUCCESS
)
791 radv_device_finish_meta_itoi_state(device
);
793 radv_device_finish_meta_btoi_state(device
);
795 radv_device_finish_meta_itob_state(device
);
/* Save compute state before an image->image meta op; 16 matches the itoi
 * pipeline layout's 16-byte push-constant range.
 */
void
radv_meta_begin_itoi(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore compute state saved by radv_meta_begin_itoi(). */
void
radv_meta_end_itoi(struct radv_cmd_buffer *cmd_buffer,
		   struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
/* Save compute state before a buffer<->image meta op; 12 matches the
 * itob/btoi pipeline layouts' 12-byte push-constant range.
 */
void
radv_meta_begin_bufimage(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 12);
}
/* Restore compute state saved by radv_meta_begin_bufimage(). */
void
radv_meta_end_bufimage(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 12);
}
/* Save compute state before an image-clear meta op; 16 matches the cleari
 * pipeline layout's 16-byte push-constant range.
 */
void
radv_meta_begin_cleari(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore compute state saved by radv_meta_begin_cleari(). */
void
radv_meta_end_cleari(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
842 create_iview(struct radv_cmd_buffer
*cmd_buffer
,
843 struct radv_meta_blit2d_surf
*surf
,
844 VkImageUsageFlags usage
,
845 struct radv_image_view
*iview
)
848 radv_image_view_init(iview
, cmd_buffer
->device
,
849 &(VkImageViewCreateInfo
) {
850 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
851 .image
= radv_image_to_handle(surf
->image
),
852 .viewType
= VK_IMAGE_VIEW_TYPE_2D
,
853 .format
= surf
->format
,
854 .subresourceRange
= {
855 .aspectMask
= surf
->aspect_mask
,
856 .baseMipLevel
= surf
->level
,
858 .baseArrayLayer
= surf
->layer
,
861 }, cmd_buffer
, usage
);
865 create_bview(struct radv_cmd_buffer
*cmd_buffer
,
866 struct radv_buffer
*buffer
,
869 struct radv_buffer_view
*bview
)
871 radv_buffer_view_init(bview
, cmd_buffer
->device
,
872 &(VkBufferViewCreateInfo
) {
873 .sType
= VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
,
875 .buffer
= radv_buffer_to_handle(buffer
),
878 .range
= VK_WHOLE_SIZE
,
884 struct radv_image_view src_iview
;
885 struct radv_buffer_view dst_bview
;
890 itob_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
891 struct itob_temps
*tmp
)
893 struct radv_device
*device
= cmd_buffer
->device
;
894 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
896 radv_temp_descriptor_set_create(device
, cmd_buffer
,
897 device
->meta_state
.itob
.img_ds_layout
,
900 radv_UpdateDescriptorSets(vk_device
,
902 (VkWriteDescriptorSet
[]) {
904 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
907 .dstArrayElement
= 0,
908 .descriptorCount
= 1,
909 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
910 .pImageInfo
= (VkDescriptorImageInfo
[]) {
912 .sampler
= VK_NULL_HANDLE
,
913 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
914 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
919 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
922 .dstArrayElement
= 0,
923 .descriptorCount
= 1,
924 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
925 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->dst_bview
) },
929 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
930 VK_PIPELINE_BIND_POINT_COMPUTE
,
931 device
->meta_state
.itob
.img_p_layout
, 0, 1,
936 itob_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
938 VkPipeline pipeline
=
939 cmd_buffer
->device
->meta_state
.itob
.pipeline
;
941 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
942 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
943 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
948 radv_meta_image_to_buffer(struct radv_cmd_buffer
*cmd_buffer
,
949 struct radv_meta_blit2d_surf
*src
,
950 struct radv_meta_blit2d_buffer
*dst
,
952 struct radv_meta_blit2d_rect
*rects
)
954 struct radv_device
*device
= cmd_buffer
->device
;
955 struct itob_temps temps
;
957 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
958 create_bview(cmd_buffer
, dst
->buffer
, dst
->offset
, dst
->format
, &temps
.dst_bview
);
959 itob_bind_descriptors(cmd_buffer
, &temps
);
961 itob_bind_pipeline(cmd_buffer
);
963 for (unsigned r
= 0; r
< num_rects
; ++r
) {
964 unsigned push_constants
[3] = {
969 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
970 device
->meta_state
.itob
.img_p_layout
,
971 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
974 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
976 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);
980 struct radv_buffer_view src_bview
;
981 struct radv_image_view dst_iview
;
986 btoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
987 struct btoi_temps
*tmp
)
989 struct radv_device
*device
= cmd_buffer
->device
;
990 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
992 radv_temp_descriptor_set_create(device
, cmd_buffer
,
993 device
->meta_state
.btoi
.img_ds_layout
,
996 radv_UpdateDescriptorSets(vk_device
,
998 (VkWriteDescriptorSet
[]) {
1000 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1003 .dstArrayElement
= 0,
1004 .descriptorCount
= 1,
1005 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
1006 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->src_bview
) },
1009 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1012 .dstArrayElement
= 0,
1013 .descriptorCount
= 1,
1014 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1015 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1018 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1019 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1025 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
1026 VK_PIPELINE_BIND_POINT_COMPUTE
,
1027 device
->meta_state
.btoi
.img_p_layout
, 0, 1,
1028 &tmp
->set
, 0, NULL
);
1032 btoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1034 VkPipeline pipeline
=
1035 cmd_buffer
->device
->meta_state
.btoi
.pipeline
;
1037 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1038 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1039 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1044 radv_meta_buffer_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1045 struct radv_meta_blit2d_buffer
*src
,
1046 struct radv_meta_blit2d_surf
*dst
,
1048 struct radv_meta_blit2d_rect
*rects
)
1050 struct radv_device
*device
= cmd_buffer
->device
;
1051 struct btoi_temps temps
;
1053 create_bview(cmd_buffer
, src
->buffer
, src
->offset
, src
->format
, &temps
.src_bview
);
1054 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1055 btoi_bind_descriptors(cmd_buffer
, &temps
);
1057 btoi_bind_pipeline(cmd_buffer
);
1059 for (unsigned r
= 0; r
< num_rects
; ++r
) {
1060 unsigned push_constants
[3] = {
1065 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1066 device
->meta_state
.btoi
.img_p_layout
,
1067 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
1070 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
1072 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);
1076 struct radv_image_view src_iview
;
1077 struct radv_image_view dst_iview
;
1078 VkDescriptorSet set
;
1082 itoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
1083 struct itoi_temps
*tmp
)
1085 struct radv_device
*device
= cmd_buffer
->device
;
1086 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
1088 radv_temp_descriptor_set_create(device
, cmd_buffer
,
1089 device
->meta_state
.itoi
.img_ds_layout
,
1092 radv_UpdateDescriptorSets(vk_device
,
1094 (VkWriteDescriptorSet
[]) {
1096 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1099 .dstArrayElement
= 0,
1100 .descriptorCount
= 1,
1101 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
1102 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1105 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
1106 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1111 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1114 .dstArrayElement
= 0,
1115 .descriptorCount
= 1,
1116 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1117 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1120 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1121 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1127 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
1128 VK_PIPELINE_BIND_POINT_COMPUTE
,
1129 device
->meta_state
.itoi
.img_p_layout
, 0, 1,
1130 &tmp
->set
, 0, NULL
);
1134 itoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1136 VkPipeline pipeline
=
1137 cmd_buffer
->device
->meta_state
.itoi
.pipeline
;
1139 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1140 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1141 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1146 radv_meta_image_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1147 struct radv_meta_blit2d_surf
*src
,
1148 struct radv_meta_blit2d_surf
*dst
,
1150 struct radv_meta_blit2d_rect
*rects
)
1152 struct radv_device
*device
= cmd_buffer
->device
;
1153 struct itoi_temps temps
;
1155 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
1156 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1158 itoi_bind_descriptors(cmd_buffer
, &temps
);
1160 itoi_bind_pipeline(cmd_buffer
);
1162 for (unsigned r
= 0; r
< num_rects
; ++r
) {
1163 unsigned push_constants
[4] = {
1169 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1170 device
->meta_state
.itoi
.img_p_layout
,
1171 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16,
1174 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
1176 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);
1179 struct cleari_temps
{
1180 struct radv_image_view dst_iview
;
1181 VkDescriptorSet set
;
1185 cleari_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
1186 struct cleari_temps
*tmp
)
1188 struct radv_device
*device
= cmd_buffer
->device
;
1189 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
1191 radv_temp_descriptor_set_create(device
, cmd_buffer
,
1192 device
->meta_state
.cleari
.img_ds_layout
,
1195 radv_UpdateDescriptorSets(vk_device
,
1197 (VkWriteDescriptorSet
[]) {
1199 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1202 .dstArrayElement
= 0,
1203 .descriptorCount
= 1,
1204 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1205 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1208 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1209 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1215 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
1216 VK_PIPELINE_BIND_POINT_COMPUTE
,
1217 device
->meta_state
.cleari
.img_p_layout
, 0, 1,
1218 &tmp
->set
, 0, NULL
);
1222 cleari_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1224 VkPipeline pipeline
=
1225 cmd_buffer
->device
->meta_state
.cleari
.pipeline
;
1227 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1228 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1229 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1234 radv_meta_clear_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1235 struct radv_meta_blit2d_surf
*dst
,
1236 const VkClearColorValue
*clear_color
)
1238 struct radv_device
*device
= cmd_buffer
->device
;
1239 struct cleari_temps temps
;
1241 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1242 cleari_bind_descriptors(cmd_buffer
, &temps
);
1244 cleari_bind_pipeline(cmd_buffer
);
1246 unsigned push_constants
[4] = {
1247 clear_color
->uint32
[0],
1248 clear_color
->uint32
[1],
1249 clear_color
->uint32
[2],
1250 clear_color
->uint32
[3],
1253 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1254 device
->meta_state
.cleari
.img_p_layout
,
1255 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16,
1258 radv_unaligned_dispatch(cmd_buffer
, dst
->image
->extent
.width
, dst
->image
->extent
.height
, 1);
1259 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);