2 * Copyright © 2016 Red Hat.
3 * Copyright © 2016 Bas Nieuwenhuizen
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
24 #include "radv_meta.h"
25 #include "nir/nir_builder.h"
28 * GFX queue: Compute shader implementation of image->buffer copy
29 * Compute queue: implementation also of buffer->image, image->image, and image clear.
33 build_nir_itob_compute_shader(struct radv_device
*dev
)
36 const struct glsl_type
*sampler_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
40 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
44 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
45 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itob_cs");
46 b
.shader
->info
->cs
.local_size
[0] = 16;
47 b
.shader
->info
->cs
.local_size
[1] = 16;
48 b
.shader
->info
->cs
.local_size
[2] = 1;
49 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
50 sampler_type
, "s_tex");
51 input_img
->data
.descriptor_set
= 0;
52 input_img
->data
.binding
= 0;
54 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
56 output_img
->data
.descriptor_set
= 0;
57 output_img
->data
.binding
= 1;
59 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
60 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
61 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
62 b
.shader
->info
->cs
.local_size
[0],
63 b
.shader
->info
->cs
.local_size
[1],
64 b
.shader
->info
->cs
.local_size
[2], 0);
66 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
70 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
71 nir_intrinsic_set_base(offset
, 0);
72 nir_intrinsic_set_range(offset
, 12);
73 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
74 offset
->num_components
= 2;
75 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
76 nir_builder_instr_insert(&b
, &offset
->instr
);
78 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
79 nir_intrinsic_set_base(stride
, 0);
80 nir_intrinsic_set_range(stride
, 12);
81 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
82 stride
->num_components
= 1;
83 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
84 nir_builder_instr_insert(&b
, &stride
->instr
);
86 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
88 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
89 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
90 tex
->op
= nir_texop_txf
;
91 tex
->src
[0].src_type
= nir_tex_src_coord
;
92 tex
->src
[0].src
= nir_src_for_ssa(nir_channels(&b
, img_coord
, 0x3));
93 tex
->src
[1].src_type
= nir_tex_src_lod
;
94 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
95 tex
->dest_type
= nir_type_float
;
96 tex
->is_array
= false;
97 tex
->coord_components
= 2;
98 tex
->texture
= nir_deref_var_create(tex
, input_img
);
101 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
102 nir_builder_instr_insert(&b
, &tex
->instr
);
104 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
105 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
107 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
108 tmp
= nir_iadd(&b
, tmp
, pos_x
);
110 nir_ssa_def
*coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
112 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
113 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
114 store
->src
[0] = nir_src_for_ssa(coord
);
115 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
116 store
->src
[2] = nir_src_for_ssa(outval
);
117 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
119 nir_builder_instr_insert(&b
, &store
->instr
);
/* Image to buffer - don't write using image accessors. */
125 radv_device_init_meta_itob_state(struct radv_device
*device
)
128 struct radv_shader_module cs
= { .nir
= NULL
};
130 zero(device
->meta_state
.itob
);
132 cs
.nir
= build_nir_itob_compute_shader(device
);
 * two descriptors: one for the image being sampled,
 * one for the buffer being written.
138 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
139 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
140 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
142 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
145 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
146 .descriptorCount
= 1,
147 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
148 .pImmutableSamplers
= NULL
152 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
153 .descriptorCount
= 1,
154 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
155 .pImmutableSamplers
= NULL
160 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
162 &device
->meta_state
.alloc
,
163 &device
->meta_state
.itob
.img_ds_layout
);
164 if (result
!= VK_SUCCESS
)
168 VkPipelineLayoutCreateInfo pl_create_info
= {
169 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
171 .pSetLayouts
= &device
->meta_state
.itob
.img_ds_layout
,
172 .pushConstantRangeCount
= 1,
173 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
176 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
178 &device
->meta_state
.alloc
,
179 &device
->meta_state
.itob
.img_p_layout
);
180 if (result
!= VK_SUCCESS
)
185 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
186 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
187 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
188 .module
= radv_shader_module_to_handle(&cs
),
190 .pSpecializationInfo
= NULL
,
193 VkComputePipelineCreateInfo vk_pipeline_info
= {
194 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
195 .stage
= pipeline_shader_stage
,
197 .layout
= device
->meta_state
.itob
.img_p_layout
,
200 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
201 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
202 1, &vk_pipeline_info
, NULL
,
203 &device
->meta_state
.itob
.pipeline
);
204 if (result
!= VK_SUCCESS
)
215 radv_device_finish_meta_itob_state(struct radv_device
*device
)
217 if (device
->meta_state
.itob
.img_p_layout
) {
218 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
219 device
->meta_state
.itob
.img_p_layout
,
220 &device
->meta_state
.alloc
);
222 if (device
->meta_state
.itob
.img_ds_layout
) {
223 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
224 device
->meta_state
.itob
.img_ds_layout
,
225 &device
->meta_state
.alloc
);
227 if (device
->meta_state
.itob
.pipeline
) {
228 radv_DestroyPipeline(radv_device_to_handle(device
),
229 device
->meta_state
.itob
.pipeline
,
230 &device
->meta_state
.alloc
);
235 build_nir_btoi_compute_shader(struct radv_device
*dev
)
238 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
242 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
246 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
247 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_btoi_cs");
248 b
.shader
->info
->cs
.local_size
[0] = 16;
249 b
.shader
->info
->cs
.local_size
[1] = 16;
250 b
.shader
->info
->cs
.local_size
[2] = 1;
251 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
253 input_img
->data
.descriptor_set
= 0;
254 input_img
->data
.binding
= 0;
256 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
257 img_type
, "out_img");
258 output_img
->data
.descriptor_set
= 0;
259 output_img
->data
.binding
= 1;
261 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
262 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
263 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
264 b
.shader
->info
->cs
.local_size
[0],
265 b
.shader
->info
->cs
.local_size
[1],
266 b
.shader
->info
->cs
.local_size
[2], 0);
268 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
270 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
271 nir_intrinsic_set_base(offset
, 0);
272 nir_intrinsic_set_range(offset
, 12);
273 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
274 offset
->num_components
= 2;
275 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
276 nir_builder_instr_insert(&b
, &offset
->instr
);
278 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
279 nir_intrinsic_set_base(stride
, 0);
280 nir_intrinsic_set_range(stride
, 12);
281 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
282 stride
->num_components
= 1;
283 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
284 nir_builder_instr_insert(&b
, &stride
->instr
);
286 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
287 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
289 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
290 tmp
= nir_iadd(&b
, tmp
, pos_x
);
292 nir_ssa_def
*buf_coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
294 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
296 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
297 tex
->sampler_dim
= GLSL_SAMPLER_DIM_BUF
;
298 tex
->op
= nir_texop_txf
;
299 tex
->src
[0].src_type
= nir_tex_src_coord
;
300 tex
->src
[0].src
= nir_src_for_ssa(nir_channels(&b
, buf_coord
, 1));
301 tex
->src
[1].src_type
= nir_tex_src_lod
;
302 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
303 tex
->dest_type
= nir_type_float
;
304 tex
->is_array
= false;
305 tex
->coord_components
= 1;
306 tex
->texture
= nir_deref_var_create(tex
, input_img
);
309 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
310 nir_builder_instr_insert(&b
, &tex
->instr
);
312 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
313 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
314 store
->src
[0] = nir_src_for_ssa(img_coord
);
315 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
316 store
->src
[2] = nir_src_for_ssa(outval
);
317 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
319 nir_builder_instr_insert(&b
, &store
->instr
);
/* Buffer to image - don't write using image accessors. */
325 radv_device_init_meta_btoi_state(struct radv_device
*device
)
328 struct radv_shader_module cs
= { .nir
= NULL
};
330 zero(device
->meta_state
.btoi
);
332 cs
.nir
= build_nir_btoi_compute_shader(device
);
335 * two descriptors one for the image being sampled
336 * one for the buffer being written.
338 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
339 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
340 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
342 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
345 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
346 .descriptorCount
= 1,
347 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
348 .pImmutableSamplers
= NULL
352 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
353 .descriptorCount
= 1,
354 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
355 .pImmutableSamplers
= NULL
360 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
362 &device
->meta_state
.alloc
,
363 &device
->meta_state
.btoi
.img_ds_layout
);
364 if (result
!= VK_SUCCESS
)
368 VkPipelineLayoutCreateInfo pl_create_info
= {
369 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
371 .pSetLayouts
= &device
->meta_state
.btoi
.img_ds_layout
,
372 .pushConstantRangeCount
= 1,
373 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
376 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
378 &device
->meta_state
.alloc
,
379 &device
->meta_state
.btoi
.img_p_layout
);
380 if (result
!= VK_SUCCESS
)
385 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
386 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
387 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
388 .module
= radv_shader_module_to_handle(&cs
),
390 .pSpecializationInfo
= NULL
,
393 VkComputePipelineCreateInfo vk_pipeline_info
= {
394 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
395 .stage
= pipeline_shader_stage
,
397 .layout
= device
->meta_state
.btoi
.img_p_layout
,
400 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
401 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
402 1, &vk_pipeline_info
, NULL
,
403 &device
->meta_state
.btoi
.pipeline
);
404 if (result
!= VK_SUCCESS
)
415 radv_device_finish_meta_btoi_state(struct radv_device
*device
)
417 if (device
->meta_state
.btoi
.img_p_layout
) {
418 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
419 device
->meta_state
.btoi
.img_p_layout
,
420 &device
->meta_state
.alloc
);
422 if (device
->meta_state
.btoi
.img_ds_layout
) {
423 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
424 device
->meta_state
.btoi
.img_ds_layout
,
425 &device
->meta_state
.alloc
);
427 if (device
->meta_state
.btoi
.pipeline
) {
428 radv_DestroyPipeline(radv_device_to_handle(device
),
429 device
->meta_state
.btoi
.pipeline
,
430 &device
->meta_state
.alloc
);
435 build_nir_itoi_compute_shader(struct radv_device
*dev
)
438 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
442 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
446 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
447 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itoi_cs");
448 b
.shader
->info
->cs
.local_size
[0] = 16;
449 b
.shader
->info
->cs
.local_size
[1] = 16;
450 b
.shader
->info
->cs
.local_size
[2] = 1;
451 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
453 input_img
->data
.descriptor_set
= 0;
454 input_img
->data
.binding
= 0;
456 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
457 img_type
, "out_img");
458 output_img
->data
.descriptor_set
= 0;
459 output_img
->data
.binding
= 1;
461 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
462 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
463 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
464 b
.shader
->info
->cs
.local_size
[0],
465 b
.shader
->info
->cs
.local_size
[1],
466 b
.shader
->info
->cs
.local_size
[2], 0);
468 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
470 nir_intrinsic_instr
*src_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
471 nir_intrinsic_set_base(src_offset
, 0);
472 nir_intrinsic_set_range(src_offset
, 16);
473 src_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
474 src_offset
->num_components
= 2;
475 nir_ssa_dest_init(&src_offset
->instr
, &src_offset
->dest
, 2, 32, "src_offset");
476 nir_builder_instr_insert(&b
, &src_offset
->instr
);
478 nir_intrinsic_instr
*dst_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
479 nir_intrinsic_set_base(dst_offset
, 0);
480 nir_intrinsic_set_range(dst_offset
, 16);
481 dst_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
482 dst_offset
->num_components
= 2;
483 nir_ssa_dest_init(&dst_offset
->instr
, &dst_offset
->dest
, 2, 32, "dst_offset");
484 nir_builder_instr_insert(&b
, &dst_offset
->instr
);
486 nir_ssa_def
*src_coord
= nir_iadd(&b
, global_id
, &src_offset
->dest
.ssa
);
488 nir_ssa_def
*dst_coord
= nir_iadd(&b
, global_id
, &dst_offset
->dest
.ssa
);
490 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
491 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
492 tex
->op
= nir_texop_txf
;
493 tex
->src
[0].src_type
= nir_tex_src_coord
;
494 tex
->src
[0].src
= nir_src_for_ssa(nir_channels(&b
, src_coord
, 3));
495 tex
->src
[1].src_type
= nir_tex_src_lod
;
496 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
497 tex
->dest_type
= nir_type_float
;
498 tex
->is_array
= false;
499 tex
->coord_components
= 2;
500 tex
->texture
= nir_deref_var_create(tex
, input_img
);
503 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
504 nir_builder_instr_insert(&b
, &tex
->instr
);
506 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
507 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
508 store
->src
[0] = nir_src_for_ssa(dst_coord
);
509 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
510 store
->src
[2] = nir_src_for_ssa(outval
);
511 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
513 nir_builder_instr_insert(&b
, &store
->instr
);
/* Image to image - don't write using image accessors. */
519 radv_device_init_meta_itoi_state(struct radv_device
*device
)
522 struct radv_shader_module cs
= { .nir
= NULL
};
524 zero(device
->meta_state
.itoi
);
526 cs
.nir
= build_nir_itoi_compute_shader(device
);
 * two descriptors: one for the image being sampled,
 * one for the image being written.
532 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
533 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
534 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
536 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
539 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
540 .descriptorCount
= 1,
541 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
542 .pImmutableSamplers
= NULL
546 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
547 .descriptorCount
= 1,
548 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
549 .pImmutableSamplers
= NULL
554 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
556 &device
->meta_state
.alloc
,
557 &device
->meta_state
.itoi
.img_ds_layout
);
558 if (result
!= VK_SUCCESS
)
562 VkPipelineLayoutCreateInfo pl_create_info
= {
563 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
565 .pSetLayouts
= &device
->meta_state
.itoi
.img_ds_layout
,
566 .pushConstantRangeCount
= 1,
567 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
570 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
572 &device
->meta_state
.alloc
,
573 &device
->meta_state
.itoi
.img_p_layout
);
574 if (result
!= VK_SUCCESS
)
579 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
580 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
581 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
582 .module
= radv_shader_module_to_handle(&cs
),
584 .pSpecializationInfo
= NULL
,
587 VkComputePipelineCreateInfo vk_pipeline_info
= {
588 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
589 .stage
= pipeline_shader_stage
,
591 .layout
= device
->meta_state
.itoi
.img_p_layout
,
594 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
595 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
596 1, &vk_pipeline_info
, NULL
,
597 &device
->meta_state
.itoi
.pipeline
);
598 if (result
!= VK_SUCCESS
)
609 radv_device_finish_meta_itoi_state(struct radv_device
*device
)
611 if (device
->meta_state
.itoi
.img_p_layout
) {
612 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
613 device
->meta_state
.itoi
.img_p_layout
,
614 &device
->meta_state
.alloc
);
616 if (device
->meta_state
.itoi
.img_ds_layout
) {
617 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
618 device
->meta_state
.itoi
.img_ds_layout
,
619 &device
->meta_state
.alloc
);
621 if (device
->meta_state
.itoi
.pipeline
) {
622 radv_DestroyPipeline(radv_device_to_handle(device
),
623 device
->meta_state
.itoi
.pipeline
,
624 &device
->meta_state
.alloc
);
629 build_nir_cleari_compute_shader(struct radv_device
*dev
)
632 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
636 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
637 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_cleari_cs");
638 b
.shader
->info
->cs
.local_size
[0] = 16;
639 b
.shader
->info
->cs
.local_size
[1] = 16;
640 b
.shader
->info
->cs
.local_size
[2] = 1;
642 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
643 img_type
, "out_img");
644 output_img
->data
.descriptor_set
= 0;
645 output_img
->data
.binding
= 0;
647 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
648 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
649 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
650 b
.shader
->info
->cs
.local_size
[0],
651 b
.shader
->info
->cs
.local_size
[1],
652 b
.shader
->info
->cs
.local_size
[2], 0);
654 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
656 nir_intrinsic_instr
*clear_val
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
657 nir_intrinsic_set_base(clear_val
, 0);
658 nir_intrinsic_set_range(clear_val
, 16);
659 clear_val
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
660 clear_val
->num_components
= 4;
661 nir_ssa_dest_init(&clear_val
->instr
, &clear_val
->dest
, 4, 32, "clear_value");
662 nir_builder_instr_insert(&b
, &clear_val
->instr
);
664 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
665 store
->src
[0] = nir_src_for_ssa(global_id
);
666 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
667 store
->src
[2] = nir_src_for_ssa(&clear_val
->dest
.ssa
);
668 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
670 nir_builder_instr_insert(&b
, &store
->instr
);
675 radv_device_init_meta_cleari_state(struct radv_device
*device
)
678 struct radv_shader_module cs
= { .nir
= NULL
};
680 zero(device
->meta_state
.cleari
);
682 cs
.nir
= build_nir_cleari_compute_shader(device
);
 * a single descriptor for the storage image being cleared.
688 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
689 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
690 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
692 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
695 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
696 .descriptorCount
= 1,
697 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
698 .pImmutableSamplers
= NULL
703 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
705 &device
->meta_state
.alloc
,
706 &device
->meta_state
.cleari
.img_ds_layout
);
707 if (result
!= VK_SUCCESS
)
711 VkPipelineLayoutCreateInfo pl_create_info
= {
712 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
714 .pSetLayouts
= &device
->meta_state
.cleari
.img_ds_layout
,
715 .pushConstantRangeCount
= 1,
716 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
719 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
721 &device
->meta_state
.alloc
,
722 &device
->meta_state
.cleari
.img_p_layout
);
723 if (result
!= VK_SUCCESS
)
728 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
729 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
730 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
731 .module
= radv_shader_module_to_handle(&cs
),
733 .pSpecializationInfo
= NULL
,
736 VkComputePipelineCreateInfo vk_pipeline_info
= {
737 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
738 .stage
= pipeline_shader_stage
,
740 .layout
= device
->meta_state
.cleari
.img_p_layout
,
743 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
744 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
745 1, &vk_pipeline_info
, NULL
,
746 &device
->meta_state
.cleari
.pipeline
);
747 if (result
!= VK_SUCCESS
)
758 radv_device_finish_meta_cleari_state(struct radv_device
*device
)
760 if (device
->meta_state
.cleari
.img_p_layout
) {
761 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
762 device
->meta_state
.cleari
.img_p_layout
,
763 &device
->meta_state
.alloc
);
765 if (device
->meta_state
.cleari
.img_ds_layout
) {
766 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
767 device
->meta_state
.cleari
.img_ds_layout
,
768 &device
->meta_state
.alloc
);
770 if (device
->meta_state
.cleari
.pipeline
) {
771 radv_DestroyPipeline(radv_device_to_handle(device
),
772 device
->meta_state
.cleari
.pipeline
,
773 &device
->meta_state
.alloc
);
/* Destroy all compute-based buffer<->image meta state: image->buffer,
 * buffer->image, image->image and image-clear pipelines.  Safe to call
 * on partially initialized state. */
void
radv_device_finish_meta_bufimage_state(struct radv_device *device)
{
	radv_device_finish_meta_itob_state(device);
	radv_device_finish_meta_btoi_state(device);
	radv_device_finish_meta_itoi_state(device);
	radv_device_finish_meta_cleari_state(device);
}
787 radv_device_init_meta_bufimage_state(struct radv_device
*device
)
791 result
= radv_device_init_meta_itob_state(device
);
792 if (result
!= VK_SUCCESS
)
795 result
= radv_device_init_meta_btoi_state(device
);
796 if (result
!= VK_SUCCESS
)
799 result
= radv_device_init_meta_itoi_state(device
);
800 if (result
!= VK_SUCCESS
)
803 result
= radv_device_init_meta_cleari_state(device
);
804 if (result
!= VK_SUCCESS
)
809 radv_device_finish_meta_itoi_state(device
);
811 radv_device_finish_meta_btoi_state(device
);
813 radv_device_finish_meta_itob_state(device
);
/* Save the caller's compute state before an image->image meta copy.
 * 16 bytes of push constants are saved, matching the itoi pipeline
 * layout's 16-byte push-constant range. */
void
radv_meta_begin_itoi(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore the compute state saved by radv_meta_begin_itoi(), including
 * the 16 bytes of push constants it preserved. */
void
radv_meta_end_itoi(struct radv_cmd_buffer *cmd_buffer,
		   struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
/* Save the caller's compute state before a buffer<->image meta copy.
 * 12 bytes of push constants are saved, matching the itob/btoi pipeline
 * layouts' 12-byte push-constant range. */
void
radv_meta_begin_bufimage(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 12);
}
/* Restore the compute state saved by radv_meta_begin_bufimage(),
 * including the 12 bytes of push constants it preserved. */
void
radv_meta_end_bufimage(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 12);
}
/* Save the caller's compute state before a compute image clear.
 * 16 bytes of push constants are saved, matching the cleari pipeline
 * layout's 16-byte push-constant range (the clear color). */
void
radv_meta_begin_cleari(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore the compute state saved by radv_meta_begin_cleari(),
 * including the 16 bytes of push constants it preserved. */
void
radv_meta_end_cleari(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
860 create_iview(struct radv_cmd_buffer
*cmd_buffer
,
861 struct radv_meta_blit2d_surf
*surf
,
862 VkImageUsageFlags usage
,
863 struct radv_image_view
*iview
)
866 radv_image_view_init(iview
, cmd_buffer
->device
,
867 &(VkImageViewCreateInfo
) {
868 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
869 .image
= radv_image_to_handle(surf
->image
),
870 .viewType
= VK_IMAGE_VIEW_TYPE_2D
,
871 .format
= surf
->format
,
872 .subresourceRange
= {
873 .aspectMask
= surf
->aspect_mask
,
874 .baseMipLevel
= surf
->level
,
876 .baseArrayLayer
= surf
->layer
,
879 }, cmd_buffer
, usage
);
883 create_bview(struct radv_cmd_buffer
*cmd_buffer
,
884 struct radv_buffer
*buffer
,
887 struct radv_buffer_view
*bview
)
889 radv_buffer_view_init(bview
, cmd_buffer
->device
,
890 &(VkBufferViewCreateInfo
) {
891 .sType
= VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
,
893 .buffer
= radv_buffer_to_handle(buffer
),
896 .range
= VK_WHOLE_SIZE
,
902 struct radv_image_view src_iview
;
903 struct radv_buffer_view dst_bview
;
907 itob_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
908 struct itob_temps
*tmp
)
910 struct radv_device
*device
= cmd_buffer
->device
;
912 radv_meta_push_descriptor_set(cmd_buffer
,
913 VK_PIPELINE_BIND_POINT_COMPUTE
,
914 device
->meta_state
.itob
.img_p_layout
,
916 2, /* descriptorWriteCount */
917 (VkWriteDescriptorSet
[]) {
919 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
921 .dstArrayElement
= 0,
922 .descriptorCount
= 1,
923 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
924 .pImageInfo
= (VkDescriptorImageInfo
[]) {
926 .sampler
= VK_NULL_HANDLE
,
927 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
928 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
933 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
935 .dstArrayElement
= 0,
936 .descriptorCount
= 1,
937 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
938 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->dst_bview
) },
944 itob_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
946 VkPipeline pipeline
=
947 cmd_buffer
->device
->meta_state
.itob
.pipeline
;
949 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
950 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
951 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
956 radv_meta_image_to_buffer(struct radv_cmd_buffer
*cmd_buffer
,
957 struct radv_meta_blit2d_surf
*src
,
958 struct radv_meta_blit2d_buffer
*dst
,
960 struct radv_meta_blit2d_rect
*rects
)
962 struct radv_device
*device
= cmd_buffer
->device
;
963 struct itob_temps temps
;
965 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
966 create_bview(cmd_buffer
, dst
->buffer
, dst
->offset
, dst
->format
, &temps
.dst_bview
);
967 itob_bind_descriptors(cmd_buffer
, &temps
);
969 itob_bind_pipeline(cmd_buffer
);
971 for (unsigned r
= 0; r
< num_rects
; ++r
) {
972 unsigned push_constants
[3] = {
977 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
978 device
->meta_state
.itob
.img_p_layout
,
979 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
982 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
987 struct radv_buffer_view src_bview
;
988 struct radv_image_view dst_iview
;
992 btoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
993 struct btoi_temps
*tmp
)
995 struct radv_device
*device
= cmd_buffer
->device
;
997 radv_meta_push_descriptor_set(cmd_buffer
,
998 VK_PIPELINE_BIND_POINT_COMPUTE
,
999 device
->meta_state
.btoi
.img_p_layout
,
1001 2, /* descriptorWriteCount */
1002 (VkWriteDescriptorSet
[]) {
1004 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1006 .dstArrayElement
= 0,
1007 .descriptorCount
= 1,
1008 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
1009 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->src_bview
) },
1012 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1014 .dstArrayElement
= 0,
1015 .descriptorCount
= 1,
1016 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1017 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1019 .sampler
= VK_NULL_HANDLE
,
1020 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1021 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1029 btoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1031 VkPipeline pipeline
=
1032 cmd_buffer
->device
->meta_state
.btoi
.pipeline
;
1034 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1035 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1036 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1041 radv_meta_buffer_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1042 struct radv_meta_blit2d_buffer
*src
,
1043 struct radv_meta_blit2d_surf
*dst
,
1045 struct radv_meta_blit2d_rect
*rects
)
1047 struct radv_device
*device
= cmd_buffer
->device
;
1048 struct btoi_temps temps
;
1050 create_bview(cmd_buffer
, src
->buffer
, src
->offset
, src
->format
, &temps
.src_bview
);
1051 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1052 btoi_bind_descriptors(cmd_buffer
, &temps
);
1054 btoi_bind_pipeline(cmd_buffer
);
1056 for (unsigned r
= 0; r
< num_rects
; ++r
) {
1057 unsigned push_constants
[3] = {
1062 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1063 device
->meta_state
.btoi
.img_p_layout
,
1064 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
1067 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
1072 struct radv_image_view src_iview
;
1073 struct radv_image_view dst_iview
;
1077 itoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
1078 struct itoi_temps
*tmp
)
1080 struct radv_device
*device
= cmd_buffer
->device
;
1082 radv_meta_push_descriptor_set(cmd_buffer
,
1083 VK_PIPELINE_BIND_POINT_COMPUTE
,
1084 device
->meta_state
.itoi
.img_p_layout
,
1086 2, /* descriptorWriteCount */
1087 (VkWriteDescriptorSet
[]) {
1089 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1091 .dstArrayElement
= 0,
1092 .descriptorCount
= 1,
1093 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
1094 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1096 .sampler
= VK_NULL_HANDLE
,
1097 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
1098 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1103 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1105 .dstArrayElement
= 0,
1106 .descriptorCount
= 1,
1107 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1108 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1110 .sampler
= VK_NULL_HANDLE
,
1111 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1112 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1120 itoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1122 VkPipeline pipeline
=
1123 cmd_buffer
->device
->meta_state
.itoi
.pipeline
;
1125 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1126 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1127 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1132 radv_meta_image_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1133 struct radv_meta_blit2d_surf
*src
,
1134 struct radv_meta_blit2d_surf
*dst
,
1136 struct radv_meta_blit2d_rect
*rects
)
1138 struct radv_device
*device
= cmd_buffer
->device
;
1139 struct itoi_temps temps
;
1141 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
1142 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1144 itoi_bind_descriptors(cmd_buffer
, &temps
);
1146 itoi_bind_pipeline(cmd_buffer
);
1148 for (unsigned r
= 0; r
< num_rects
; ++r
) {
1149 unsigned push_constants
[4] = {
1155 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1156 device
->meta_state
.itoi
.img_p_layout
,
1157 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16,
1160 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
1165 cleari_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
1166 struct radv_image_view
*dst_iview
)
1168 struct radv_device
*device
= cmd_buffer
->device
;
1170 radv_meta_push_descriptor_set(cmd_buffer
,
1171 VK_PIPELINE_BIND_POINT_COMPUTE
,
1172 device
->meta_state
.cleari
.img_p_layout
,
1174 1, /* descriptorWriteCount */
1175 (VkWriteDescriptorSet
[]) {
1177 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1179 .dstArrayElement
= 0,
1180 .descriptorCount
= 1,
1181 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1182 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1184 .sampler
= VK_NULL_HANDLE
,
1185 .imageView
= radv_image_view_to_handle(dst_iview
),
1186 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1194 cleari_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1196 VkPipeline pipeline
=
1197 cmd_buffer
->device
->meta_state
.cleari
.pipeline
;
1199 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1200 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1201 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1206 radv_meta_clear_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1207 struct radv_meta_blit2d_surf
*dst
,
1208 const VkClearColorValue
*clear_color
)
1210 struct radv_device
*device
= cmd_buffer
->device
;
1211 struct radv_image_view dst_iview
;
1213 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &dst_iview
);
1214 cleari_bind_descriptors(cmd_buffer
, &dst_iview
);
1216 cleari_bind_pipeline(cmd_buffer
);
1218 unsigned push_constants
[4] = {
1219 clear_color
->uint32
[0],
1220 clear_color
->uint32
[1],
1221 clear_color
->uint32
[2],
1222 clear_color
->uint32
[3],
1225 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1226 device
->meta_state
.cleari
.img_p_layout
,
1227 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16,
1230 radv_unaligned_dispatch(cmd_buffer
, dst
->image
->info
.width
, dst
->image
->info
.height
, 1);