2 * Copyright © 2016 Red Hat.
3 * Copyright © 2016 Bas Nieuwenhuizen
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
24 #include "radv_meta.h"
25 #include "nir/nir_builder.h"
28 * GFX queue: Compute shader implementation of image->buffer copy
29 * Compute queue: implementation also of buffer->image, image->image, and image clear.
33 build_nir_itob_compute_shader(struct radv_device
*dev
)
36 const struct glsl_type
*sampler_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
40 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
44 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
45 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itob_cs");
46 b
.shader
->info
->cs
.local_size
[0] = 16;
47 b
.shader
->info
->cs
.local_size
[1] = 16;
48 b
.shader
->info
->cs
.local_size
[2] = 1;
49 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
50 sampler_type
, "s_tex");
51 input_img
->data
.descriptor_set
= 0;
52 input_img
->data
.binding
= 0;
54 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
56 output_img
->data
.descriptor_set
= 0;
57 output_img
->data
.binding
= 1;
59 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
60 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
61 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
62 b
.shader
->info
->cs
.local_size
[0],
63 b
.shader
->info
->cs
.local_size
[1],
64 b
.shader
->info
->cs
.local_size
[2], 0);
66 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
70 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
71 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
72 offset
->num_components
= 2;
73 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
74 nir_builder_instr_insert(&b
, &offset
->instr
);
76 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
77 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
78 stride
->num_components
= 1;
79 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
80 nir_builder_instr_insert(&b
, &stride
->instr
);
82 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
84 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
85 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
86 tex
->op
= nir_texop_txf
;
87 tex
->src
[0].src_type
= nir_tex_src_coord
;
88 tex
->src
[0].src
= nir_src_for_ssa(nir_channels(&b
, img_coord
, 0x3));
89 tex
->src
[1].src_type
= nir_tex_src_lod
;
90 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
91 tex
->dest_type
= nir_type_float
;
92 tex
->is_array
= false;
93 tex
->coord_components
= 2;
94 tex
->texture
= nir_deref_var_create(tex
, input_img
);
97 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
98 nir_builder_instr_insert(&b
, &tex
->instr
);
100 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
101 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
103 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
104 tmp
= nir_iadd(&b
, tmp
, pos_x
);
106 nir_ssa_def
*coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
108 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
109 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
110 store
->src
[0] = nir_src_for_ssa(coord
);
111 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
112 store
->src
[2] = nir_src_for_ssa(outval
);
113 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
115 nir_builder_instr_insert(&b
, &store
->instr
);
119 /* Image to buffer - don't write use image accessors */
121 radv_device_init_meta_itob_state(struct radv_device
*device
)
124 struct radv_shader_module cs
= { .nir
= NULL
};
126 zero(device
->meta_state
.itob
);
128 cs
.nir
= build_nir_itob_compute_shader(device
);
131 * two descriptors one for the image being sampled
132 * one for the buffer being written.
134 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
135 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
136 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
138 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
141 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
142 .descriptorCount
= 1,
143 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
144 .pImmutableSamplers
= NULL
148 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
149 .descriptorCount
= 1,
150 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
151 .pImmutableSamplers
= NULL
156 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
158 &device
->meta_state
.alloc
,
159 &device
->meta_state
.itob
.img_ds_layout
);
160 if (result
!= VK_SUCCESS
)
164 VkPipelineLayoutCreateInfo pl_create_info
= {
165 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
167 .pSetLayouts
= &device
->meta_state
.itob
.img_ds_layout
,
168 .pushConstantRangeCount
= 1,
169 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
172 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
174 &device
->meta_state
.alloc
,
175 &device
->meta_state
.itob
.img_p_layout
);
176 if (result
!= VK_SUCCESS
)
181 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
182 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
183 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
184 .module
= radv_shader_module_to_handle(&cs
),
186 .pSpecializationInfo
= NULL
,
189 VkComputePipelineCreateInfo vk_pipeline_info
= {
190 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
191 .stage
= pipeline_shader_stage
,
193 .layout
= device
->meta_state
.itob
.img_p_layout
,
196 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
197 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
198 1, &vk_pipeline_info
, NULL
,
199 &device
->meta_state
.itob
.pipeline
);
200 if (result
!= VK_SUCCESS
)
211 radv_device_finish_meta_itob_state(struct radv_device
*device
)
213 if (device
->meta_state
.itob
.img_p_layout
) {
214 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
215 device
->meta_state
.itob
.img_p_layout
,
216 &device
->meta_state
.alloc
);
218 if (device
->meta_state
.itob
.img_ds_layout
) {
219 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
220 device
->meta_state
.itob
.img_ds_layout
,
221 &device
->meta_state
.alloc
);
223 if (device
->meta_state
.itob
.pipeline
) {
224 radv_DestroyPipeline(radv_device_to_handle(device
),
225 device
->meta_state
.itob
.pipeline
,
226 &device
->meta_state
.alloc
);
231 build_nir_btoi_compute_shader(struct radv_device
*dev
)
234 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
238 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
242 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
243 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_btoi_cs");
244 b
.shader
->info
->cs
.local_size
[0] = 16;
245 b
.shader
->info
->cs
.local_size
[1] = 16;
246 b
.shader
->info
->cs
.local_size
[2] = 1;
247 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
249 input_img
->data
.descriptor_set
= 0;
250 input_img
->data
.binding
= 0;
252 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
253 img_type
, "out_img");
254 output_img
->data
.descriptor_set
= 0;
255 output_img
->data
.binding
= 1;
257 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
258 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
259 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
260 b
.shader
->info
->cs
.local_size
[0],
261 b
.shader
->info
->cs
.local_size
[1],
262 b
.shader
->info
->cs
.local_size
[2], 0);
264 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
266 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
267 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
268 offset
->num_components
= 2;
269 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
270 nir_builder_instr_insert(&b
, &offset
->instr
);
272 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
273 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
274 stride
->num_components
= 1;
275 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
276 nir_builder_instr_insert(&b
, &stride
->instr
);
278 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
279 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
281 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
282 tmp
= nir_iadd(&b
, tmp
, pos_x
);
284 nir_ssa_def
*buf_coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
286 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
288 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
289 tex
->sampler_dim
= GLSL_SAMPLER_DIM_BUF
;
290 tex
->op
= nir_texop_txf
;
291 tex
->src
[0].src_type
= nir_tex_src_coord
;
292 tex
->src
[0].src
= nir_src_for_ssa(nir_channels(&b
, buf_coord
, 1));
293 tex
->src
[1].src_type
= nir_tex_src_lod
;
294 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
295 tex
->dest_type
= nir_type_float
;
296 tex
->is_array
= false;
297 tex
->coord_components
= 1;
298 tex
->texture
= nir_deref_var_create(tex
, input_img
);
301 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
302 nir_builder_instr_insert(&b
, &tex
->instr
);
304 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
305 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
306 store
->src
[0] = nir_src_for_ssa(img_coord
);
307 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
308 store
->src
[2] = nir_src_for_ssa(outval
);
309 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
311 nir_builder_instr_insert(&b
, &store
->instr
);
315 /* Buffer to image - don't write use image accessors */
317 radv_device_init_meta_btoi_state(struct radv_device
*device
)
320 struct radv_shader_module cs
= { .nir
= NULL
};
322 zero(device
->meta_state
.btoi
);
324 cs
.nir
= build_nir_btoi_compute_shader(device
);
327 * two descriptors one for the image being sampled
328 * one for the buffer being written.
330 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
331 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
332 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
334 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
337 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
338 .descriptorCount
= 1,
339 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
340 .pImmutableSamplers
= NULL
344 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
345 .descriptorCount
= 1,
346 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
347 .pImmutableSamplers
= NULL
352 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
354 &device
->meta_state
.alloc
,
355 &device
->meta_state
.btoi
.img_ds_layout
);
356 if (result
!= VK_SUCCESS
)
360 VkPipelineLayoutCreateInfo pl_create_info
= {
361 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
363 .pSetLayouts
= &device
->meta_state
.btoi
.img_ds_layout
,
364 .pushConstantRangeCount
= 1,
365 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
368 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
370 &device
->meta_state
.alloc
,
371 &device
->meta_state
.btoi
.img_p_layout
);
372 if (result
!= VK_SUCCESS
)
377 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
378 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
379 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
380 .module
= radv_shader_module_to_handle(&cs
),
382 .pSpecializationInfo
= NULL
,
385 VkComputePipelineCreateInfo vk_pipeline_info
= {
386 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
387 .stage
= pipeline_shader_stage
,
389 .layout
= device
->meta_state
.btoi
.img_p_layout
,
392 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
393 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
394 1, &vk_pipeline_info
, NULL
,
395 &device
->meta_state
.btoi
.pipeline
);
396 if (result
!= VK_SUCCESS
)
407 radv_device_finish_meta_btoi_state(struct radv_device
*device
)
409 if (device
->meta_state
.btoi
.img_p_layout
) {
410 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
411 device
->meta_state
.btoi
.img_p_layout
,
412 &device
->meta_state
.alloc
);
414 if (device
->meta_state
.btoi
.img_ds_layout
) {
415 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
416 device
->meta_state
.btoi
.img_ds_layout
,
417 &device
->meta_state
.alloc
);
419 if (device
->meta_state
.btoi
.pipeline
) {
420 radv_DestroyPipeline(radv_device_to_handle(device
),
421 device
->meta_state
.btoi
.pipeline
,
422 &device
->meta_state
.alloc
);
427 build_nir_itoi_compute_shader(struct radv_device
*dev
)
430 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
434 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
438 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
439 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itoi_cs");
440 b
.shader
->info
->cs
.local_size
[0] = 16;
441 b
.shader
->info
->cs
.local_size
[1] = 16;
442 b
.shader
->info
->cs
.local_size
[2] = 1;
443 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
445 input_img
->data
.descriptor_set
= 0;
446 input_img
->data
.binding
= 0;
448 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
449 img_type
, "out_img");
450 output_img
->data
.descriptor_set
= 0;
451 output_img
->data
.binding
= 1;
453 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
454 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
455 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
456 b
.shader
->info
->cs
.local_size
[0],
457 b
.shader
->info
->cs
.local_size
[1],
458 b
.shader
->info
->cs
.local_size
[2], 0);
460 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
462 nir_intrinsic_instr
*src_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
463 src_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
464 src_offset
->num_components
= 2;
465 nir_ssa_dest_init(&src_offset
->instr
, &src_offset
->dest
, 2, 32, "src_offset");
466 nir_builder_instr_insert(&b
, &src_offset
->instr
);
468 nir_intrinsic_instr
*dst_offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
469 dst_offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
470 dst_offset
->num_components
= 2;
471 nir_ssa_dest_init(&dst_offset
->instr
, &dst_offset
->dest
, 2, 32, "dst_offset");
472 nir_builder_instr_insert(&b
, &dst_offset
->instr
);
474 nir_ssa_def
*src_coord
= nir_iadd(&b
, global_id
, &src_offset
->dest
.ssa
);
476 nir_ssa_def
*dst_coord
= nir_iadd(&b
, global_id
, &dst_offset
->dest
.ssa
);
478 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
479 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
480 tex
->op
= nir_texop_txf
;
481 tex
->src
[0].src_type
= nir_tex_src_coord
;
482 tex
->src
[0].src
= nir_src_for_ssa(nir_channels(&b
, src_coord
, 3));
483 tex
->src
[1].src_type
= nir_tex_src_lod
;
484 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
485 tex
->dest_type
= nir_type_float
;
486 tex
->is_array
= false;
487 tex
->coord_components
= 2;
488 tex
->texture
= nir_deref_var_create(tex
, input_img
);
491 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
492 nir_builder_instr_insert(&b
, &tex
->instr
);
494 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
495 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
496 store
->src
[0] = nir_src_for_ssa(dst_coord
);
497 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
498 store
->src
[2] = nir_src_for_ssa(outval
);
499 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
501 nir_builder_instr_insert(&b
, &store
->instr
);
505 /* image to image - don't write use image accessors */
507 radv_device_init_meta_itoi_state(struct radv_device
*device
)
510 struct radv_shader_module cs
= { .nir
= NULL
};
512 zero(device
->meta_state
.itoi
);
514 cs
.nir
= build_nir_itoi_compute_shader(device
);
517 * two descriptors one for the image being sampled
518 * one for the buffer being written.
520 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
521 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
522 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
524 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
527 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
528 .descriptorCount
= 1,
529 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
530 .pImmutableSamplers
= NULL
534 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
535 .descriptorCount
= 1,
536 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
537 .pImmutableSamplers
= NULL
542 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
544 &device
->meta_state
.alloc
,
545 &device
->meta_state
.itoi
.img_ds_layout
);
546 if (result
!= VK_SUCCESS
)
550 VkPipelineLayoutCreateInfo pl_create_info
= {
551 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
553 .pSetLayouts
= &device
->meta_state
.itoi
.img_ds_layout
,
554 .pushConstantRangeCount
= 1,
555 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
558 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
560 &device
->meta_state
.alloc
,
561 &device
->meta_state
.itoi
.img_p_layout
);
562 if (result
!= VK_SUCCESS
)
567 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
568 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
569 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
570 .module
= radv_shader_module_to_handle(&cs
),
572 .pSpecializationInfo
= NULL
,
575 VkComputePipelineCreateInfo vk_pipeline_info
= {
576 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
577 .stage
= pipeline_shader_stage
,
579 .layout
= device
->meta_state
.itoi
.img_p_layout
,
582 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
583 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
584 1, &vk_pipeline_info
, NULL
,
585 &device
->meta_state
.itoi
.pipeline
);
586 if (result
!= VK_SUCCESS
)
597 radv_device_finish_meta_itoi_state(struct radv_device
*device
)
599 if (device
->meta_state
.itoi
.img_p_layout
) {
600 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
601 device
->meta_state
.itoi
.img_p_layout
,
602 &device
->meta_state
.alloc
);
604 if (device
->meta_state
.itoi
.img_ds_layout
) {
605 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
606 device
->meta_state
.itoi
.img_ds_layout
,
607 &device
->meta_state
.alloc
);
609 if (device
->meta_state
.itoi
.pipeline
) {
610 radv_DestroyPipeline(radv_device_to_handle(device
),
611 device
->meta_state
.itoi
.pipeline
,
612 &device
->meta_state
.alloc
);
617 build_nir_cleari_compute_shader(struct radv_device
*dev
)
620 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
624 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
625 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_cleari_cs");
626 b
.shader
->info
->cs
.local_size
[0] = 16;
627 b
.shader
->info
->cs
.local_size
[1] = 16;
628 b
.shader
->info
->cs
.local_size
[2] = 1;
630 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
631 img_type
, "out_img");
632 output_img
->data
.descriptor_set
= 0;
633 output_img
->data
.binding
= 0;
635 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
636 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
637 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
638 b
.shader
->info
->cs
.local_size
[0],
639 b
.shader
->info
->cs
.local_size
[1],
640 b
.shader
->info
->cs
.local_size
[2], 0);
642 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
644 nir_intrinsic_instr
*clear_val
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
645 clear_val
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
646 clear_val
->num_components
= 4;
647 nir_ssa_dest_init(&clear_val
->instr
, &clear_val
->dest
, 4, 32, "clear_value");
648 nir_builder_instr_insert(&b
, &clear_val
->instr
);
650 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
651 store
->src
[0] = nir_src_for_ssa(global_id
);
652 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
653 store
->src
[2] = nir_src_for_ssa(&clear_val
->dest
.ssa
);
654 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
656 nir_builder_instr_insert(&b
, &store
->instr
);
661 radv_device_init_meta_cleari_state(struct radv_device
*device
)
664 struct radv_shader_module cs
= { .nir
= NULL
};
666 zero(device
->meta_state
.cleari
);
668 cs
.nir
= build_nir_cleari_compute_shader(device
);
671 * two descriptors one for the image being sampled
672 * one for the buffer being written.
674 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
675 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
676 .flags
= VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
,
678 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
681 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
682 .descriptorCount
= 1,
683 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
684 .pImmutableSamplers
= NULL
689 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
691 &device
->meta_state
.alloc
,
692 &device
->meta_state
.cleari
.img_ds_layout
);
693 if (result
!= VK_SUCCESS
)
697 VkPipelineLayoutCreateInfo pl_create_info
= {
698 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
700 .pSetLayouts
= &device
->meta_state
.cleari
.img_ds_layout
,
701 .pushConstantRangeCount
= 1,
702 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16},
705 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
707 &device
->meta_state
.alloc
,
708 &device
->meta_state
.cleari
.img_p_layout
);
709 if (result
!= VK_SUCCESS
)
714 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
715 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
716 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
717 .module
= radv_shader_module_to_handle(&cs
),
719 .pSpecializationInfo
= NULL
,
722 VkComputePipelineCreateInfo vk_pipeline_info
= {
723 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
724 .stage
= pipeline_shader_stage
,
726 .layout
= device
->meta_state
.cleari
.img_p_layout
,
729 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
730 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
731 1, &vk_pipeline_info
, NULL
,
732 &device
->meta_state
.cleari
.pipeline
);
733 if (result
!= VK_SUCCESS
)
744 radv_device_finish_meta_cleari_state(struct radv_device
*device
)
746 if (device
->meta_state
.cleari
.img_p_layout
) {
747 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
748 device
->meta_state
.cleari
.img_p_layout
,
749 &device
->meta_state
.alloc
);
751 if (device
->meta_state
.cleari
.img_ds_layout
) {
752 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
753 device
->meta_state
.cleari
.img_ds_layout
,
754 &device
->meta_state
.alloc
);
756 if (device
->meta_state
.cleari
.pipeline
) {
757 radv_DestroyPipeline(radv_device_to_handle(device
),
758 device
->meta_state
.cleari
.pipeline
,
759 &device
->meta_state
.alloc
);
/* Tear down all buffer/image meta states (itob, btoi, itoi, cleari). */
void
radv_device_finish_meta_bufimage_state(struct radv_device *device)
{
	radv_device_finish_meta_itob_state(device);
	radv_device_finish_meta_btoi_state(device);
	radv_device_finish_meta_itoi_state(device);
	radv_device_finish_meta_cleari_state(device);
}
773 radv_device_init_meta_bufimage_state(struct radv_device
*device
)
777 result
= radv_device_init_meta_itob_state(device
);
778 if (result
!= VK_SUCCESS
)
781 result
= radv_device_init_meta_btoi_state(device
);
782 if (result
!= VK_SUCCESS
)
785 result
= radv_device_init_meta_itoi_state(device
);
786 if (result
!= VK_SUCCESS
)
789 result
= radv_device_init_meta_cleari_state(device
);
790 if (result
!= VK_SUCCESS
)
795 radv_device_finish_meta_itoi_state(device
);
797 radv_device_finish_meta_btoi_state(device
);
799 radv_device_finish_meta_itob_state(device
);
/* Save compute state (16 bytes of push constants) before an itoi meta op. */
void
radv_meta_begin_itoi(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore compute state (16 bytes of push constants) after an itoi meta op. */
void
radv_meta_end_itoi(struct radv_cmd_buffer *cmd_buffer,
		   struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
/* Save compute state (12 bytes of push constants) before a buffer<->image
 * meta op.
 */
void
radv_meta_begin_bufimage(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 12);
}
/* Restore compute state (12 bytes of push constants) after a buffer<->image
 * meta op.
 */
void
radv_meta_end_bufimage(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 12);
}
/* Save compute state (16 bytes of push constants) before a cleari meta op. */
void
radv_meta_begin_cleari(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
/* Restore compute state (16 bytes of push constants) after a cleari meta op. */
void
radv_meta_end_cleari(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
846 create_iview(struct radv_cmd_buffer
*cmd_buffer
,
847 struct radv_meta_blit2d_surf
*surf
,
848 VkImageUsageFlags usage
,
849 struct radv_image_view
*iview
)
852 radv_image_view_init(iview
, cmd_buffer
->device
,
853 &(VkImageViewCreateInfo
) {
854 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
855 .image
= radv_image_to_handle(surf
->image
),
856 .viewType
= VK_IMAGE_VIEW_TYPE_2D
,
857 .format
= surf
->format
,
858 .subresourceRange
= {
859 .aspectMask
= surf
->aspect_mask
,
860 .baseMipLevel
= surf
->level
,
862 .baseArrayLayer
= surf
->layer
,
865 }, cmd_buffer
, usage
);
869 create_bview(struct radv_cmd_buffer
*cmd_buffer
,
870 struct radv_buffer
*buffer
,
873 struct radv_buffer_view
*bview
)
875 radv_buffer_view_init(bview
, cmd_buffer
->device
,
876 &(VkBufferViewCreateInfo
) {
877 .sType
= VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
,
879 .buffer
= radv_buffer_to_handle(buffer
),
882 .range
= VK_WHOLE_SIZE
,
888 struct radv_image_view src_iview
;
889 struct radv_buffer_view dst_bview
;
893 itob_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
894 struct itob_temps
*tmp
)
896 struct radv_device
*device
= cmd_buffer
->device
;
898 radv_meta_push_descriptor_set(cmd_buffer
,
899 VK_PIPELINE_BIND_POINT_COMPUTE
,
900 device
->meta_state
.itob
.img_p_layout
,
902 2, /* descriptorWriteCount */
903 (VkWriteDescriptorSet
[]) {
905 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
907 .dstArrayElement
= 0,
908 .descriptorCount
= 1,
909 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
910 .pImageInfo
= (VkDescriptorImageInfo
[]) {
912 .sampler
= VK_NULL_HANDLE
,
913 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
914 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
919 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
921 .dstArrayElement
= 0,
922 .descriptorCount
= 1,
923 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
924 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->dst_bview
) },
930 itob_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
932 VkPipeline pipeline
=
933 cmd_buffer
->device
->meta_state
.itob
.pipeline
;
935 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
936 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
937 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
942 radv_meta_image_to_buffer(struct radv_cmd_buffer
*cmd_buffer
,
943 struct radv_meta_blit2d_surf
*src
,
944 struct radv_meta_blit2d_buffer
*dst
,
946 struct radv_meta_blit2d_rect
*rects
)
948 struct radv_device
*device
= cmd_buffer
->device
;
949 struct itob_temps temps
;
951 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
952 create_bview(cmd_buffer
, dst
->buffer
, dst
->offset
, dst
->format
, &temps
.dst_bview
);
953 itob_bind_descriptors(cmd_buffer
, &temps
);
955 itob_bind_pipeline(cmd_buffer
);
957 for (unsigned r
= 0; r
< num_rects
; ++r
) {
958 unsigned push_constants
[3] = {
963 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
964 device
->meta_state
.itob
.img_p_layout
,
965 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
968 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
973 struct radv_buffer_view src_bview
;
974 struct radv_image_view dst_iview
;
978 btoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
979 struct btoi_temps
*tmp
)
981 struct radv_device
*device
= cmd_buffer
->device
;
983 radv_meta_push_descriptor_set(cmd_buffer
,
984 VK_PIPELINE_BIND_POINT_COMPUTE
,
985 device
->meta_state
.btoi
.img_p_layout
,
987 2, /* descriptorWriteCount */
988 (VkWriteDescriptorSet
[]) {
990 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
992 .dstArrayElement
= 0,
993 .descriptorCount
= 1,
994 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
995 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->src_bview
) },
998 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1000 .dstArrayElement
= 0,
1001 .descriptorCount
= 1,
1002 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1003 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1005 .sampler
= VK_NULL_HANDLE
,
1006 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1007 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1015 btoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1017 VkPipeline pipeline
=
1018 cmd_buffer
->device
->meta_state
.btoi
.pipeline
;
1020 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1021 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1022 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1027 radv_meta_buffer_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1028 struct radv_meta_blit2d_buffer
*src
,
1029 struct radv_meta_blit2d_surf
*dst
,
1031 struct radv_meta_blit2d_rect
*rects
)
1033 struct radv_device
*device
= cmd_buffer
->device
;
1034 struct btoi_temps temps
;
1036 create_bview(cmd_buffer
, src
->buffer
, src
->offset
, src
->format
, &temps
.src_bview
);
1037 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1038 btoi_bind_descriptors(cmd_buffer
, &temps
);
1040 btoi_bind_pipeline(cmd_buffer
);
1042 for (unsigned r
= 0; r
< num_rects
; ++r
) {
1043 unsigned push_constants
[3] = {
1048 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1049 device
->meta_state
.btoi
.img_p_layout
,
1050 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
1053 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
1058 struct radv_image_view src_iview
;
1059 struct radv_image_view dst_iview
;
1063 itoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
1064 struct itoi_temps
*tmp
)
1066 struct radv_device
*device
= cmd_buffer
->device
;
1068 radv_meta_push_descriptor_set(cmd_buffer
,
1069 VK_PIPELINE_BIND_POINT_COMPUTE
,
1070 device
->meta_state
.itoi
.img_p_layout
,
1072 2, /* descriptorWriteCount */
1073 (VkWriteDescriptorSet
[]) {
1075 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1077 .dstArrayElement
= 0,
1078 .descriptorCount
= 1,
1079 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
1080 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1082 .sampler
= VK_NULL_HANDLE
,
1083 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
1084 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1089 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1091 .dstArrayElement
= 0,
1092 .descriptorCount
= 1,
1093 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1094 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1096 .sampler
= VK_NULL_HANDLE
,
1097 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
1098 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1106 itoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1108 VkPipeline pipeline
=
1109 cmd_buffer
->device
->meta_state
.itoi
.pipeline
;
1111 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1112 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1113 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1118 radv_meta_image_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1119 struct radv_meta_blit2d_surf
*src
,
1120 struct radv_meta_blit2d_surf
*dst
,
1122 struct radv_meta_blit2d_rect
*rects
)
1124 struct radv_device
*device
= cmd_buffer
->device
;
1125 struct itoi_temps temps
;
1127 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
1128 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
1130 itoi_bind_descriptors(cmd_buffer
, &temps
);
1132 itoi_bind_pipeline(cmd_buffer
);
1134 for (unsigned r
= 0; r
< num_rects
; ++r
) {
1135 unsigned push_constants
[4] = {
1141 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1142 device
->meta_state
.itoi
.img_p_layout
,
1143 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16,
1146 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
1151 cleari_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
1152 struct radv_image_view
*dst_iview
)
1154 struct radv_device
*device
= cmd_buffer
->device
;
1156 radv_meta_push_descriptor_set(cmd_buffer
,
1157 VK_PIPELINE_BIND_POINT_COMPUTE
,
1158 device
->meta_state
.cleari
.img_p_layout
,
1160 1, /* descriptorWriteCount */
1161 (VkWriteDescriptorSet
[]) {
1163 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
1165 .dstArrayElement
= 0,
1166 .descriptorCount
= 1,
1167 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
1168 .pImageInfo
= (VkDescriptorImageInfo
[]) {
1170 .sampler
= VK_NULL_HANDLE
,
1171 .imageView
= radv_image_view_to_handle(dst_iview
),
1172 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
1180 cleari_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
1182 VkPipeline pipeline
=
1183 cmd_buffer
->device
->meta_state
.cleari
.pipeline
;
1185 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
1186 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
1187 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
1192 radv_meta_clear_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
1193 struct radv_meta_blit2d_surf
*dst
,
1194 const VkClearColorValue
*clear_color
)
1196 struct radv_device
*device
= cmd_buffer
->device
;
1197 struct radv_image_view dst_iview
;
1199 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &dst_iview
);
1200 cleari_bind_descriptors(cmd_buffer
, &dst_iview
);
1202 cleari_bind_pipeline(cmd_buffer
);
1204 unsigned push_constants
[4] = {
1205 clear_color
->uint32
[0],
1206 clear_color
->uint32
[1],
1207 clear_color
->uint32
[2],
1208 clear_color
->uint32
[3],
1211 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
1212 device
->meta_state
.cleari
.img_p_layout
,
1213 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 16,
1216 radv_unaligned_dispatch(cmd_buffer
, dst
->image
->info
.width
, dst
->image
->info
.height
, 1);