/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
24 #include "radv_meta.h"
25 #include "nir/nir_builder.h"
28 * Compute shader implementation of image->buffer copy.
32 build_nir_itob_compute_shader(struct radv_device
*dev
)
35 const struct glsl_type
*sampler_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
39 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
43 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
44 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_itob_cs");
45 b
.shader
->info
->cs
.local_size
[0] = 16;
46 b
.shader
->info
->cs
.local_size
[1] = 16;
47 b
.shader
->info
->cs
.local_size
[2] = 1;
48 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
49 sampler_type
, "s_tex");
50 input_img
->data
.descriptor_set
= 0;
51 input_img
->data
.binding
= 0;
53 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
55 output_img
->data
.descriptor_set
= 0;
56 output_img
->data
.binding
= 1;
58 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
59 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
60 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
61 b
.shader
->info
->cs
.local_size
[0],
62 b
.shader
->info
->cs
.local_size
[1],
63 b
.shader
->info
->cs
.local_size
[2], 0);
65 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
69 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
70 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
71 offset
->num_components
= 2;
72 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
73 nir_builder_instr_insert(&b
, &offset
->instr
);
75 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
76 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
77 stride
->num_components
= 1;
78 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
79 nir_builder_instr_insert(&b
, &stride
->instr
);
81 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
83 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
84 tex
->sampler_dim
= GLSL_SAMPLER_DIM_2D
;
85 tex
->op
= nir_texop_txf
;
86 tex
->src
[0].src_type
= nir_tex_src_coord
;
87 tex
->src
[0].src
= nir_src_for_ssa(img_coord
);
88 tex
->src
[1].src_type
= nir_tex_src_lod
;
89 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
90 tex
->dest_type
= nir_type_float
;
91 tex
->is_array
= false;
92 tex
->coord_components
= 2;
93 tex
->texture
= nir_deref_var_create(tex
, input_img
);
96 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
97 nir_builder_instr_insert(&b
, &tex
->instr
);
99 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
100 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
102 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
103 tmp
= nir_iadd(&b
, tmp
, pos_x
);
105 nir_ssa_def
*coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
107 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
108 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
109 store
->src
[0] = nir_src_for_ssa(coord
);
110 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
111 store
->src
[2] = nir_src_for_ssa(outval
);
112 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
114 nir_builder_instr_insert(&b
, &store
->instr
);
118 /* Image to buffer - don't write use image accessors */
120 radv_device_init_meta_itob_state(struct radv_device
*device
)
123 struct radv_shader_module cs
= { .nir
= NULL
};
125 zero(device
->meta_state
.itob
);
127 cs
.nir
= build_nir_itob_compute_shader(device
);
130 * two descriptors one for the image being sampled
131 * one for the buffer being written.
133 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
134 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
136 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
139 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
140 .descriptorCount
= 1,
141 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
142 .pImmutableSamplers
= NULL
146 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
147 .descriptorCount
= 1,
148 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
149 .pImmutableSamplers
= NULL
154 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
156 &device
->meta_state
.alloc
,
157 &device
->meta_state
.itob
.img_ds_layout
);
158 if (result
!= VK_SUCCESS
)
162 VkPipelineLayoutCreateInfo pl_create_info
= {
163 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
165 .pSetLayouts
= &device
->meta_state
.itob
.img_ds_layout
,
166 .pushConstantRangeCount
= 1,
167 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
170 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
172 &device
->meta_state
.alloc
,
173 &device
->meta_state
.itob
.img_p_layout
);
174 if (result
!= VK_SUCCESS
)
179 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
180 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
181 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
182 .module
= radv_shader_module_to_handle(&cs
),
184 .pSpecializationInfo
= NULL
,
187 VkComputePipelineCreateInfo vk_pipeline_info
= {
188 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
189 .stage
= pipeline_shader_stage
,
191 .layout
= device
->meta_state
.itob
.img_p_layout
,
194 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
195 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
196 1, &vk_pipeline_info
, NULL
,
197 &device
->meta_state
.itob
.pipeline
);
198 if (result
!= VK_SUCCESS
)
209 radv_device_finish_meta_itob_state(struct radv_device
*device
)
211 if (device
->meta_state
.itob
.img_p_layout
) {
212 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
213 device
->meta_state
.itob
.img_p_layout
,
214 &device
->meta_state
.alloc
);
216 if (device
->meta_state
.itob
.img_ds_layout
) {
217 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
218 device
->meta_state
.itob
.img_ds_layout
,
219 &device
->meta_state
.alloc
);
221 if (device
->meta_state
.itob
.pipeline
) {
222 radv_DestroyPipeline(radv_device_to_handle(device
),
223 device
->meta_state
.itob
.pipeline
,
224 &device
->meta_state
.alloc
);
229 build_nir_btoi_compute_shader(struct radv_device
*dev
)
232 const struct glsl_type
*buf_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_BUF
,
236 const struct glsl_type
*img_type
= glsl_sampler_type(GLSL_SAMPLER_DIM_2D
,
240 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_COMPUTE
, NULL
);
241 b
.shader
->info
->name
= ralloc_strdup(b
.shader
, "meta_btoi_cs");
242 b
.shader
->info
->cs
.local_size
[0] = 16;
243 b
.shader
->info
->cs
.local_size
[1] = 16;
244 b
.shader
->info
->cs
.local_size
[2] = 1;
245 nir_variable
*input_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
247 input_img
->data
.descriptor_set
= 0;
248 input_img
->data
.binding
= 0;
250 nir_variable
*output_img
= nir_variable_create(b
.shader
, nir_var_uniform
,
251 img_type
, "out_img");
252 output_img
->data
.descriptor_set
= 0;
253 output_img
->data
.binding
= 1;
255 nir_ssa_def
*invoc_id
= nir_load_system_value(&b
, nir_intrinsic_load_local_invocation_id
, 0);
256 nir_ssa_def
*wg_id
= nir_load_system_value(&b
, nir_intrinsic_load_work_group_id
, 0);
257 nir_ssa_def
*block_size
= nir_imm_ivec4(&b
,
258 b
.shader
->info
->cs
.local_size
[0],
259 b
.shader
->info
->cs
.local_size
[1],
260 b
.shader
->info
->cs
.local_size
[2], 0);
262 nir_ssa_def
*global_id
= nir_iadd(&b
, nir_imul(&b
, wg_id
, block_size
), invoc_id
);
264 nir_intrinsic_instr
*offset
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
265 offset
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 0));
266 offset
->num_components
= 2;
267 nir_ssa_dest_init(&offset
->instr
, &offset
->dest
, 2, 32, "offset");
268 nir_builder_instr_insert(&b
, &offset
->instr
);
270 nir_intrinsic_instr
*stride
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_load_push_constant
);
271 stride
->src
[0] = nir_src_for_ssa(nir_imm_int(&b
, 8));
272 stride
->num_components
= 1;
273 nir_ssa_dest_init(&stride
->instr
, &stride
->dest
, 1, 32, "stride");
274 nir_builder_instr_insert(&b
, &stride
->instr
);
276 nir_ssa_def
*pos_x
= nir_channel(&b
, global_id
, 0);
277 nir_ssa_def
*pos_y
= nir_channel(&b
, global_id
, 1);
279 nir_ssa_def
*tmp
= nir_imul(&b
, pos_y
, &stride
->dest
.ssa
);
280 tmp
= nir_iadd(&b
, tmp
, pos_x
);
282 nir_ssa_def
*buf_coord
= nir_vec4(&b
, tmp
, tmp
, tmp
, tmp
);
284 nir_ssa_def
*img_coord
= nir_iadd(&b
, global_id
, &offset
->dest
.ssa
);
286 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 2);
287 tex
->sampler_dim
= GLSL_SAMPLER_DIM_BUF
;
288 tex
->op
= nir_texop_txf
;
289 tex
->src
[0].src_type
= nir_tex_src_coord
;
290 tex
->src
[0].src
= nir_src_for_ssa(buf_coord
);
291 tex
->src
[1].src_type
= nir_tex_src_lod
;
292 tex
->src
[1].src
= nir_src_for_ssa(nir_imm_int(&b
, 0));
293 tex
->dest_type
= nir_type_float
;
294 tex
->is_array
= false;
295 tex
->coord_components
= 1;
296 tex
->texture
= nir_deref_var_create(tex
, input_img
);
299 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
300 nir_builder_instr_insert(&b
, &tex
->instr
);
302 nir_ssa_def
*outval
= &tex
->dest
.ssa
;
303 nir_intrinsic_instr
*store
= nir_intrinsic_instr_create(b
.shader
, nir_intrinsic_image_store
);
304 store
->src
[0] = nir_src_for_ssa(img_coord
);
305 store
->src
[1] = nir_src_for_ssa(nir_ssa_undef(&b
, 1, 32));
306 store
->src
[2] = nir_src_for_ssa(outval
);
307 store
->variables
[0] = nir_deref_var_create(store
, output_img
);
309 nir_builder_instr_insert(&b
, &store
->instr
);
313 /* Buffer to image - don't write use image accessors */
315 radv_device_init_meta_btoi_state(struct radv_device
*device
)
318 struct radv_shader_module cs
= { .nir
= NULL
};
320 zero(device
->meta_state
.btoi
);
322 cs
.nir
= build_nir_btoi_compute_shader(device
);
325 * two descriptors one for the image being sampled
326 * one for the buffer being written.
328 VkDescriptorSetLayoutCreateInfo ds_create_info
= {
329 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
331 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
334 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
335 .descriptorCount
= 1,
336 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
337 .pImmutableSamplers
= NULL
341 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
342 .descriptorCount
= 1,
343 .stageFlags
= VK_SHADER_STAGE_COMPUTE_BIT
,
344 .pImmutableSamplers
= NULL
349 result
= radv_CreateDescriptorSetLayout(radv_device_to_handle(device
),
351 &device
->meta_state
.alloc
,
352 &device
->meta_state
.btoi
.img_ds_layout
);
353 if (result
!= VK_SUCCESS
)
357 VkPipelineLayoutCreateInfo pl_create_info
= {
358 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
360 .pSetLayouts
= &device
->meta_state
.btoi
.img_ds_layout
,
361 .pushConstantRangeCount
= 1,
362 .pPushConstantRanges
= &(VkPushConstantRange
){VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12},
365 result
= radv_CreatePipelineLayout(radv_device_to_handle(device
),
367 &device
->meta_state
.alloc
,
368 &device
->meta_state
.btoi
.img_p_layout
);
369 if (result
!= VK_SUCCESS
)
374 VkPipelineShaderStageCreateInfo pipeline_shader_stage
= {
375 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
376 .stage
= VK_SHADER_STAGE_COMPUTE_BIT
,
377 .module
= radv_shader_module_to_handle(&cs
),
379 .pSpecializationInfo
= NULL
,
382 VkComputePipelineCreateInfo vk_pipeline_info
= {
383 .sType
= VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
,
384 .stage
= pipeline_shader_stage
,
386 .layout
= device
->meta_state
.btoi
.img_p_layout
,
389 result
= radv_CreateComputePipelines(radv_device_to_handle(device
),
390 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
391 1, &vk_pipeline_info
, NULL
,
392 &device
->meta_state
.btoi
.pipeline
);
393 if (result
!= VK_SUCCESS
)
404 radv_device_finish_meta_btoi_state(struct radv_device
*device
)
406 if (device
->meta_state
.btoi
.img_p_layout
) {
407 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
408 device
->meta_state
.btoi
.img_p_layout
,
409 &device
->meta_state
.alloc
);
411 if (device
->meta_state
.btoi
.img_ds_layout
) {
412 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device
),
413 device
->meta_state
.btoi
.img_ds_layout
,
414 &device
->meta_state
.alloc
);
416 if (device
->meta_state
.btoi
.pipeline
) {
417 radv_DestroyPipeline(radv_device_to_handle(device
),
418 device
->meta_state
.btoi
.pipeline
,
419 &device
->meta_state
.alloc
);
/* Tears down both the itob and btoi meta copy state. */
void
radv_device_finish_meta_bufimage_state(struct radv_device *device)
{
	radv_device_finish_meta_itob_state(device);
	radv_device_finish_meta_btoi_state(device);
}
431 radv_device_init_meta_bufimage_state(struct radv_device
*device
)
435 result
= radv_device_init_meta_itob_state(device
);
436 if (result
!= VK_SUCCESS
)
439 result
= radv_device_init_meta_btoi_state(device
);
440 if (result
!= VK_SUCCESS
) {
441 radv_device_finish_meta_itob_state(device
);
/* Saves compute state (including 12 bytes of push constants) before a
 * meta buffer<->image operation. */
void
radv_meta_begin_bufimage(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 12);
}
/* Restores the compute state saved by radv_meta_begin_bufimage(). */
void
radv_meta_end_bufimage(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 12);
}
462 create_iview(struct radv_cmd_buffer
*cmd_buffer
,
463 struct radv_meta_blit2d_surf
*surf
,
464 VkImageUsageFlags usage
,
465 struct radv_image_view
*iview
)
468 radv_image_view_init(iview
, cmd_buffer
->device
,
469 &(VkImageViewCreateInfo
) {
470 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
471 .image
= radv_image_to_handle(surf
->image
),
472 .viewType
= VK_IMAGE_VIEW_TYPE_2D
,
473 .format
= surf
->format
,
474 .subresourceRange
= {
475 .aspectMask
= surf
->aspect_mask
,
476 .baseMipLevel
= surf
->level
,
478 .baseArrayLayer
= surf
->layer
,
481 }, cmd_buffer
, usage
);
485 create_bview(struct radv_cmd_buffer
*cmd_buffer
,
486 struct radv_buffer
*buffer
,
489 struct radv_buffer_view
*bview
)
491 radv_buffer_view_init(bview
, cmd_buffer
->device
,
492 &(VkBufferViewCreateInfo
) {
493 .sType
= VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
,
495 .buffer
= radv_buffer_to_handle(buffer
),
498 .range
= VK_WHOLE_SIZE
,
504 struct radv_image_view src_iview
;
505 struct radv_buffer_view dst_bview
;
510 itob_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
511 struct itob_temps
*tmp
)
513 struct radv_device
*device
= cmd_buffer
->device
;
514 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
516 radv_temp_descriptor_set_create(device
, cmd_buffer
,
517 device
->meta_state
.itob
.img_ds_layout
,
520 radv_UpdateDescriptorSets(vk_device
,
522 (VkWriteDescriptorSet
[]) {
524 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
527 .dstArrayElement
= 0,
528 .descriptorCount
= 1,
529 .descriptorType
= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
,
530 .pImageInfo
= (VkDescriptorImageInfo
[]) {
532 .sampler
= VK_NULL_HANDLE
,
533 .imageView
= radv_image_view_to_handle(&tmp
->src_iview
),
534 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
539 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
542 .dstArrayElement
= 0,
543 .descriptorCount
= 1,
544 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
545 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->dst_bview
) },
549 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
550 VK_PIPELINE_BIND_POINT_COMPUTE
,
551 device
->meta_state
.itob
.img_p_layout
, 0, 1,
556 itob_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
558 VkPipeline pipeline
=
559 cmd_buffer
->device
->meta_state
.itob
.pipeline
;
561 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
562 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
563 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
568 radv_meta_image_to_buffer(struct radv_cmd_buffer
*cmd_buffer
,
569 struct radv_meta_blit2d_surf
*src
,
570 struct radv_meta_blit2d_buffer
*dst
,
572 struct radv_meta_blit2d_rect
*rects
)
574 struct radv_device
*device
= cmd_buffer
->device
;
575 struct itob_temps temps
;
577 create_iview(cmd_buffer
, src
, VK_IMAGE_USAGE_SAMPLED_BIT
, &temps
.src_iview
);
578 create_bview(cmd_buffer
, dst
->buffer
, dst
->offset
, dst
->format
, &temps
.dst_bview
);
579 itob_bind_descriptors(cmd_buffer
, &temps
);
581 itob_bind_pipeline(cmd_buffer
);
583 for (unsigned r
= 0; r
< num_rects
; ++r
) {
584 unsigned push_constants
[3] = {
589 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
590 device
->meta_state
.itob
.img_p_layout
,
591 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
594 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
596 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);
600 struct radv_buffer_view src_bview
;
601 struct radv_image_view dst_iview
;
606 btoi_bind_descriptors(struct radv_cmd_buffer
*cmd_buffer
,
607 struct btoi_temps
*tmp
)
609 struct radv_device
*device
= cmd_buffer
->device
;
610 VkDevice vk_device
= radv_device_to_handle(cmd_buffer
->device
);
612 radv_temp_descriptor_set_create(device
, cmd_buffer
,
613 device
->meta_state
.btoi
.img_ds_layout
,
616 radv_UpdateDescriptorSets(vk_device
,
618 (VkWriteDescriptorSet
[]) {
620 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
623 .dstArrayElement
= 0,
624 .descriptorCount
= 1,
625 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
,
626 .pTexelBufferView
= (VkBufferView
[]) { radv_buffer_view_to_handle(&tmp
->src_bview
) },
629 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
632 .dstArrayElement
= 0,
633 .descriptorCount
= 1,
634 .descriptorType
= VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
,
635 .pImageInfo
= (VkDescriptorImageInfo
[]) {
638 .imageView
= radv_image_view_to_handle(&tmp
->dst_iview
),
639 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
645 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer
),
646 VK_PIPELINE_BIND_POINT_COMPUTE
,
647 device
->meta_state
.btoi
.img_p_layout
, 0, 1,
652 btoi_bind_pipeline(struct radv_cmd_buffer
*cmd_buffer
)
654 VkPipeline pipeline
=
655 cmd_buffer
->device
->meta_state
.btoi
.pipeline
;
657 if (cmd_buffer
->state
.compute_pipeline
!= radv_pipeline_from_handle(pipeline
)) {
658 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
659 VK_PIPELINE_BIND_POINT_COMPUTE
, pipeline
);
664 radv_meta_buffer_to_image_cs(struct radv_cmd_buffer
*cmd_buffer
,
665 struct radv_meta_blit2d_buffer
*src
,
666 struct radv_meta_blit2d_surf
*dst
,
668 struct radv_meta_blit2d_rect
*rects
)
670 struct radv_device
*device
= cmd_buffer
->device
;
671 struct btoi_temps temps
;
673 create_bview(cmd_buffer
, src
->buffer
, src
->offset
, src
->format
, &temps
.src_bview
);
674 create_iview(cmd_buffer
, dst
, VK_IMAGE_USAGE_STORAGE_BIT
, &temps
.dst_iview
);
675 btoi_bind_descriptors(cmd_buffer
, &temps
);
677 btoi_bind_pipeline(cmd_buffer
);
679 for (unsigned r
= 0; r
< num_rects
; ++r
) {
680 unsigned push_constants
[3] = {
685 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer
),
686 device
->meta_state
.btoi
.img_p_layout
,
687 VK_SHADER_STAGE_COMPUTE_BIT
, 0, 12,
690 radv_unaligned_dispatch(cmd_buffer
, rects
[r
].width
, rects
[r
].height
, 1);
692 radv_temp_descriptor_set_destroy(cmd_buffer
->device
, temps
.set
);