984b3472e8456590e3f9fe2bc2d9dd786d706679
[mesa.git] / src / amd / vulkan / radv_meta_bufimage.c
1 /*
2 * Copyright © 2016 Red Hat.
3 * Copyright © 2016 Bas Nieuwenhuizen
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
14 * Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
22 * IN THE SOFTWARE.
23 */
24 #include "radv_meta.h"
25 #include "nir/nir_builder.h"
26
27 /*
28 * Compute shader implementation of image->buffer copy.
29 */
30
/* Build the image->buffer (itob) copy compute shader.
 *
 * Each invocation copies one texel of the source rectangle from a 2D
 * sampled image into a storage texel buffer.  Push constant layout
 * (12 bytes, matching the itob pipeline layout): ivec2 source offset at
 * byte 0, destination buffer pitch in texels at byte 8.  Workgroup size
 * is 16x16x1.
 */
static nir_shader *
build_nir_itob_compute_shader(struct radv_device *dev)
{
	nir_builder b;
	/* Source: single-sampled 2D float texture. */
	const struct glsl_type *sampler_type = glsl_sampler_type(GLSL_SAMPLER_DIM_2D,
								 false,
								 false,
								 GLSL_TYPE_FLOAT);
	/* Destination: texel buffer, written via image stores. */
	const struct glsl_type *img_type = glsl_sampler_type(GLSL_SAMPLER_DIM_BUF,
							     false,
							     false,
							     GLSL_TYPE_FLOAT);
	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
	b.shader->info->name = ralloc_strdup(b.shader, "meta_itob_cs");
	b.shader->info->cs.local_size[0] = 16;
	b.shader->info->cs.local_size[1] = 16;
	b.shader->info->cs.local_size[2] = 1;
	/* Set 0, binding 0: sampled source image. */
	nir_variable *input_img = nir_variable_create(b.shader, nir_var_uniform,
						      sampler_type, "s_tex");
	input_img->data.descriptor_set = 0;
	input_img->data.binding = 0;

	/* Set 0, binding 1: storage texel buffer destination. */
	nir_variable *output_img = nir_variable_create(b.shader, nir_var_uniform,
						      img_type, "out_img");
	output_img->data.descriptor_set = 0;
	output_img->data.binding = 1;

	/* global_id = wg_id * local_size + local_invocation_id: one
	 * invocation per texel of the copied rectangle. */
	nir_ssa_def *invoc_id = nir_load_system_value(&b, nir_intrinsic_load_local_invocation_id, 0);
	nir_ssa_def *wg_id = nir_load_system_value(&b, nir_intrinsic_load_work_group_id, 0);
	nir_ssa_def *block_size = nir_imm_ivec4(&b,
						b.shader->info->cs.local_size[0],
						b.shader->info->cs.local_size[1],
						b.shader->info->cs.local_size[2], 0);

	nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);



	/* Push constant at byte 0: ivec2 offset of the source rectangle. */
	nir_intrinsic_instr *offset = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	offset->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	offset->num_components = 2;
	nir_ssa_dest_init(&offset->instr, &offset->dest, 2, 32, "offset");
	nir_builder_instr_insert(&b, &offset->instr);

	/* Push constant at byte 8: destination buffer pitch in texels. */
	nir_intrinsic_instr *stride = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	stride->src[0] = nir_src_for_ssa(nir_imm_int(&b, 8));
	stride->num_components = 1;
	nir_ssa_dest_init(&stride->instr, &stride->dest, 1, 32, "stride");
	nir_builder_instr_insert(&b, &stride->instr);

	nir_ssa_def *img_coord = nir_iadd(&b, global_id, &offset->dest.ssa);

	/* Fetch the source texel with txf at lod 0. */
	nir_tex_instr *tex = nir_tex_instr_create(b.shader, 2);
	tex->sampler_dim = GLSL_SAMPLER_DIM_2D;
	tex->op = nir_texop_txf;
	tex->src[0].src_type = nir_tex_src_coord;
	tex->src[0].src = nir_src_for_ssa(img_coord);
	tex->src[1].src_type = nir_tex_src_lod;
	tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, 0));
	tex->dest_type = nir_type_float;
	tex->is_array = false;
	tex->coord_components = 2;
	tex->texture = nir_deref_var_create(tex, input_img);
	tex->sampler = NULL;

	nir_ssa_dest_init(&tex->instr, &tex->dest, 4, 32, "tex");
	nir_builder_instr_insert(&b, &tex->instr);

	/* Linear destination index = y * stride + x. */
	nir_ssa_def *pos_x = nir_channel(&b, global_id, 0);
	nir_ssa_def *pos_y = nir_channel(&b, global_id, 1);

	nir_ssa_def *tmp = nir_imul(&b, pos_y, &stride->dest.ssa);
	tmp = nir_iadd(&b, tmp, pos_x);

	/* The image_store coordinate is 4 components wide; replicate the
	 * scalar buffer index into all of them. */
	nir_ssa_def *coord = nir_vec4(&b, tmp, tmp, tmp, tmp);

	nir_ssa_def *outval = &tex->dest.ssa;
	nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_image_store);
	store->src[0] = nir_src_for_ssa(coord);
	store->src[1] = nir_src_for_ssa(nir_ssa_undef(&b, 1, 32)); /* sample index, unused */
	store->src[2] = nir_src_for_ssa(outval);
	store->variables[0] = nir_deref_var_create(store, output_img);

	nir_builder_instr_insert(&b, &store->instr);
	return b.shader;
}
117
118 /* Image to buffer - don't write use image accessors */
119 static VkResult
120 radv_device_init_meta_itob_state(struct radv_device *device)
121 {
122 VkResult result;
123 struct radv_shader_module cs = { .nir = NULL };
124
125 zero(device->meta_state.itob);
126
127 cs.nir = build_nir_itob_compute_shader(device);
128
129 /*
130 * two descriptors one for the image being sampled
131 * one for the buffer being written.
132 */
133 VkDescriptorSetLayoutCreateInfo ds_create_info = {
134 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
135 .bindingCount = 2,
136 .pBindings = (VkDescriptorSetLayoutBinding[]) {
137 {
138 .binding = 0,
139 .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
140 .descriptorCount = 1,
141 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
142 .pImmutableSamplers = NULL
143 },
144 {
145 .binding = 1,
146 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
147 .descriptorCount = 1,
148 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
149 .pImmutableSamplers = NULL
150 },
151 }
152 };
153
154 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
155 &ds_create_info,
156 &device->meta_state.alloc,
157 &device->meta_state.itob.img_ds_layout);
158 if (result != VK_SUCCESS)
159 goto fail;
160
161
162 VkPipelineLayoutCreateInfo pl_create_info = {
163 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
164 .setLayoutCount = 1,
165 .pSetLayouts = &device->meta_state.itob.img_ds_layout,
166 .pushConstantRangeCount = 1,
167 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 12},
168 };
169
170 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
171 &pl_create_info,
172 &device->meta_state.alloc,
173 &device->meta_state.itob.img_p_layout);
174 if (result != VK_SUCCESS)
175 goto fail;
176
177 /* compute shader */
178
179 VkPipelineShaderStageCreateInfo pipeline_shader_stage = {
180 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
181 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
182 .module = radv_shader_module_to_handle(&cs),
183 .pName = "main",
184 .pSpecializationInfo = NULL,
185 };
186
187 VkComputePipelineCreateInfo vk_pipeline_info = {
188 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
189 .stage = pipeline_shader_stage,
190 .flags = 0,
191 .layout = device->meta_state.itob.img_p_layout,
192 };
193
194 result = radv_CreateComputePipelines(radv_device_to_handle(device),
195 radv_pipeline_cache_to_handle(&device->meta_state.cache),
196 1, &vk_pipeline_info, NULL,
197 &device->meta_state.itob.pipeline);
198 if (result != VK_SUCCESS)
199 goto fail;
200
201 ralloc_free(cs.nir);
202 return VK_SUCCESS;
203 fail:
204 ralloc_free(cs.nir);
205 return result;
206 }
207
208 static void
209 radv_device_finish_meta_itob_state(struct radv_device *device)
210 {
211 if (device->meta_state.itob.img_p_layout) {
212 radv_DestroyPipelineLayout(radv_device_to_handle(device),
213 device->meta_state.itob.img_p_layout,
214 &device->meta_state.alloc);
215 }
216 if (device->meta_state.itob.img_ds_layout) {
217 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
218 device->meta_state.itob.img_ds_layout,
219 &device->meta_state.alloc);
220 }
221 if (device->meta_state.itob.pipeline) {
222 radv_DestroyPipeline(radv_device_to_handle(device),
223 device->meta_state.itob.pipeline,
224 &device->meta_state.alloc);
225 }
226 }
227
/* Build the buffer->image (btoi) copy compute shader.
 *
 * Each invocation copies one texel from a texel buffer into a 2D storage
 * image.  Push constant layout (12 bytes, matching the btoi pipeline
 * layout): ivec2 destination offset at byte 0, source buffer pitch in
 * texels at byte 8.  Workgroup size is 16x16x1.
 */
static nir_shader *
build_nir_btoi_compute_shader(struct radv_device *dev)
{
	nir_builder b;
	/* Source: texel buffer, fetched with txf. */
	const struct glsl_type *buf_type = glsl_sampler_type(GLSL_SAMPLER_DIM_BUF,
							     false,
							     false,
							     GLSL_TYPE_FLOAT);
	/* Destination: 2D storage image. */
	const struct glsl_type *img_type = glsl_sampler_type(GLSL_SAMPLER_DIM_2D,
							     false,
							     false,
							     GLSL_TYPE_FLOAT);
	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
	b.shader->info->name = ralloc_strdup(b.shader, "meta_btoi_cs");
	b.shader->info->cs.local_size[0] = 16;
	b.shader->info->cs.local_size[1] = 16;
	b.shader->info->cs.local_size[2] = 1;
	/* Set 0, binding 0: source texel buffer. */
	nir_variable *input_img = nir_variable_create(b.shader, nir_var_uniform,
						      buf_type, "s_tex");
	input_img->data.descriptor_set = 0;
	input_img->data.binding = 0;

	/* Set 0, binding 1: destination storage image. */
	nir_variable *output_img = nir_variable_create(b.shader, nir_var_uniform,
						      img_type, "out_img");
	output_img->data.descriptor_set = 0;
	output_img->data.binding = 1;

	/* One invocation per destination texel. */
	nir_ssa_def *invoc_id = nir_load_system_value(&b, nir_intrinsic_load_local_invocation_id, 0);
	nir_ssa_def *wg_id = nir_load_system_value(&b, nir_intrinsic_load_work_group_id, 0);
	nir_ssa_def *block_size = nir_imm_ivec4(&b,
						b.shader->info->cs.local_size[0],
						b.shader->info->cs.local_size[1],
						b.shader->info->cs.local_size[2], 0);

	nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

	/* Push constant at byte 0: ivec2 offset of the destination rect. */
	nir_intrinsic_instr *offset = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	offset->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	offset->num_components = 2;
	nir_ssa_dest_init(&offset->instr, &offset->dest, 2, 32, "offset");
	nir_builder_instr_insert(&b, &offset->instr);

	/* Push constant at byte 8: source buffer pitch in texels. */
	nir_intrinsic_instr *stride = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	stride->src[0] = nir_src_for_ssa(nir_imm_int(&b, 8));
	stride->num_components = 1;
	nir_ssa_dest_init(&stride->instr, &stride->dest, 1, 32, "stride");
	nir_builder_instr_insert(&b, &stride->instr);

	/* Linear source index = y * stride + x, replicated into a vec4
	 * for the buffer fetch coordinate. */
	nir_ssa_def *pos_x = nir_channel(&b, global_id, 0);
	nir_ssa_def *pos_y = nir_channel(&b, global_id, 1);

	nir_ssa_def *tmp = nir_imul(&b, pos_y, &stride->dest.ssa);
	tmp = nir_iadd(&b, tmp, pos_x);

	nir_ssa_def *buf_coord = nir_vec4(&b, tmp, tmp, tmp, tmp);

	nir_ssa_def *img_coord = nir_iadd(&b, global_id, &offset->dest.ssa);

	/* Fetch the source texel from the buffer (txf, lod 0). */
	nir_tex_instr *tex = nir_tex_instr_create(b.shader, 2);
	tex->sampler_dim = GLSL_SAMPLER_DIM_BUF;
	tex->op = nir_texop_txf;
	tex->src[0].src_type = nir_tex_src_coord;
	tex->src[0].src = nir_src_for_ssa(buf_coord);
	tex->src[1].src_type = nir_tex_src_lod;
	tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, 0));
	tex->dest_type = nir_type_float;
	tex->is_array = false;
	tex->coord_components = 1;
	tex->texture = nir_deref_var_create(tex, input_img);
	tex->sampler = NULL;

	nir_ssa_dest_init(&tex->instr, &tex->dest, 4, 32, "tex");
	nir_builder_instr_insert(&b, &tex->instr);

	/* Store it at the offset destination coordinate. */
	nir_ssa_def *outval = &tex->dest.ssa;
	nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_image_store);
	store->src[0] = nir_src_for_ssa(img_coord);
	store->src[1] = nir_src_for_ssa(nir_ssa_undef(&b, 1, 32)); /* sample index, unused */
	store->src[2] = nir_src_for_ssa(outval);
	store->variables[0] = nir_deref_var_create(store, output_img);

	nir_builder_instr_insert(&b, &store->instr);
	return b.shader;
}
312
313 /* Buffer to image - don't write use image accessors */
314 static VkResult
315 radv_device_init_meta_btoi_state(struct radv_device *device)
316 {
317 VkResult result;
318 struct radv_shader_module cs = { .nir = NULL };
319
320 zero(device->meta_state.btoi);
321
322 cs.nir = build_nir_btoi_compute_shader(device);
323
324 /*
325 * two descriptors one for the image being sampled
326 * one for the buffer being written.
327 */
328 VkDescriptorSetLayoutCreateInfo ds_create_info = {
329 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
330 .bindingCount = 2,
331 .pBindings = (VkDescriptorSetLayoutBinding[]) {
332 {
333 .binding = 0,
334 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
335 .descriptorCount = 1,
336 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
337 .pImmutableSamplers = NULL
338 },
339 {
340 .binding = 1,
341 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
342 .descriptorCount = 1,
343 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
344 .pImmutableSamplers = NULL
345 },
346 }
347 };
348
349 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
350 &ds_create_info,
351 &device->meta_state.alloc,
352 &device->meta_state.btoi.img_ds_layout);
353 if (result != VK_SUCCESS)
354 goto fail;
355
356
357 VkPipelineLayoutCreateInfo pl_create_info = {
358 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
359 .setLayoutCount = 1,
360 .pSetLayouts = &device->meta_state.btoi.img_ds_layout,
361 .pushConstantRangeCount = 1,
362 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 12},
363 };
364
365 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
366 &pl_create_info,
367 &device->meta_state.alloc,
368 &device->meta_state.btoi.img_p_layout);
369 if (result != VK_SUCCESS)
370 goto fail;
371
372 /* compute shader */
373
374 VkPipelineShaderStageCreateInfo pipeline_shader_stage = {
375 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
376 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
377 .module = radv_shader_module_to_handle(&cs),
378 .pName = "main",
379 .pSpecializationInfo = NULL,
380 };
381
382 VkComputePipelineCreateInfo vk_pipeline_info = {
383 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
384 .stage = pipeline_shader_stage,
385 .flags = 0,
386 .layout = device->meta_state.btoi.img_p_layout,
387 };
388
389 result = radv_CreateComputePipelines(radv_device_to_handle(device),
390 radv_pipeline_cache_to_handle(&device->meta_state.cache),
391 1, &vk_pipeline_info, NULL,
392 &device->meta_state.btoi.pipeline);
393 if (result != VK_SUCCESS)
394 goto fail;
395
396 ralloc_free(cs.nir);
397 return VK_SUCCESS;
398 fail:
399 ralloc_free(cs.nir);
400 return result;
401 }
402
403 static void
404 radv_device_finish_meta_btoi_state(struct radv_device *device)
405 {
406 if (device->meta_state.btoi.img_p_layout) {
407 radv_DestroyPipelineLayout(radv_device_to_handle(device),
408 device->meta_state.btoi.img_p_layout,
409 &device->meta_state.alloc);
410 }
411 if (device->meta_state.btoi.img_ds_layout) {
412 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
413 device->meta_state.btoi.img_ds_layout,
414 &device->meta_state.alloc);
415 }
416 if (device->meta_state.btoi.pipeline) {
417 radv_DestroyPipeline(radv_device_to_handle(device),
418 device->meta_state.btoi.pipeline,
419 &device->meta_state.alloc);
420 }
421 }
422
/* Build the image->image (itoi) copy compute shader.
 *
 * Each invocation copies one texel between two 2D images.  Push constant
 * layout (16 bytes, matching the itoi pipeline layout): ivec2 source
 * offset at byte 0, ivec2 destination offset at byte 8.  Workgroup size
 * is 16x16x1.
 */
static nir_shader *
build_nir_itoi_compute_shader(struct radv_device *dev)
{
	nir_builder b;
	/* NOTE(review): despite its name, "buf_type" is the 2D source
	 * image type here (name carried over from the btoi shader). */
	const struct glsl_type *buf_type = glsl_sampler_type(GLSL_SAMPLER_DIM_2D,
							     false,
							     false,
							     GLSL_TYPE_FLOAT);
	/* Destination: 2D storage image. */
	const struct glsl_type *img_type = glsl_sampler_type(GLSL_SAMPLER_DIM_2D,
							     false,
							     false,
							     GLSL_TYPE_FLOAT);
	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
	b.shader->info->name = ralloc_strdup(b.shader, "meta_itoi_cs");
	b.shader->info->cs.local_size[0] = 16;
	b.shader->info->cs.local_size[1] = 16;
	b.shader->info->cs.local_size[2] = 1;
	/* Set 0, binding 0: sampled source image. */
	nir_variable *input_img = nir_variable_create(b.shader, nir_var_uniform,
						      buf_type, "s_tex");
	input_img->data.descriptor_set = 0;
	input_img->data.binding = 0;

	/* Set 0, binding 1: destination storage image. */
	nir_variable *output_img = nir_variable_create(b.shader, nir_var_uniform,
						      img_type, "out_img");
	output_img->data.descriptor_set = 0;
	output_img->data.binding = 1;

	/* One invocation per copied texel. */
	nir_ssa_def *invoc_id = nir_load_system_value(&b, nir_intrinsic_load_local_invocation_id, 0);
	nir_ssa_def *wg_id = nir_load_system_value(&b, nir_intrinsic_load_work_group_id, 0);
	nir_ssa_def *block_size = nir_imm_ivec4(&b,
						b.shader->info->cs.local_size[0],
						b.shader->info->cs.local_size[1],
						b.shader->info->cs.local_size[2], 0);

	nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

	/* Push constant at byte 0: ivec2 source rectangle offset. */
	nir_intrinsic_instr *src_offset = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	src_offset->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	src_offset->num_components = 2;
	nir_ssa_dest_init(&src_offset->instr, &src_offset->dest, 2, 32, "src_offset");
	nir_builder_instr_insert(&b, &src_offset->instr);

	/* Push constant at byte 8: ivec2 destination rectangle offset. */
	nir_intrinsic_instr *dst_offset = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	dst_offset->src[0] = nir_src_for_ssa(nir_imm_int(&b, 8));
	dst_offset->num_components = 2;
	nir_ssa_dest_init(&dst_offset->instr, &dst_offset->dest, 2, 32, "dst_offset");
	nir_builder_instr_insert(&b, &dst_offset->instr);

	nir_ssa_def *src_coord = nir_iadd(&b, global_id, &src_offset->dest.ssa);

	nir_ssa_def *dst_coord = nir_iadd(&b, global_id, &dst_offset->dest.ssa);

	/* Fetch the source texel with txf at lod 0. */
	nir_tex_instr *tex = nir_tex_instr_create(b.shader, 2);
	tex->sampler_dim = GLSL_SAMPLER_DIM_2D;
	tex->op = nir_texop_txf;
	tex->src[0].src_type = nir_tex_src_coord;
	tex->src[0].src = nir_src_for_ssa(src_coord);
	tex->src[1].src_type = nir_tex_src_lod;
	tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, 0));
	tex->dest_type = nir_type_float;
	tex->is_array = false;
	tex->coord_components = 2;
	tex->texture = nir_deref_var_create(tex, input_img);
	tex->sampler = NULL;

	nir_ssa_dest_init(&tex->instr, &tex->dest, 4, 32, "tex");
	nir_builder_instr_insert(&b, &tex->instr);

	/* Store it at the offset destination coordinate. */
	nir_ssa_def *outval = &tex->dest.ssa;
	nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_image_store);
	store->src[0] = nir_src_for_ssa(dst_coord);
	store->src[1] = nir_src_for_ssa(nir_ssa_undef(&b, 1, 32)); /* sample index, unused */
	store->src[2] = nir_src_for_ssa(outval);
	store->variables[0] = nir_deref_var_create(store, output_img);

	nir_builder_instr_insert(&b, &store->instr);
	return b.shader;
}
501
502 /* image to image - don't write use image accessors */
503 static VkResult
504 radv_device_init_meta_itoi_state(struct radv_device *device)
505 {
506 VkResult result;
507 struct radv_shader_module cs = { .nir = NULL };
508
509 zero(device->meta_state.itoi);
510
511 cs.nir = build_nir_itoi_compute_shader(device);
512
513 /*
514 * two descriptors one for the image being sampled
515 * one for the buffer being written.
516 */
517 VkDescriptorSetLayoutCreateInfo ds_create_info = {
518 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
519 .bindingCount = 2,
520 .pBindings = (VkDescriptorSetLayoutBinding[]) {
521 {
522 .binding = 0,
523 .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
524 .descriptorCount = 1,
525 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
526 .pImmutableSamplers = NULL
527 },
528 {
529 .binding = 1,
530 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
531 .descriptorCount = 1,
532 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
533 .pImmutableSamplers = NULL
534 },
535 }
536 };
537
538 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
539 &ds_create_info,
540 &device->meta_state.alloc,
541 &device->meta_state.itoi.img_ds_layout);
542 if (result != VK_SUCCESS)
543 goto fail;
544
545
546 VkPipelineLayoutCreateInfo pl_create_info = {
547 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
548 .setLayoutCount = 1,
549 .pSetLayouts = &device->meta_state.itoi.img_ds_layout,
550 .pushConstantRangeCount = 1,
551 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 16},
552 };
553
554 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
555 &pl_create_info,
556 &device->meta_state.alloc,
557 &device->meta_state.itoi.img_p_layout);
558 if (result != VK_SUCCESS)
559 goto fail;
560
561 /* compute shader */
562
563 VkPipelineShaderStageCreateInfo pipeline_shader_stage = {
564 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
565 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
566 .module = radv_shader_module_to_handle(&cs),
567 .pName = "main",
568 .pSpecializationInfo = NULL,
569 };
570
571 VkComputePipelineCreateInfo vk_pipeline_info = {
572 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
573 .stage = pipeline_shader_stage,
574 .flags = 0,
575 .layout = device->meta_state.itoi.img_p_layout,
576 };
577
578 result = radv_CreateComputePipelines(radv_device_to_handle(device),
579 radv_pipeline_cache_to_handle(&device->meta_state.cache),
580 1, &vk_pipeline_info, NULL,
581 &device->meta_state.itoi.pipeline);
582 if (result != VK_SUCCESS)
583 goto fail;
584
585 ralloc_free(cs.nir);
586 return VK_SUCCESS;
587 fail:
588 ralloc_free(cs.nir);
589 return result;
590 }
591
592 static void
593 radv_device_finish_meta_itoi_state(struct radv_device *device)
594 {
595 if (device->meta_state.itoi.img_p_layout) {
596 radv_DestroyPipelineLayout(radv_device_to_handle(device),
597 device->meta_state.itoi.img_p_layout,
598 &device->meta_state.alloc);
599 }
600 if (device->meta_state.itoi.img_ds_layout) {
601 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
602 device->meta_state.itoi.img_ds_layout,
603 &device->meta_state.alloc);
604 }
605 if (device->meta_state.itoi.pipeline) {
606 radv_DestroyPipeline(radv_device_to_handle(device),
607 device->meta_state.itoi.pipeline,
608 &device->meta_state.alloc);
609 }
610 }
611
/* Build the image-clear (cleari) compute shader.
 *
 * Each invocation writes a constant color to one texel of a 2D storage
 * image.  Push constant layout (16 bytes, matching the cleari pipeline
 * layout): vec4 clear value at byte 0.  Workgroup size is 16x16x1.
 */
static nir_shader *
build_nir_cleari_compute_shader(struct radv_device *dev)
{
	nir_builder b;
	const struct glsl_type *img_type = glsl_sampler_type(GLSL_SAMPLER_DIM_2D,
							     false,
							     false,
							     GLSL_TYPE_FLOAT);
	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
	b.shader->info->name = ralloc_strdup(b.shader, "meta_cleari_cs");
	b.shader->info->cs.local_size[0] = 16;
	b.shader->info->cs.local_size[1] = 16;
	b.shader->info->cs.local_size[2] = 1;

	/* Set 0, binding 0: the storage image being cleared. */
	nir_variable *output_img = nir_variable_create(b.shader, nir_var_uniform,
						      img_type, "out_img");
	output_img->data.descriptor_set = 0;
	output_img->data.binding = 0;

	/* One invocation per cleared texel. */
	nir_ssa_def *invoc_id = nir_load_system_value(&b, nir_intrinsic_load_local_invocation_id, 0);
	nir_ssa_def *wg_id = nir_load_system_value(&b, nir_intrinsic_load_work_group_id, 0);
	nir_ssa_def *block_size = nir_imm_ivec4(&b,
						b.shader->info->cs.local_size[0],
						b.shader->info->cs.local_size[1],
						b.shader->info->cs.local_size[2], 0);

	nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

	/* Push constant at byte 0: the vec4 clear value. */
	nir_intrinsic_instr *clear_val = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	clear_val->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	clear_val->num_components = 4;
	nir_ssa_dest_init(&clear_val->instr, &clear_val->dest, 4, 32, "clear_value");
	nir_builder_instr_insert(&b, &clear_val->instr);

	/* Store the clear value at this invocation's texel. */
	nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_image_store);
	store->src[0] = nir_src_for_ssa(global_id);
	store->src[1] = nir_src_for_ssa(nir_ssa_undef(&b, 1, 32)); /* sample index, unused */
	store->src[2] = nir_src_for_ssa(&clear_val->dest.ssa);
	store->variables[0] = nir_deref_var_create(store, output_img);

	nir_builder_instr_insert(&b, &store->instr);
	return b.shader;
}
655
656 static VkResult
657 radv_device_init_meta_cleari_state(struct radv_device *device)
658 {
659 VkResult result;
660 struct radv_shader_module cs = { .nir = NULL };
661
662 zero(device->meta_state.cleari);
663
664 cs.nir = build_nir_cleari_compute_shader(device);
665
666 /*
667 * two descriptors one for the image being sampled
668 * one for the buffer being written.
669 */
670 VkDescriptorSetLayoutCreateInfo ds_create_info = {
671 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
672 .bindingCount = 1,
673 .pBindings = (VkDescriptorSetLayoutBinding[]) {
674 {
675 .binding = 0,
676 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
677 .descriptorCount = 1,
678 .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
679 .pImmutableSamplers = NULL
680 },
681 }
682 };
683
684 result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
685 &ds_create_info,
686 &device->meta_state.alloc,
687 &device->meta_state.cleari.img_ds_layout);
688 if (result != VK_SUCCESS)
689 goto fail;
690
691
692 VkPipelineLayoutCreateInfo pl_create_info = {
693 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
694 .setLayoutCount = 1,
695 .pSetLayouts = &device->meta_state.cleari.img_ds_layout,
696 .pushConstantRangeCount = 1,
697 .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 16},
698 };
699
700 result = radv_CreatePipelineLayout(radv_device_to_handle(device),
701 &pl_create_info,
702 &device->meta_state.alloc,
703 &device->meta_state.cleari.img_p_layout);
704 if (result != VK_SUCCESS)
705 goto fail;
706
707 /* compute shader */
708
709 VkPipelineShaderStageCreateInfo pipeline_shader_stage = {
710 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
711 .stage = VK_SHADER_STAGE_COMPUTE_BIT,
712 .module = radv_shader_module_to_handle(&cs),
713 .pName = "main",
714 .pSpecializationInfo = NULL,
715 };
716
717 VkComputePipelineCreateInfo vk_pipeline_info = {
718 .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
719 .stage = pipeline_shader_stage,
720 .flags = 0,
721 .layout = device->meta_state.cleari.img_p_layout,
722 };
723
724 result = radv_CreateComputePipelines(radv_device_to_handle(device),
725 radv_pipeline_cache_to_handle(&device->meta_state.cache),
726 1, &vk_pipeline_info, NULL,
727 &device->meta_state.cleari.pipeline);
728 if (result != VK_SUCCESS)
729 goto fail;
730
731 ralloc_free(cs.nir);
732 return VK_SUCCESS;
733 fail:
734 ralloc_free(cs.nir);
735 return result;
736 }
737
738 static void
739 radv_device_finish_meta_cleari_state(struct radv_device *device)
740 {
741 if (device->meta_state.cleari.img_p_layout) {
742 radv_DestroyPipelineLayout(radv_device_to_handle(device),
743 device->meta_state.cleari.img_p_layout,
744 &device->meta_state.alloc);
745 }
746 if (device->meta_state.cleari.img_ds_layout) {
747 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
748 device->meta_state.cleari.img_ds_layout,
749 &device->meta_state.alloc);
750 }
751 if (device->meta_state.cleari.pipeline) {
752 radv_DestroyPipeline(radv_device_to_handle(device),
753 device->meta_state.cleari.pipeline,
754 &device->meta_state.alloc);
755 }
756 }
757
/* Destroy all state created by radv_device_init_meta_bufimage_state().
 * Each sub-finisher checks its handles, so this is safe to call on
 * partially initialized state. */
void
radv_device_finish_meta_bufimage_state(struct radv_device *device)
{
	radv_device_finish_meta_itob_state(device);
	radv_device_finish_meta_btoi_state(device);
	radv_device_finish_meta_itoi_state(device);
	radv_device_finish_meta_cleari_state(device);
}
766
767 VkResult
768 radv_device_init_meta_bufimage_state(struct radv_device *device)
769 {
770 VkResult result;
771
772 result = radv_device_init_meta_itob_state(device);
773 if (result != VK_SUCCESS)
774 return result;
775
776 result = radv_device_init_meta_btoi_state(device);
777 if (result != VK_SUCCESS)
778 goto fail_itob;
779
780 result = radv_device_init_meta_itoi_state(device);
781 if (result != VK_SUCCESS)
782 goto fail_btoi;
783
784 result = radv_device_init_meta_cleari_state(device);
785 if (result != VK_SUCCESS)
786 goto fail_itoi;
787
788 return VK_SUCCESS;
789 fail_itoi:
790 radv_device_finish_meta_itoi_state(device);
791 fail_btoi:
792 radv_device_finish_meta_btoi_state(device);
793 fail_itob:
794 radv_device_finish_meta_itob_state(device);
795 return result;
796 }
797
/* Save the current compute state plus 16 bytes of push constants
 * (the itoi pipeline's push-constant range size) before an
 * image->image meta copy. */
void
radv_meta_begin_itoi(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
804
/* Restore the compute state (including 16 bytes of push constants)
 * saved by radv_meta_begin_itoi(). */
void
radv_meta_end_itoi(struct radv_cmd_buffer *cmd_buffer,
		   struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
811
/* Save the current compute state plus 12 bytes of push constants
 * (the itob/btoi pipelines' push-constant range size) before a
 * buffer<->image meta copy. */
void
radv_meta_begin_bufimage(struct radv_cmd_buffer *cmd_buffer,
			 struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 12);
}
818
/* Restore the compute state (including 12 bytes of push constants)
 * saved by radv_meta_begin_bufimage(). */
void
radv_meta_end_bufimage(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 12);
}
825
/* Save the current compute state plus 16 bytes of push constants
 * (the cleari pipeline's push-constant range size) before a
 * compute-based image clear. */
void
radv_meta_begin_cleari(struct radv_cmd_buffer *cmd_buffer,
		       struct radv_meta_saved_compute_state *save)
{
	radv_meta_save_compute(save, cmd_buffer, 16);
}
832
/* Restore the compute state (including 16 bytes of push constants)
 * saved by radv_meta_begin_cleari(). */
void
radv_meta_end_cleari(struct radv_cmd_buffer *cmd_buffer,
		     struct radv_meta_saved_compute_state *save)
{
	radv_meta_restore_compute(save, cmd_buffer, 16);
}
839
840 static void
841 create_iview(struct radv_cmd_buffer *cmd_buffer,
842 struct radv_meta_blit2d_surf *surf,
843 VkImageUsageFlags usage,
844 struct radv_image_view *iview)
845 {
846
847 radv_image_view_init(iview, cmd_buffer->device,
848 &(VkImageViewCreateInfo) {
849 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
850 .image = radv_image_to_handle(surf->image),
851 .viewType = VK_IMAGE_VIEW_TYPE_2D,
852 .format = surf->format,
853 .subresourceRange = {
854 .aspectMask = surf->aspect_mask,
855 .baseMipLevel = surf->level,
856 .levelCount = 1,
857 .baseArrayLayer = surf->layer,
858 .layerCount = 1
859 },
860 }, cmd_buffer, usage);
861 }
862
863 static void
864 create_bview(struct radv_cmd_buffer *cmd_buffer,
865 struct radv_buffer *buffer,
866 unsigned offset,
867 VkFormat format,
868 struct radv_buffer_view *bview)
869 {
870 radv_buffer_view_init(bview, cmd_buffer->device,
871 &(VkBufferViewCreateInfo) {
872 .sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
873 .flags = 0,
874 .buffer = radv_buffer_to_handle(buffer),
875 .format = format,
876 .offset = offset,
877 .range = VK_WHOLE_SIZE,
878 }, cmd_buffer);
879
880 }
881
/* Transient objects used for one image->buffer meta copy. */
struct itob_temps {
	struct radv_image_view src_iview;  /* view of the source image */
	struct radv_buffer_view dst_bview; /* view of the destination buffer */
	VkDescriptorSet set;               /* temp set, destroyed after the dispatch */
};
887
/* Allocate a temporary descriptor set for the itob pipeline, fill it
 * (binding 0: the sampled source image view, binding 1: the storage
 * texel buffer destination view) and bind it for compute dispatch. */
static void
itob_bind_descriptors(struct radv_cmd_buffer *cmd_buffer,
		      struct itob_temps *tmp)
{
	struct radv_device *device = cmd_buffer->device;
	VkDevice vk_device = radv_device_to_handle(cmd_buffer->device);

	radv_temp_descriptor_set_create(device, cmd_buffer,
					device->meta_state.itob.img_ds_layout,
					&tmp->set);

	radv_UpdateDescriptorSets(vk_device,
				  2, /* writeCount */
				  (VkWriteDescriptorSet[]) {
					  {
						  .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						  .dstSet = tmp->set,
						  .dstBinding = 0,
						  .dstArrayElement = 0,
						  .descriptorCount = 1,
						  .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
						  .pImageInfo = (VkDescriptorImageInfo[]) {
							  {
								  .sampler = VK_NULL_HANDLE,
								  .imageView = radv_image_view_to_handle(&tmp->src_iview),
								  .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
							  },
						  }
					  },
					  {
						  .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						  .dstSet = tmp->set,
						  .dstBinding = 1,
						  .dstArrayElement = 0,
						  .descriptorCount = 1,
						  .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
						  .pTexelBufferView = (VkBufferView[]) { radv_buffer_view_to_handle(&tmp->dst_bview) },
					  }
				  }, 0, NULL);

	radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer),
				   VK_PIPELINE_BIND_POINT_COMPUTE,
				   device->meta_state.itob.img_p_layout, 0, 1,
				   &tmp->set, 0, NULL);
}
933
934 static void
935 itob_bind_pipeline(struct radv_cmd_buffer *cmd_buffer)
936 {
937 VkPipeline pipeline =
938 cmd_buffer->device->meta_state.itob.pipeline;
939
940 if (cmd_buffer->state.compute_pipeline != radv_pipeline_from_handle(pipeline)) {
941 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
942 VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);
943 }
944 }
945
946 void
947 radv_meta_image_to_buffer(struct radv_cmd_buffer *cmd_buffer,
948 struct radv_meta_blit2d_surf *src,
949 struct radv_meta_blit2d_buffer *dst,
950 unsigned num_rects,
951 struct radv_meta_blit2d_rect *rects)
952 {
953 struct radv_device *device = cmd_buffer->device;
954 struct itob_temps temps;
955
956 create_iview(cmd_buffer, src, VK_IMAGE_USAGE_SAMPLED_BIT, &temps.src_iview);
957 create_bview(cmd_buffer, dst->buffer, dst->offset, dst->format, &temps.dst_bview);
958 itob_bind_descriptors(cmd_buffer, &temps);
959
960 itob_bind_pipeline(cmd_buffer);
961
962 for (unsigned r = 0; r < num_rects; ++r) {
963 unsigned push_constants[3] = {
964 rects[r].src_x,
965 rects[r].src_y,
966 dst->pitch
967 };
968 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
969 device->meta_state.itob.img_p_layout,
970 VK_SHADER_STAGE_COMPUTE_BIT, 0, 12,
971 push_constants);
972
973 radv_unaligned_dispatch(cmd_buffer, rects[r].width, rects[r].height, 1);
974 }
975 radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
976 }
977
/* Transient objects for one buffer->image (btoi) compute copy; all members
 * are created, used and destroyed within a single meta operation.
 */
struct btoi_temps {
	struct radv_buffer_view src_bview; /* texel-buffer view of the source buffer */
	struct radv_image_view dst_iview;  /* storage view of the destination image */
	VkDescriptorSet set;               /* temp descriptor set binding the two views */
};
983
984 static void
985 btoi_bind_descriptors(struct radv_cmd_buffer *cmd_buffer,
986 struct btoi_temps *tmp)
987 {
988 struct radv_device *device = cmd_buffer->device;
989 VkDevice vk_device = radv_device_to_handle(cmd_buffer->device);
990
991 radv_temp_descriptor_set_create(device, cmd_buffer,
992 device->meta_state.btoi.img_ds_layout,
993 &tmp->set);
994
995 radv_UpdateDescriptorSets(vk_device,
996 2, /* writeCount */
997 (VkWriteDescriptorSet[]) {
998 {
999 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1000 .dstSet = tmp->set,
1001 .dstBinding = 0,
1002 .dstArrayElement = 0,
1003 .descriptorCount = 1,
1004 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
1005 .pTexelBufferView = (VkBufferView[]) { radv_buffer_view_to_handle(&tmp->src_bview) },
1006 },
1007 {
1008 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1009 .dstSet = tmp->set,
1010 .dstBinding = 1,
1011 .dstArrayElement = 0,
1012 .descriptorCount = 1,
1013 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
1014 .pImageInfo = (VkDescriptorImageInfo[]) {
1015 {
1016 .sampler = NULL,
1017 .imageView = radv_image_view_to_handle(&tmp->dst_iview),
1018 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
1019 },
1020 }
1021 }
1022 }, 0, NULL);
1023
1024 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer),
1025 VK_PIPELINE_BIND_POINT_COMPUTE,
1026 device->meta_state.btoi.img_p_layout, 0, 1,
1027 &tmp->set, 0, NULL);
1028 }
1029
1030 static void
1031 btoi_bind_pipeline(struct radv_cmd_buffer *cmd_buffer)
1032 {
1033 VkPipeline pipeline =
1034 cmd_buffer->device->meta_state.btoi.pipeline;
1035
1036 if (cmd_buffer->state.compute_pipeline != radv_pipeline_from_handle(pipeline)) {
1037 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
1038 VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);
1039 }
1040 }
1041
1042 void
1043 radv_meta_buffer_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
1044 struct radv_meta_blit2d_buffer *src,
1045 struct radv_meta_blit2d_surf *dst,
1046 unsigned num_rects,
1047 struct radv_meta_blit2d_rect *rects)
1048 {
1049 struct radv_device *device = cmd_buffer->device;
1050 struct btoi_temps temps;
1051
1052 create_bview(cmd_buffer, src->buffer, src->offset, src->format, &temps.src_bview);
1053 create_iview(cmd_buffer, dst, VK_IMAGE_USAGE_STORAGE_BIT, &temps.dst_iview);
1054 btoi_bind_descriptors(cmd_buffer, &temps);
1055
1056 btoi_bind_pipeline(cmd_buffer);
1057
1058 for (unsigned r = 0; r < num_rects; ++r) {
1059 unsigned push_constants[3] = {
1060 rects[r].dst_x,
1061 rects[r].dst_y,
1062 src->pitch
1063 };
1064 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
1065 device->meta_state.btoi.img_p_layout,
1066 VK_SHADER_STAGE_COMPUTE_BIT, 0, 12,
1067 push_constants);
1068
1069 radv_unaligned_dispatch(cmd_buffer, rects[r].width, rects[r].height, 1);
1070 }
1071 radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
1072 }
1073
/* Transient objects for one image->image (itoi) compute copy; all members
 * are created, used and destroyed within a single meta operation.
 */
struct itoi_temps {
	struct radv_image_view src_iview; /* sampled view of the source image */
	struct radv_image_view dst_iview; /* storage view of the destination image */
	VkDescriptorSet set;              /* temp descriptor set binding the two views */
};
1079
1080 static void
1081 itoi_bind_descriptors(struct radv_cmd_buffer *cmd_buffer,
1082 struct itoi_temps *tmp)
1083 {
1084 struct radv_device *device = cmd_buffer->device;
1085 VkDevice vk_device = radv_device_to_handle(cmd_buffer->device);
1086
1087 radv_temp_descriptor_set_create(device, cmd_buffer,
1088 device->meta_state.itoi.img_ds_layout,
1089 &tmp->set);
1090
1091 radv_UpdateDescriptorSets(vk_device,
1092 2, /* writeCount */
1093 (VkWriteDescriptorSet[]) {
1094 {
1095 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1096 .dstSet = tmp->set,
1097 .dstBinding = 0,
1098 .dstArrayElement = 0,
1099 .descriptorCount = 1,
1100 .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
1101 .pImageInfo = (VkDescriptorImageInfo[]) {
1102 {
1103 .sampler = NULL,
1104 .imageView = radv_image_view_to_handle(&tmp->src_iview),
1105 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
1106 },
1107 }
1108 },
1109 {
1110 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1111 .dstSet = tmp->set,
1112 .dstBinding = 1,
1113 .dstArrayElement = 0,
1114 .descriptorCount = 1,
1115 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
1116 .pImageInfo = (VkDescriptorImageInfo[]) {
1117 {
1118 .sampler = NULL,
1119 .imageView = radv_image_view_to_handle(&tmp->dst_iview),
1120 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
1121 },
1122 }
1123 }
1124 }, 0, NULL);
1125
1126 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer),
1127 VK_PIPELINE_BIND_POINT_COMPUTE,
1128 device->meta_state.itoi.img_p_layout, 0, 1,
1129 &tmp->set, 0, NULL);
1130 }
1131
1132 static void
1133 itoi_bind_pipeline(struct radv_cmd_buffer *cmd_buffer)
1134 {
1135 VkPipeline pipeline =
1136 cmd_buffer->device->meta_state.itoi.pipeline;
1137
1138 if (cmd_buffer->state.compute_pipeline != radv_pipeline_from_handle(pipeline)) {
1139 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
1140 VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);
1141 }
1142 }
1143
1144 void
1145 radv_meta_image_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
1146 struct radv_meta_blit2d_surf *src,
1147 struct radv_meta_blit2d_surf *dst,
1148 unsigned num_rects,
1149 struct radv_meta_blit2d_rect *rects)
1150 {
1151 struct radv_device *device = cmd_buffer->device;
1152 struct itoi_temps temps;
1153
1154 create_iview(cmd_buffer, src, VK_IMAGE_USAGE_SAMPLED_BIT, &temps.src_iview);
1155 create_iview(cmd_buffer, dst, VK_IMAGE_USAGE_STORAGE_BIT, &temps.dst_iview);
1156
1157 itoi_bind_descriptors(cmd_buffer, &temps);
1158
1159 itoi_bind_pipeline(cmd_buffer);
1160
1161 for (unsigned r = 0; r < num_rects; ++r) {
1162 unsigned push_constants[4] = {
1163 rects[r].src_x,
1164 rects[r].src_y,
1165 rects[r].dst_x,
1166 rects[r].dst_y,
1167 };
1168 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
1169 device->meta_state.itoi.img_p_layout,
1170 VK_SHADER_STAGE_COMPUTE_BIT, 0, 16,
1171 push_constants);
1172
1173 radv_unaligned_dispatch(cmd_buffer, rects[r].width, rects[r].height, 1);
1174 }
1175 radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
1176 }
1177
/* Transient objects for one compute image clear; all members are created,
 * used and destroyed within a single meta operation.
 */
struct cleari_temps {
	struct radv_image_view dst_iview; /* storage view of the image being cleared */
	VkDescriptorSet set;              /* temp descriptor set binding the view */
};
1182
1183 static void
1184 cleari_bind_descriptors(struct radv_cmd_buffer *cmd_buffer,
1185 struct cleari_temps *tmp)
1186 {
1187 struct radv_device *device = cmd_buffer->device;
1188 VkDevice vk_device = radv_device_to_handle(cmd_buffer->device);
1189
1190 radv_temp_descriptor_set_create(device, cmd_buffer,
1191 device->meta_state.cleari.img_ds_layout,
1192 &tmp->set);
1193
1194 radv_UpdateDescriptorSets(vk_device,
1195 1, /* writeCount */
1196 (VkWriteDescriptorSet[]) {
1197 {
1198 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1199 .dstSet = tmp->set,
1200 .dstBinding = 0,
1201 .dstArrayElement = 0,
1202 .descriptorCount = 1,
1203 .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
1204 .pImageInfo = (VkDescriptorImageInfo[]) {
1205 {
1206 .sampler = NULL,
1207 .imageView = radv_image_view_to_handle(&tmp->dst_iview),
1208 .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
1209 },
1210 }
1211 },
1212 }, 0, NULL);
1213
1214 radv_CmdBindDescriptorSets(radv_cmd_buffer_to_handle(cmd_buffer),
1215 VK_PIPELINE_BIND_POINT_COMPUTE,
1216 device->meta_state.cleari.img_p_layout, 0, 1,
1217 &tmp->set, 0, NULL);
1218 }
1219
1220 static void
1221 cleari_bind_pipeline(struct radv_cmd_buffer *cmd_buffer)
1222 {
1223 VkPipeline pipeline =
1224 cmd_buffer->device->meta_state.cleari.pipeline;
1225
1226 if (cmd_buffer->state.compute_pipeline != radv_pipeline_from_handle(pipeline)) {
1227 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
1228 VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);
1229 }
1230 }
1231
1232 void
1233 radv_meta_clear_image_cs(struct radv_cmd_buffer *cmd_buffer,
1234 struct radv_meta_blit2d_surf *dst,
1235 const VkClearColorValue *clear_color)
1236 {
1237 struct radv_device *device = cmd_buffer->device;
1238 struct cleari_temps temps;
1239
1240 create_iview(cmd_buffer, dst, VK_IMAGE_USAGE_STORAGE_BIT, &temps.dst_iview);
1241 cleari_bind_descriptors(cmd_buffer, &temps);
1242
1243 cleari_bind_pipeline(cmd_buffer);
1244
1245 unsigned push_constants[4] = {
1246 clear_color->uint32[0],
1247 clear_color->uint32[1],
1248 clear_color->uint32[2],
1249 clear_color->uint32[3],
1250 };
1251
1252 radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
1253 device->meta_state.cleari.img_p_layout,
1254 VK_SHADER_STAGE_COMPUTE_BIT, 0, 16,
1255 push_constants);
1256
1257 radv_unaligned_dispatch(cmd_buffer, dst->image->extent.width, dst->image->extent.height, 1);
1258 radv_temp_descriptor_set_destroy(cmd_buffer->device, temps.set);
1259 }