radv: set num_components on vulkan_resource_index intrinsic
[mesa.git] / src / amd / vulkan / radv_meta_buffer.c
1 #include "radv_meta.h"
2 #include "nir/nir_builder.h"
3
4 #include "sid.h"
5 #include "radv_cs.h"
6
/* Build a compute shader that fills a storage buffer with a constant
 * 32-bit pattern. Each invocation writes one vec4 (16 bytes); the fill
 * value comes from a 4-byte push constant and the destination SSBO is
 * at descriptor set 0, binding 0. */
static nir_shader *
build_buffer_fill_shader(struct radv_device *dev)
{
	nir_builder b;

	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
	b.shader->info.name = ralloc_strdup(b.shader, "meta_buffer_fill");
	/* 64 invocations per workgroup; at 16 bytes per invocation one
	 * workgroup covers 1024 bytes. */
	b.shader->info.cs.local_size[0] = 64;
	b.shader->info.cs.local_size[1] = 1;
	b.shader->info.cs.local_size[2] = 1;

	nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
	nir_ssa_def *wg_id = nir_load_work_group_id(&b);
	nir_ssa_def *block_size = nir_imm_ivec4(&b,
						b.shader->info.cs.local_size[0],
						b.shader->info.cs.local_size[1],
						b.shader->info.cs.local_size[2], 0);

	/* global_id = wg_id * local_size + invoc_id */
	nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

	/* Byte offset of this invocation's vec4; only the x component of
	 * the id vector is meaningful for a 1D dispatch. */
	nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
	offset = nir_channel(&b, offset, 0);

	/* Resolve the destination SSBO (set 0, binding 0, array index 0). */
	nir_intrinsic_instr *dst_buf = nir_intrinsic_instr_create(b.shader,
								  nir_intrinsic_vulkan_resource_index);
	dst_buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	dst_buf->num_components = 1;
	nir_intrinsic_set_desc_set(dst_buf, 0);
	nir_intrinsic_set_binding(dst_buf, 0);
	nir_ssa_dest_init(&dst_buf->instr, &dst_buf->dest, dst_buf->num_components, 32, NULL);
	nir_builder_instr_insert(&b, &dst_buf->instr);

	/* Load the 32-bit fill value from push constant offset 0. */
	nir_intrinsic_instr *load = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
	nir_intrinsic_set_base(load, 0);
	nir_intrinsic_set_range(load, 4);
	load->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	load->num_components = 1;
	nir_ssa_dest_init(&load->instr, &load->dest, 1, 32, "fill_value");
	nir_builder_instr_insert(&b, &load->instr);

	/* Broadcast the scalar fill value across all four components. */
	nir_ssa_def *swizzled_load = nir_swizzle(&b, &load->dest.ssa, (unsigned[]) { 0, 0, 0, 0}, 4, false);

	/* Store the replicated vec4 at the computed byte offset. */
	nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
	store->src[0] = nir_src_for_ssa(swizzled_load);
	store->src[1] = nir_src_for_ssa(&dst_buf->dest.ssa);
	store->src[2] = nir_src_for_ssa(offset);
	nir_intrinsic_set_write_mask(store, 0xf);
	store->num_components = 4;
	nir_builder_instr_insert(&b, &store->instr);

	return b.shader;
}
59
/* Build a compute shader that copies between two storage buffers. Each
 * invocation moves one vec4 (16 bytes). The destination SSBO is bound at
 * set 0, binding 0 and the source SSBO at set 0, binding 1; the same byte
 * offset is used for both load and store. */
static nir_shader *
build_buffer_copy_shader(struct radv_device *dev)
{
	nir_builder b;

	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
	b.shader->info.name = ralloc_strdup(b.shader, "meta_buffer_copy");
	/* 64 invocations per workgroup; at 16 bytes per invocation one
	 * workgroup moves 1024 bytes. */
	b.shader->info.cs.local_size[0] = 64;
	b.shader->info.cs.local_size[1] = 1;
	b.shader->info.cs.local_size[2] = 1;

	nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
	nir_ssa_def *wg_id = nir_load_work_group_id(&b);
	nir_ssa_def *block_size = nir_imm_ivec4(&b,
						b.shader->info.cs.local_size[0],
						b.shader->info.cs.local_size[1],
						b.shader->info.cs.local_size[2], 0);

	/* global_id = wg_id * local_size + invoc_id */
	nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

	/* Byte offset of this invocation's vec4; only the x component of
	 * the id vector is meaningful for a 1D dispatch. */
	nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
	offset = nir_channel(&b, offset, 0);

	/* Resolve the destination SSBO (set 0, binding 0). */
	nir_intrinsic_instr *dst_buf = nir_intrinsic_instr_create(b.shader,
								  nir_intrinsic_vulkan_resource_index);
	dst_buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	dst_buf->num_components = 1;
	nir_intrinsic_set_desc_set(dst_buf, 0);
	nir_intrinsic_set_binding(dst_buf, 0);
	nir_ssa_dest_init(&dst_buf->instr, &dst_buf->dest, dst_buf->num_components, 32, NULL);
	nir_builder_instr_insert(&b, &dst_buf->instr);

	/* Resolve the source SSBO (set 0, binding 1). */
	nir_intrinsic_instr *src_buf = nir_intrinsic_instr_create(b.shader,
								  nir_intrinsic_vulkan_resource_index);
	src_buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
	src_buf->num_components = 1;
	nir_intrinsic_set_desc_set(src_buf, 0);
	nir_intrinsic_set_binding(src_buf, 1);
	nir_ssa_dest_init(&src_buf->instr, &src_buf->dest, src_buf->num_components, 32, NULL);
	nir_builder_instr_insert(&b, &src_buf->instr);

	/* Load a vec4 from the source buffer at the computed offset. */
	nir_intrinsic_instr *load = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_ssbo);
	load->src[0] = nir_src_for_ssa(&src_buf->dest.ssa);
	load->src[1] = nir_src_for_ssa(offset);
	nir_ssa_dest_init(&load->instr, &load->dest, 4, 32, NULL);
	load->num_components = 4;
	nir_builder_instr_insert(&b, &load->instr);

	/* Store it to the destination buffer at the same offset. */
	nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
	store->src[0] = nir_src_for_ssa(&load->dest.ssa);
	store->src[1] = nir_src_for_ssa(&dst_buf->dest.ssa);
	store->src[2] = nir_src_for_ssa(offset);
	nir_intrinsic_set_write_mask(store, 0xf);
	store->num_components = 4;
	nir_builder_instr_insert(&b, &store->instr);

	return b.shader;
}
118
119
120
/* Create the device-level objects used by the meta buffer operations:
 * descriptor set layouts, pipeline layouts and compute pipelines for the
 * buffer-fill and buffer-copy shaders. On failure, any objects already
 * created are torn down via radv_device_finish_meta_buffer_state(). The
 * NIR shaders are freed on both paths once pipeline creation is done. */
VkResult radv_device_init_meta_buffer_state(struct radv_device *device)
{
	VkResult result;
	struct radv_shader_module fill_cs = { .nir = NULL };
	struct radv_shader_module copy_cs = { .nir = NULL };

	fill_cs.nir = build_buffer_fill_shader(device);
	copy_cs.nir = build_buffer_copy_shader(device);

	/* Fill: a single storage buffer at binding 0, delivered through
	 * push descriptors (no descriptor pool allocation needed). */
	VkDescriptorSetLayoutCreateInfo fill_ds_create_info = {
		.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
		.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
		.bindingCount = 1,
		.pBindings = (VkDescriptorSetLayoutBinding[]) {
			{
				.binding = 0,
				.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
				.descriptorCount = 1,
				.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
				.pImmutableSamplers = NULL
			},
		}
	};

	result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
						&fill_ds_create_info,
						&device->meta_state.alloc,
						&device->meta_state.buffer.fill_ds_layout);
	if (result != VK_SUCCESS)
		goto fail;

	/* Copy: destination SSBO at binding 0, source SSBO at binding 1,
	 * matching the bindings used in build_buffer_copy_shader(). */
	VkDescriptorSetLayoutCreateInfo copy_ds_create_info = {
		.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
		.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
		.bindingCount = 2,
		.pBindings = (VkDescriptorSetLayoutBinding[]) {
			{
				.binding = 0,
				.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
				.descriptorCount = 1,
				.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
				.pImmutableSamplers = NULL
			},
			{
				.binding = 1,
				.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
				.descriptorCount = 1,
				.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
				.pImmutableSamplers = NULL
			},
		}
	};

	result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
						&copy_ds_create_info,
						&device->meta_state.alloc,
						&device->meta_state.buffer.copy_ds_layout);
	if (result != VK_SUCCESS)
		goto fail;


	/* The fill pipeline takes a 4-byte push constant (the fill value). */
	VkPipelineLayoutCreateInfo fill_pl_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
		.setLayoutCount = 1,
		.pSetLayouts = &device->meta_state.buffer.fill_ds_layout,
		.pushConstantRangeCount = 1,
		.pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 4},
	};

	result = radv_CreatePipelineLayout(radv_device_to_handle(device),
					   &fill_pl_create_info,
					   &device->meta_state.alloc,
					   &device->meta_state.buffer.fill_p_layout);
	if (result != VK_SUCCESS)
		goto fail;

	/* The copy pipeline needs no push constants. */
	VkPipelineLayoutCreateInfo copy_pl_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
		.setLayoutCount = 1,
		.pSetLayouts = &device->meta_state.buffer.copy_ds_layout,
		.pushConstantRangeCount = 0,
	};

	result = radv_CreatePipelineLayout(radv_device_to_handle(device),
					   &copy_pl_create_info,
					   &device->meta_state.alloc,
					   &device->meta_state.buffer.copy_p_layout);
	if (result != VK_SUCCESS)
		goto fail;

	VkPipelineShaderStageCreateInfo fill_pipeline_shader_stage = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
		.stage = VK_SHADER_STAGE_COMPUTE_BIT,
		.module = radv_shader_module_to_handle(&fill_cs),
		.pName = "main",
		.pSpecializationInfo = NULL,
	};

	VkComputePipelineCreateInfo fill_vk_pipeline_info = {
		.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
		.stage = fill_pipeline_shader_stage,
		.flags = 0,
		.layout = device->meta_state.buffer.fill_p_layout,
	};

	result = radv_CreateComputePipelines(radv_device_to_handle(device),
					     radv_pipeline_cache_to_handle(&device->meta_state.cache),
					     1, &fill_vk_pipeline_info, NULL,
					     &device->meta_state.buffer.fill_pipeline);
	if (result != VK_SUCCESS)
		goto fail;

	VkPipelineShaderStageCreateInfo copy_pipeline_shader_stage = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
		.stage = VK_SHADER_STAGE_COMPUTE_BIT,
		.module = radv_shader_module_to_handle(&copy_cs),
		.pName = "main",
		.pSpecializationInfo = NULL,
	};

	VkComputePipelineCreateInfo copy_vk_pipeline_info = {
		.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
		.stage = copy_pipeline_shader_stage,
		.flags = 0,
		.layout = device->meta_state.buffer.copy_p_layout,
	};

	result = radv_CreateComputePipelines(radv_device_to_handle(device),
					     radv_pipeline_cache_to_handle(&device->meta_state.cache),
					     1, &copy_vk_pipeline_info, NULL,
					     &device->meta_state.buffer.copy_pipeline);
	if (result != VK_SUCCESS)
		goto fail;

	/* The NIR is owned by the pipelines now; free the local copies. */
	ralloc_free(fill_cs.nir);
	ralloc_free(copy_cs.nir);
	return VK_SUCCESS;
fail:
	radv_device_finish_meta_buffer_state(device);
	ralloc_free(fill_cs.nir);
	ralloc_free(copy_cs.nir);
	return result;
}
264
265 void radv_device_finish_meta_buffer_state(struct radv_device *device)
266 {
267 struct radv_meta_state *state = &device->meta_state;
268
269 radv_DestroyPipeline(radv_device_to_handle(device),
270 state->buffer.copy_pipeline, &state->alloc);
271 radv_DestroyPipeline(radv_device_to_handle(device),
272 state->buffer.fill_pipeline, &state->alloc);
273 radv_DestroyPipelineLayout(radv_device_to_handle(device),
274 state->buffer.copy_p_layout, &state->alloc);
275 radv_DestroyPipelineLayout(radv_device_to_handle(device),
276 state->buffer.fill_p_layout, &state->alloc);
277 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
278 state->buffer.copy_ds_layout,
279 &state->alloc);
280 radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
281 state->buffer.fill_ds_layout,
282 &state->alloc);
283 }
284
/* Fill 'size' bytes of 'bo' starting at 'offset' with 'value' using the
 * compute fill pipeline. Saves and restores the caller's compute
 * pipeline, push constants and descriptors around the dispatch. */
static void fill_buffer_shader(struct radv_cmd_buffer *cmd_buffer,
			       struct radeon_winsys_bo *bo,
			       uint64_t offset, uint64_t size, uint32_t value)
{
	struct radv_device *device = cmd_buffer->device;
	/* Each 64-lane workgroup writes 64 * 16 = 1024 bytes, so dispatch
	 * one workgroup per 1024 bytes. */
	uint64_t block_count = round_up_u64(size, 1024);
	struct radv_meta_saved_state saved_state;

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_COMPUTE_PIPELINE |
		       RADV_META_SAVE_CONSTANTS |
		       RADV_META_SAVE_DESCRIPTORS);

	/* Wrap the raw BO range in a temporary radv_buffer so it can be
	 * referenced through a descriptor. */
	struct radv_buffer dst_buffer = {
		.bo = bo,
		.offset = offset,
		.size = size
	};

	radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
			     VK_PIPELINE_BIND_POINT_COMPUTE,
			     device->meta_state.buffer.fill_pipeline);

	/* Bind the destination SSBO at set 0, binding 0 — the binding the
	 * fill shader expects. */
	radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
				      device->meta_state.buffer.fill_p_layout,
				      0, /* set */
				      1, /* descriptorWriteCount */
				      (VkWriteDescriptorSet[]) {
					{
						.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						.dstBinding = 0,
						.dstArrayElement = 0,
						.descriptorCount = 1,
						.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
						.pBufferInfo = &(VkDescriptorBufferInfo) {
							.buffer = radv_buffer_to_handle(&dst_buffer),
							.offset = 0,
							.range = size
						}
					}
				      });

	/* The fill value is the shader's single 4-byte push constant. */
	radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
			      device->meta_state.buffer.fill_p_layout,
			      VK_SHADER_STAGE_COMPUTE_BIT, 0, 4,
			      &value);

	radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);

	radv_meta_restore(&saved_state, cmd_buffer);
}
336
/* Copy 'size' bytes from src_bo+src_offset to dst_bo+dst_offset using the
 * compute copy pipeline. Saves and restores the caller's compute pipeline
 * and descriptors around the dispatch (no push constants are used). */
static void copy_buffer_shader(struct radv_cmd_buffer *cmd_buffer,
			       struct radeon_winsys_bo *src_bo,
			       struct radeon_winsys_bo *dst_bo,
			       uint64_t src_offset, uint64_t dst_offset,
			       uint64_t size)
{
	struct radv_device *device = cmd_buffer->device;
	/* Each 64-lane workgroup copies 64 * 16 = 1024 bytes, so dispatch
	 * one workgroup per 1024 bytes. */
	uint64_t block_count = round_up_u64(size, 1024);
	struct radv_meta_saved_state saved_state;

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_COMPUTE_PIPELINE |
		       RADV_META_SAVE_DESCRIPTORS);

	/* Wrap both raw BO ranges in temporary radv_buffers so they can be
	 * referenced through descriptors. */
	struct radv_buffer dst_buffer = {
		.bo = dst_bo,
		.offset = dst_offset,
		.size = size
	};

	struct radv_buffer src_buffer = {
		.bo = src_bo,
		.offset = src_offset,
		.size = size
	};

	radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
			     VK_PIPELINE_BIND_POINT_COMPUTE,
			     device->meta_state.buffer.copy_pipeline);

	/* Destination SSBO at binding 0, source SSBO at binding 1 — the
	 * bindings the copy shader expects. */
	radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
				      device->meta_state.buffer.copy_p_layout,
				      0, /* set */
				      2, /* descriptorWriteCount */
				      (VkWriteDescriptorSet[]) {
					{
						.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						.dstBinding = 0,
						.dstArrayElement = 0,
						.descriptorCount = 1,
						.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
						.pBufferInfo = &(VkDescriptorBufferInfo) {
							.buffer = radv_buffer_to_handle(&dst_buffer),
							.offset = 0,
							.range = size
						}
					},
					{
						.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
						.dstBinding = 1,
						.dstArrayElement = 0,
						.descriptorCount = 1,
						.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
						.pBufferInfo = &(VkDescriptorBufferInfo) {
							.buffer = radv_buffer_to_handle(&src_buffer),
							.offset = 0,
							.range = size
						}
					}
				      });

	radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);

	radv_meta_restore(&saved_state, cmd_buffer);
}
402
403
404 uint32_t radv_fill_buffer(struct radv_cmd_buffer *cmd_buffer,
405 struct radeon_winsys_bo *bo,
406 uint64_t offset, uint64_t size, uint32_t value)
407 {
408 uint32_t flush_bits = 0;
409
410 assert(!(offset & 3));
411 assert(!(size & 3));
412
413 if (size >= RADV_BUFFER_OPS_CS_THRESHOLD) {
414 fill_buffer_shader(cmd_buffer, bo, offset, size, value);
415 flush_bits = RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
416 RADV_CMD_FLAG_INV_VMEM_L1 |
417 RADV_CMD_FLAG_WRITEBACK_GLOBAL_L2;
418 } else if (size) {
419 uint64_t va = radv_buffer_get_va(bo);
420 va += offset;
421 radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, bo);
422 si_cp_dma_clear_buffer(cmd_buffer, va, size, value);
423 }
424
425 return flush_bits;
426 }
427
428 static
429 void radv_copy_buffer(struct radv_cmd_buffer *cmd_buffer,
430 struct radeon_winsys_bo *src_bo,
431 struct radeon_winsys_bo *dst_bo,
432 uint64_t src_offset, uint64_t dst_offset,
433 uint64_t size)
434 {
435 if (size >= RADV_BUFFER_OPS_CS_THRESHOLD && !(size & 3) && !(src_offset & 3) && !(dst_offset & 3))
436 copy_buffer_shader(cmd_buffer, src_bo, dst_bo,
437 src_offset, dst_offset, size);
438 else if (size) {
439 uint64_t src_va = radv_buffer_get_va(src_bo);
440 uint64_t dst_va = radv_buffer_get_va(dst_bo);
441 src_va += src_offset;
442 dst_va += dst_offset;
443
444 radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, src_bo);
445 radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, dst_bo);
446
447 si_cp_dma_buffer_copy(cmd_buffer, src_va, dst_va, size);
448 }
449 }
450
451 void radv_CmdFillBuffer(
452 VkCommandBuffer commandBuffer,
453 VkBuffer dstBuffer,
454 VkDeviceSize dstOffset,
455 VkDeviceSize fillSize,
456 uint32_t data)
457 {
458 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
459 RADV_FROM_HANDLE(radv_buffer, dst_buffer, dstBuffer);
460
461 if (fillSize == VK_WHOLE_SIZE)
462 fillSize = (dst_buffer->size - dstOffset) & ~3ull;
463
464 radv_fill_buffer(cmd_buffer, dst_buffer->bo, dst_buffer->offset + dstOffset,
465 fillSize, data);
466 }
467
468 void radv_CmdCopyBuffer(
469 VkCommandBuffer commandBuffer,
470 VkBuffer srcBuffer,
471 VkBuffer destBuffer,
472 uint32_t regionCount,
473 const VkBufferCopy* pRegions)
474 {
475 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
476 RADV_FROM_HANDLE(radv_buffer, src_buffer, srcBuffer);
477 RADV_FROM_HANDLE(radv_buffer, dest_buffer, destBuffer);
478 bool old_predicating;
479
480 /* VK_EXT_conditional_rendering says that copy commands should not be
481 * affected by conditional rendering.
482 */
483 old_predicating = cmd_buffer->state.predicating;
484 cmd_buffer->state.predicating = false;
485
486 for (unsigned r = 0; r < regionCount; r++) {
487 uint64_t src_offset = src_buffer->offset + pRegions[r].srcOffset;
488 uint64_t dest_offset = dest_buffer->offset + pRegions[r].dstOffset;
489 uint64_t copy_size = pRegions[r].size;
490
491 radv_copy_buffer(cmd_buffer, src_buffer->bo, dest_buffer->bo,
492 src_offset, dest_offset, copy_size);
493 }
494
495 /* Restore conditional rendering. */
496 cmd_buffer->state.predicating = old_predicating;
497 }
498
/* vkCmdUpdateBuffer entry point. Small updates are embedded directly into
 * the command stream with a WRITE_DATA PM4 packet; larger ones are staged
 * in the command buffer's upload BO and copied to the destination. */
void radv_CmdUpdateBuffer(
	VkCommandBuffer                             commandBuffer,
	VkBuffer                                    dstBuffer,
	VkDeviceSize                                dstOffset,
	VkDeviceSize                                dataSize,
	const void*                                 pData)
{
	RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
	RADV_FROM_HANDLE(radv_buffer, dst_buffer, dstBuffer);
	/* MEC = compute-queue microcode engine; it needs a different
	 * DST_SEL in the WRITE_DATA packet than the graphics ME. */
	bool mec = radv_cmd_buffer_uses_mec(cmd_buffer);
	uint64_t words = dataSize / 4;
	uint64_t va = radv_buffer_get_va(dst_buffer->bo);
	va += dstOffset + dst_buffer->offset;

	/* Vulkan requires dataSize and the destination to be 4-aligned. */
	assert(!(dataSize & 3));
	assert(!(va & 3));

	if (!dataSize)
		return;

	if (dataSize < RADV_BUFFER_UPDATE_THRESHOLD) {
		si_emit_cache_flush(cmd_buffer);

		radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, dst_buffer->bo);

		/* Packet size: 1 header + 3 control/address dwords + payload. */
		radeon_check_space(cmd_buffer->device->ws, cmd_buffer->cs, words + 4);

		radeon_emit(cmd_buffer->cs, PKT3(PKT3_WRITE_DATA, 2 + words, 0));
		radeon_emit(cmd_buffer->cs, S_370_DST_SEL(mec ?
					    V_370_MEM : V_370_MEM_GRBM) |
			    S_370_WR_CONFIRM(1) |
			    S_370_ENGINE_SEL(V_370_ME));
		/* 64-bit destination address, low dword first. */
		radeon_emit(cmd_buffer->cs, va);
		radeon_emit(cmd_buffer->cs, va >> 32);
		radeon_emit_array(cmd_buffer->cs, pData, words);

		if (unlikely(cmd_buffer->device->trace_bo))
			radv_cmd_buffer_trace_emit(cmd_buffer);
	} else {
		/* Large update: stage the data in the upload BO, then copy
		 * it into place on the GPU. */
		uint32_t buf_offset;
		radv_cmd_buffer_upload_data(cmd_buffer, dataSize, 32, pData, &buf_offset);
		radv_copy_buffer(cmd_buffer, cmd_buffer->upload.upload_bo, dst_buffer->bo,
				 buf_offset, dstOffset + dst_buffer->offset, dataSize);
	}
}