nir: replace nir_load_system_value calls with appropriate builder functions
[mesa.git] / src / amd / vulkan / radv_meta_buffer.c
#include "radv_meta.h"
#include "nir/nir_builder.h"

#include "sid.h"
#include "radv_cs.h"

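/* Build a compute shader that fills a storage buffer with a 32-bit value
 * taken from a push constant. Each invocation stores one 16-byte (vec4)
 * chunk, so a 64-thread workgroup covers 1024 bytes.
 */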
static nir_shader *
build_buffer_fill_shader(struct radv_device *dev)
{
    nir_builder b;

    nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
    b.shader->info.name = ralloc_strdup(b.shader, "meta_buffer_fill");
    b.shader->info.cs.local_size[0] = 64;
    b.shader->info.cs.local_size[1] = 1;
    b.shader->info.cs.local_size[2] = 1;

    nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
    nir_ssa_def *wg_id = nir_load_work_group_id(&b);
    nir_ssa_def *block_size = nir_imm_ivec4(&b,
                                            b.shader->info.cs.local_size[0],
                                            b.shader->info.cs.local_size[1],
                                            b.shader->info.cs.local_size[2], 0);

    nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

    /* Each invocation stores 16 bytes; only the x component matters for a
     * 1D dispatch.
     */
    nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
    offset = nir_channel(&b, offset, 0);

    /* Destination SSBO: set 0, binding 0. */
    nir_intrinsic_instr *dst_buf = nir_intrinsic_instr_create(b.shader,
                                                              nir_intrinsic_vulkan_resource_index);
    dst_buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
    nir_intrinsic_set_desc_set(dst_buf, 0);
    nir_intrinsic_set_binding(dst_buf, 0);
    nir_ssa_dest_init(&dst_buf->instr, &dst_buf->dest, 1, 32, NULL);
    nir_builder_instr_insert(&b, &dst_buf->instr);

    /* Load the 32-bit fill value from the push constants. */
    nir_intrinsic_instr *load = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_push_constant);
    nir_intrinsic_set_base(load, 0);
    nir_intrinsic_set_range(load, 4);
    load->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
    load->num_components = 1;
    nir_ssa_dest_init(&load->instr, &load->dest, 1, 32, "fill_value");
    nir_builder_instr_insert(&b, &load->instr);

    /* Replicate the scalar across all four components of the store. */
    nir_ssa_def *swizzled_load = nir_swizzle(&b, &load->dest.ssa, (unsigned[]) {0, 0, 0, 0}, 4, false);

    nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
    store->src[0] = nir_src_for_ssa(swizzled_load);
    store->src[1] = nir_src_for_ssa(&dst_buf->dest.ssa);
    store->src[2] = nir_src_for_ssa(offset);
    nir_intrinsic_set_write_mask(store, 0xf);
    store->num_components = 4;
    nir_builder_instr_insert(&b, &store->instr);

    return b.shader;
}

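/* Build a compute shader that copies between two storage buffers in
 * 16-byte chunks: binding 1 is the source, binding 0 the destination.
 */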
static nir_shader *
build_buffer_copy_shader(struct radv_device *dev)
{
    nir_builder b;

    nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_COMPUTE, NULL);
    b.shader->info.name = ralloc_strdup(b.shader, "meta_buffer_copy");
    b.shader->info.cs.local_size[0] = 64;
    b.shader->info.cs.local_size[1] = 1;
    b.shader->info.cs.local_size[2] = 1;

    nir_ssa_def *invoc_id = nir_load_local_invocation_id(&b);
    nir_ssa_def *wg_id = nir_load_work_group_id(&b);
    nir_ssa_def *block_size = nir_imm_ivec4(&b,
                                            b.shader->info.cs.local_size[0],
                                            b.shader->info.cs.local_size[1],
                                            b.shader->info.cs.local_size[2], 0);

    nir_ssa_def *global_id = nir_iadd(&b, nir_imul(&b, wg_id, block_size), invoc_id);

    nir_ssa_def *offset = nir_imul(&b, global_id, nir_imm_int(&b, 16));
    offset = nir_channel(&b, offset, 0);

    /* Destination SSBO: set 0, binding 0. */
    nir_intrinsic_instr *dst_buf = nir_intrinsic_instr_create(b.shader,
                                                              nir_intrinsic_vulkan_resource_index);
    dst_buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
    nir_intrinsic_set_desc_set(dst_buf, 0);
    nir_intrinsic_set_binding(dst_buf, 0);
    nir_ssa_dest_init(&dst_buf->instr, &dst_buf->dest, 1, 32, NULL);
    nir_builder_instr_insert(&b, &dst_buf->instr);

    /* Source SSBO: set 0, binding 1. */
    nir_intrinsic_instr *src_buf = nir_intrinsic_instr_create(b.shader,
                                                              nir_intrinsic_vulkan_resource_index);
    src_buf->src[0] = nir_src_for_ssa(nir_imm_int(&b, 0));
    nir_intrinsic_set_desc_set(src_buf, 0);
    nir_intrinsic_set_binding(src_buf, 1);
    nir_ssa_dest_init(&src_buf->instr, &src_buf->dest, 1, 32, NULL);
    nir_builder_instr_insert(&b, &src_buf->instr);

    nir_intrinsic_instr *load = nir_intrinsic_instr_create(b.shader, nir_intrinsic_load_ssbo);
    load->src[0] = nir_src_for_ssa(&src_buf->dest.ssa);
    load->src[1] = nir_src_for_ssa(offset);
    nir_ssa_dest_init(&load->instr, &load->dest, 4, 32, NULL);
    load->num_components = 4;
    nir_builder_instr_insert(&b, &load->instr);

    nir_intrinsic_instr *store = nir_intrinsic_instr_create(b.shader, nir_intrinsic_store_ssbo);
    store->src[0] = nir_src_for_ssa(&load->dest.ssa);
    store->src[1] = nir_src_for_ssa(&dst_buf->dest.ssa);
    store->src[2] = nir_src_for_ssa(offset);
    nir_intrinsic_set_write_mask(store, 0xf);
    store->num_components = 4;
    nir_builder_instr_insert(&b, &store->instr);

    return b.shader;
}

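/* Create the descriptor set layouts, pipeline layouts and compute
 * pipelines used by the buffer fill and copy meta operations. The NIR
 * shaders are freed once the pipelines have been created.
 */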
VkResult radv_device_init_meta_buffer_state(struct radv_device *device)
{
    VkResult result;
    struct radv_shader_module fill_cs = { .nir = NULL };
    struct radv_shader_module copy_cs = { .nir = NULL };

    fill_cs.nir = build_buffer_fill_shader(device);
    copy_cs.nir = build_buffer_copy_shader(device);

    VkDescriptorSetLayoutCreateInfo fill_ds_create_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
        .bindingCount = 1,
        .pBindings = (VkDescriptorSetLayoutBinding[]) {
            {
                .binding = 0,
                .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                .descriptorCount = 1,
                .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
                .pImmutableSamplers = NULL
            },
        }
    };

    result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
                                            &fill_ds_create_info,
                                            &device->meta_state.alloc,
                                            &device->meta_state.buffer.fill_ds_layout);
    if (result != VK_SUCCESS)
        goto fail;

    VkDescriptorSetLayoutCreateInfo copy_ds_create_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
        .bindingCount = 2,
        .pBindings = (VkDescriptorSetLayoutBinding[]) {
            {
                .binding = 0,
                .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                .descriptorCount = 1,
                .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
                .pImmutableSamplers = NULL
            },
            {
                .binding = 1,
                .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                .descriptorCount = 1,
                .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
                .pImmutableSamplers = NULL
            },
        }
    };

    result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device),
                                            &copy_ds_create_info,
                                            &device->meta_state.alloc,
                                            &device->meta_state.buffer.copy_ds_layout);
    if (result != VK_SUCCESS)
        goto fail;

    VkPipelineLayoutCreateInfo fill_pl_create_info = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
        .setLayoutCount = 1,
        .pSetLayouts = &device->meta_state.buffer.fill_ds_layout,
        .pushConstantRangeCount = 1,
        .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 4},
    };

    result = radv_CreatePipelineLayout(radv_device_to_handle(device),
                                       &fill_pl_create_info,
                                       &device->meta_state.alloc,
                                       &device->meta_state.buffer.fill_p_layout);
    if (result != VK_SUCCESS)
        goto fail;

    VkPipelineLayoutCreateInfo copy_pl_create_info = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
        .setLayoutCount = 1,
        .pSetLayouts = &device->meta_state.buffer.copy_ds_layout,
        .pushConstantRangeCount = 0,
    };

    result = radv_CreatePipelineLayout(radv_device_to_handle(device),
                                       &copy_pl_create_info,
                                       &device->meta_state.alloc,
                                       &device->meta_state.buffer.copy_p_layout);
    if (result != VK_SUCCESS)
        goto fail;

    VkPipelineShaderStageCreateInfo fill_pipeline_shader_stage = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
        .stage = VK_SHADER_STAGE_COMPUTE_BIT,
        .module = radv_shader_module_to_handle(&fill_cs),
        .pName = "main",
        .pSpecializationInfo = NULL,
    };

    VkComputePipelineCreateInfo fill_vk_pipeline_info = {
        .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        .stage = fill_pipeline_shader_stage,
        .flags = 0,
        .layout = device->meta_state.buffer.fill_p_layout,
    };

    result = radv_CreateComputePipelines(radv_device_to_handle(device),
                                         radv_pipeline_cache_to_handle(&device->meta_state.cache),
                                         1, &fill_vk_pipeline_info, NULL,
                                         &device->meta_state.buffer.fill_pipeline);
    if (result != VK_SUCCESS)
        goto fail;

    VkPipelineShaderStageCreateInfo copy_pipeline_shader_stage = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
        .stage = VK_SHADER_STAGE_COMPUTE_BIT,
        .module = radv_shader_module_to_handle(&copy_cs),
        .pName = "main",
        .pSpecializationInfo = NULL,
    };

    VkComputePipelineCreateInfo copy_vk_pipeline_info = {
        .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        .stage = copy_pipeline_shader_stage,
        .flags = 0,
        .layout = device->meta_state.buffer.copy_p_layout,
    };

    result = radv_CreateComputePipelines(radv_device_to_handle(device),
                                         radv_pipeline_cache_to_handle(&device->meta_state.cache),
                                         1, &copy_vk_pipeline_info, NULL,
                                         &device->meta_state.buffer.copy_pipeline);
    if (result != VK_SUCCESS)
        goto fail;

    ralloc_free(fill_cs.nir);
    ralloc_free(copy_cs.nir);
    return VK_SUCCESS;
fail:
    radv_device_finish_meta_buffer_state(device);
    ralloc_free(fill_cs.nir);
    ralloc_free(copy_cs.nir);
    return result;
}

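/* Destroy everything created by radv_device_init_meta_buffer_state(). The
 * radv_Destroy* entry points accept VK_NULL_HANDLE, so this is safe to call
 * from the init function's failure path.
 */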
void radv_device_finish_meta_buffer_state(struct radv_device *device)
{
    struct radv_meta_state *state = &device->meta_state;

    radv_DestroyPipeline(radv_device_to_handle(device),
                         state->buffer.copy_pipeline, &state->alloc);
    radv_DestroyPipeline(radv_device_to_handle(device),
                         state->buffer.fill_pipeline, &state->alloc);
    radv_DestroyPipelineLayout(radv_device_to_handle(device),
                               state->buffer.copy_p_layout, &state->alloc);
    radv_DestroyPipelineLayout(radv_device_to_handle(device),
                               state->buffer.fill_p_layout, &state->alloc);
    radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
                                    state->buffer.copy_ds_layout,
                                    &state->alloc);
    radv_DestroyDescriptorSetLayout(radv_device_to_handle(device),
                                    state->buffer.fill_ds_layout,
                                    &state->alloc);
}

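/* Fill a buffer with the compute pipeline: bind the fill pipeline, push
 * the destination SSBO descriptor and the fill value, then dispatch one
 * 64-thread workgroup per 1024 bytes (each thread stores 16 bytes).
 */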
static void fill_buffer_shader(struct radv_cmd_buffer *cmd_buffer,
                               struct radeon_winsys_bo *bo,
                               uint64_t offset, uint64_t size, uint32_t value)
{
    struct radv_device *device = cmd_buffer->device;
    uint64_t block_count = round_up_u64(size, 1024);
    struct radv_meta_saved_state saved_state;

    radv_meta_save(&saved_state, cmd_buffer,
                   RADV_META_SAVE_COMPUTE_PIPELINE |
                   RADV_META_SAVE_CONSTANTS |
                   RADV_META_SAVE_DESCRIPTORS);

    struct radv_buffer dst_buffer = {
        .bo = bo,
        .offset = offset,
        .size = size
    };

    radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
                         VK_PIPELINE_BIND_POINT_COMPUTE,
                         device->meta_state.buffer.fill_pipeline);

    radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
                                  device->meta_state.buffer.fill_p_layout,
                                  0, /* set */
                                  1, /* descriptorWriteCount */
                                  (VkWriteDescriptorSet[]) {
                                      {
                                          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                          .dstBinding = 0,
                                          .dstArrayElement = 0,
                                          .descriptorCount = 1,
                                          .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                                          .pBufferInfo = &(VkDescriptorBufferInfo) {
                                              .buffer = radv_buffer_to_handle(&dst_buffer),
                                              .offset = 0,
                                              .range = size
                                          }
                                      }
                                  });

    radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
                          device->meta_state.buffer.fill_p_layout,
                          VK_SHADER_STAGE_COMPUTE_BIT, 0, 4,
                          &value);

    radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);

    radv_meta_restore(&saved_state, cmd_buffer);
}

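/* Copy between buffers with the compute pipeline: bind the copy pipeline,
 * push both SSBO descriptors, then dispatch one workgroup per 1024 bytes.
 */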
static void copy_buffer_shader(struct radv_cmd_buffer *cmd_buffer,
                               struct radeon_winsys_bo *src_bo,
                               struct radeon_winsys_bo *dst_bo,
                               uint64_t src_offset, uint64_t dst_offset,
                               uint64_t size)
{
    struct radv_device *device = cmd_buffer->device;
    uint64_t block_count = round_up_u64(size, 1024);
    struct radv_meta_saved_state saved_state;

    radv_meta_save(&saved_state, cmd_buffer,
                   RADV_META_SAVE_COMPUTE_PIPELINE |
                   RADV_META_SAVE_DESCRIPTORS);

    struct radv_buffer dst_buffer = {
        .bo = dst_bo,
        .offset = dst_offset,
        .size = size
    };

    struct radv_buffer src_buffer = {
        .bo = src_bo,
        .offset = src_offset,
        .size = size
    };

    radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
                         VK_PIPELINE_BIND_POINT_COMPUTE,
                         device->meta_state.buffer.copy_pipeline);

    radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
                                  device->meta_state.buffer.copy_p_layout,
                                  0, /* set */
                                  2, /* descriptorWriteCount */
                                  (VkWriteDescriptorSet[]) {
                                      {
                                          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                          .dstBinding = 0,
                                          .dstArrayElement = 0,
                                          .descriptorCount = 1,
                                          .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                                          .pBufferInfo = &(VkDescriptorBufferInfo) {
                                              .buffer = radv_buffer_to_handle(&dst_buffer),
                                              .offset = 0,
                                              .range = size
                                          }
                                      },
                                      {
                                          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                          .dstBinding = 1,
                                          .dstArrayElement = 0,
                                          .descriptorCount = 1,
                                          .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                                          .pBufferInfo = &(VkDescriptorBufferInfo) {
                                              .buffer = radv_buffer_to_handle(&src_buffer),
                                              .offset = 0,
                                              .range = size
                                          }
                                      }
                                  });

    radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);

    radv_meta_restore(&saved_state, cmd_buffer);
}

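/* Fill 'size' bytes at 'offset' in 'bo' with 'value'. Large fills go
 * through the compute path and return the cache flush bits the caller must
 * emit before the result is visible; small fills use CP DMA and return 0.
 */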
uint32_t radv_fill_buffer(struct radv_cmd_buffer *cmd_buffer,
                          struct radeon_winsys_bo *bo,
                          uint64_t offset, uint64_t size, uint32_t value)
{
    uint32_t flush_bits = 0;

    assert(!(offset & 3));
    assert(!(size & 3));

    if (size >= RADV_BUFFER_OPS_CS_THRESHOLD) {
        fill_buffer_shader(cmd_buffer, bo, offset, size, value);
        flush_bits = RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
                     RADV_CMD_FLAG_INV_VMEM_L1 |
                     RADV_CMD_FLAG_WRITEBACK_GLOBAL_L2;
    } else if (size) {
        uint64_t va = radv_buffer_get_va(bo);
        va += offset;
        radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, bo);
        si_cp_dma_clear_buffer(cmd_buffer, va, size, value);
    }

    return flush_bits;
}

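/* Copy through the compute path when the copy is large and dword-aligned;
 * otherwise fall back to CP DMA.
 */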
static void radv_copy_buffer(struct radv_cmd_buffer *cmd_buffer,
                             struct radeon_winsys_bo *src_bo,
                             struct radeon_winsys_bo *dst_bo,
                             uint64_t src_offset, uint64_t dst_offset,
                             uint64_t size)
{
    if (size >= RADV_BUFFER_OPS_CS_THRESHOLD && !(size & 3) &&
        !(src_offset & 3) && !(dst_offset & 3)) {
        copy_buffer_shader(cmd_buffer, src_bo, dst_bo,
                           src_offset, dst_offset, size);
    } else if (size) {
        uint64_t src_va = radv_buffer_get_va(src_bo);
        uint64_t dst_va = radv_buffer_get_va(dst_bo);
        src_va += src_offset;
        dst_va += dst_offset;

        radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, src_bo);
        radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, dst_bo);

        si_cp_dma_buffer_copy(cmd_buffer, src_va, dst_va, size);
    }
}

void radv_CmdFillBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer dstBuffer,
    VkDeviceSize dstOffset,
    VkDeviceSize fillSize,
    uint32_t data)
{
    RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
    RADV_FROM_HANDLE(radv_buffer, dst_buffer, dstBuffer);

    /* Per the spec, VK_WHOLE_SIZE fills the rest of the buffer, rounded
     * down to a multiple of 4.
     */
    if (fillSize == VK_WHOLE_SIZE)
        fillSize = (dst_buffer->size - dstOffset) & ~3ull;

    radv_fill_buffer(cmd_buffer, dst_buffer->bo, dst_buffer->offset + dstOffset,
                     fillSize, data);
}

void radv_CmdCopyBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer srcBuffer,
    VkBuffer destBuffer,
    uint32_t regionCount,
    const VkBufferCopy* pRegions)
{
    RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
    RADV_FROM_HANDLE(radv_buffer, src_buffer, srcBuffer);
    RADV_FROM_HANDLE(radv_buffer, dest_buffer, destBuffer);
    bool old_predicating;

    /* VK_EXT_conditional_rendering says that copy commands should not be
     * affected by conditional rendering.
     */
    old_predicating = cmd_buffer->state.predicating;
    cmd_buffer->state.predicating = false;

    for (unsigned r = 0; r < regionCount; r++) {
        uint64_t src_offset = src_buffer->offset + pRegions[r].srcOffset;
        uint64_t dest_offset = dest_buffer->offset + pRegions[r].dstOffset;
        uint64_t copy_size = pRegions[r].size;

        radv_copy_buffer(cmd_buffer, src_buffer->bo, dest_buffer->bo,
                         src_offset, dest_offset, copy_size);
    }

    /* Restore conditional rendering. */
    cmd_buffer->state.predicating = old_predicating;
}

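/* vkCmdUpdateBuffer: updates below RADV_BUFFER_UPDATE_THRESHOLD are written
 * inline into the command stream; anything larger is staged in the command
 * buffer's upload BO and copied with radv_copy_buffer().
 */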
void radv_CmdUpdateBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer dstBuffer,
    VkDeviceSize dstOffset,
    VkDeviceSize dataSize,
    const void* pData)
{
    RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
    RADV_FROM_HANDLE(radv_buffer, dst_buffer, dstBuffer);
    bool mec = radv_cmd_buffer_uses_mec(cmd_buffer);
    uint64_t words = dataSize / 4;
    uint64_t va = radv_buffer_get_va(dst_buffer->bo);
    va += dstOffset + dst_buffer->offset;

    assert(!(dataSize & 3));
    assert(!(va & 3));

    if (!dataSize)
        return;

    if (dataSize < RADV_BUFFER_UPDATE_THRESHOLD) {
        /* Emit the update inline in the command stream with a WRITE_DATA
         * packet: 4 header/address dwords plus the data itself.
         */
        si_emit_cache_flush(cmd_buffer);

        radv_cs_add_buffer(cmd_buffer->device->ws, cmd_buffer->cs, dst_buffer->bo);

        radeon_check_space(cmd_buffer->device->ws, cmd_buffer->cs, words + 4);

        radeon_emit(cmd_buffer->cs, PKT3(PKT3_WRITE_DATA, 2 + words, 0));
        radeon_emit(cmd_buffer->cs, S_370_DST_SEL(mec ?
                    V_370_MEM_ASYNC : V_370_MEMORY_SYNC) |
                    S_370_WR_CONFIRM(1) |
                    S_370_ENGINE_SEL(V_370_ME));
        radeon_emit(cmd_buffer->cs, va);
        radeon_emit(cmd_buffer->cs, va >> 32);
        radeon_emit_array(cmd_buffer->cs, pData, words);

        if (unlikely(cmd_buffer->device->trace_bo))
            radv_cmd_buffer_trace_emit(cmd_buffer);
    } else {
        /* Stage the data in the upload BO and copy it into place. */
        uint32_t buf_offset;
        radv_cmd_buffer_upload_data(cmd_buffer, dataSize, 32, pData, &buf_offset);
        radv_copy_buffer(cmd_buffer, cmd_buffer->upload.upload_bo, dst_buffer->bo,
                         buf_offset, dstOffset + dst_buffer->offset, dataSize);
    }
}
540 }