turnip: Fix indentation in function signatures
src/freedreno/vulkan/tu_cmd_buffer.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "tu_private.h"
#include "vk_format.h"

const struct tu_dynamic_state default_dynamic_state = {
   .viewport =
      {
         .count = 0,
      },
   .scissor =
      {
         .count = 0,
      },
   .line_width = 1.0f,
   .depth_bias =
      {
         .bias = 0.0f,
         .clamp = 0.0f,
         .slope = 0.0f,
      },
   .blend_constants = { 0.0f, 0.0f, 0.0f, 0.0f },
   .depth_bounds =
      {
         .min = 0.0f,
         .max = 1.0f,
      },
   .stencil_compare_mask =
      {
         .front = ~0u,
         .back = ~0u,
      },
   .stencil_write_mask =
      {
         .front = ~0u,
         .back = ~0u,
      },
   .stencil_reference =
      {
         .front = 0u,
         .back = 0u,
      },
};

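/* Copy dynamic state bound at pipeline creation time into the command
 * buffer. dest_mask accumulates a bit for each piece of state that actually
 * changed; this skeleton does not yet consume it to mark dirty state.
 */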
static void
tu_bind_dynamic_state(struct tu_cmd_buffer *cmd_buffer,
                      const struct tu_dynamic_state *src)
{
   struct tu_dynamic_state *dest = &cmd_buffer->state.dynamic;
   uint32_t copy_mask = src->mask;
   uint32_t dest_mask = 0;

   /* Make sure to copy the number of viewports/scissors because they can
    * only be specified at pipeline creation time.
    */
   dest->viewport.count = src->viewport.count;
   dest->scissor.count = src->scissor.count;
   dest->discard_rectangle.count = src->discard_rectangle.count;

   if (copy_mask & TU_DYNAMIC_VIEWPORT) {
      if (memcmp(&dest->viewport.viewports,
                 &src->viewport.viewports,
                 src->viewport.count * sizeof(VkViewport))) {
         typed_memcpy(dest->viewport.viewports,
                      src->viewport.viewports,
                      src->viewport.count);
         dest_mask |= TU_DYNAMIC_VIEWPORT;
      }
   }

   if (copy_mask & TU_DYNAMIC_SCISSOR) {
      if (memcmp(&dest->scissor.scissors,
                 &src->scissor.scissors,
                 src->scissor.count * sizeof(VkRect2D))) {
         typed_memcpy(
            dest->scissor.scissors, src->scissor.scissors, src->scissor.count);
         dest_mask |= TU_DYNAMIC_SCISSOR;
      }
   }

   if (copy_mask & TU_DYNAMIC_LINE_WIDTH) {
      if (dest->line_width != src->line_width) {
         dest->line_width = src->line_width;
         dest_mask |= TU_DYNAMIC_LINE_WIDTH;
      }
   }

   if (copy_mask & TU_DYNAMIC_DEPTH_BIAS) {
      if (memcmp(
             &dest->depth_bias, &src->depth_bias, sizeof(src->depth_bias))) {
         dest->depth_bias = src->depth_bias;
         dest_mask |= TU_DYNAMIC_DEPTH_BIAS;
      }
   }

   if (copy_mask & TU_DYNAMIC_BLEND_CONSTANTS) {
      if (memcmp(&dest->blend_constants,
                 &src->blend_constants,
                 sizeof(src->blend_constants))) {
         typed_memcpy(dest->blend_constants, src->blend_constants, 4);
         dest_mask |= TU_DYNAMIC_BLEND_CONSTANTS;
      }
   }

   if (copy_mask & TU_DYNAMIC_DEPTH_BOUNDS) {
      if (memcmp(&dest->depth_bounds,
                 &src->depth_bounds,
                 sizeof(src->depth_bounds))) {
         dest->depth_bounds = src->depth_bounds;
         dest_mask |= TU_DYNAMIC_DEPTH_BOUNDS;
      }
   }

   if (copy_mask & TU_DYNAMIC_STENCIL_COMPARE_MASK) {
      if (memcmp(&dest->stencil_compare_mask,
                 &src->stencil_compare_mask,
                 sizeof(src->stencil_compare_mask))) {
         dest->stencil_compare_mask = src->stencil_compare_mask;
         dest_mask |= TU_DYNAMIC_STENCIL_COMPARE_MASK;
      }
   }

   if (copy_mask & TU_DYNAMIC_STENCIL_WRITE_MASK) {
      if (memcmp(&dest->stencil_write_mask,
                 &src->stencil_write_mask,
                 sizeof(src->stencil_write_mask))) {
         dest->stencil_write_mask = src->stencil_write_mask;
         dest_mask |= TU_DYNAMIC_STENCIL_WRITE_MASK;
      }
   }

   if (copy_mask & TU_DYNAMIC_STENCIL_REFERENCE) {
      if (memcmp(&dest->stencil_reference,
                 &src->stencil_reference,
                 sizeof(src->stencil_reference))) {
         dest->stencil_reference = src->stencil_reference;
         dest_mask |= TU_DYNAMIC_STENCIL_REFERENCE;
      }
   }

   if (copy_mask & TU_DYNAMIC_DISCARD_RECTANGLE) {
      if (memcmp(&dest->discard_rectangle.rectangles,
                 &src->discard_rectangle.rectangles,
                 src->discard_rectangle.count * sizeof(VkRect2D))) {
         typed_memcpy(dest->discard_rectangle.rectangles,
                      src->discard_rectangle.rectangles,
                      src->discard_rectangle.count);
         dest_mask |= TU_DYNAMIC_DISCARD_RECTANGLE;
      }
   }
}

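/* Allocate a command buffer from the pool's allocator and link it into the
 * pool's cmd_buffers list. Pool-less buffers get a self-initialized
 * pool_link so list_del is safe on destroy.
 */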
static VkResult
tu_create_cmd_buffer(struct tu_device *device,
                     struct tu_cmd_pool *pool,
                     VkCommandBufferLevel level,
                     VkCommandBuffer *pCommandBuffer)
{
   struct tu_cmd_buffer *cmd_buffer;
   cmd_buffer = vk_zalloc(
      &pool->alloc, sizeof(*cmd_buffer), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (cmd_buffer == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
   cmd_buffer->device = device;
   cmd_buffer->pool = pool;
   cmd_buffer->level = level;

   if (pool) {
      list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);
      cmd_buffer->queue_family_index = pool->queue_family_index;
   } else {
      /* Init the pool_link so we can safely call list_del when we destroy
       * the command buffer
       */
      list_inithead(&cmd_buffer->pool_link);
      cmd_buffer->queue_family_index = TU_QUEUE_GENERAL;
   }

   *pCommandBuffer = tu_cmd_buffer_to_handle(cmd_buffer);

   list_inithead(&cmd_buffer->upload.list);

   return VK_SUCCESS;
}

static void
tu_cmd_buffer_destroy(struct tu_cmd_buffer *cmd_buffer)
{
   list_del(&cmd_buffer->pool_link);

   for (unsigned i = 0; i < VK_PIPELINE_BIND_POINT_RANGE_SIZE; i++)
      free(cmd_buffer->descriptors[i].push_set.set.mapped_ptr);

   vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}

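/* Return a command buffer to the initial state, clearing per-bind-point
 * descriptor state and the sticky record_result.
 */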
static VkResult
tu_reset_cmd_buffer(struct tu_cmd_buffer *cmd_buffer)
{
   cmd_buffer->record_result = VK_SUCCESS;

   for (unsigned i = 0; i < VK_PIPELINE_BIND_POINT_RANGE_SIZE; i++) {
      cmd_buffer->descriptors[i].dirty = 0;
      cmd_buffer->descriptors[i].valid = 0;
      cmd_buffer->descriptors[i].push_dirty = false;
   }

   cmd_buffer->status = TU_CMD_BUFFER_STATUS_INITIAL;

   return cmd_buffer->record_result;
}

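/* Allocation prefers to recycle command buffers parked on the pool's
 * free_cmd_buffers list over creating new ones.
 */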
VkResult
tu_AllocateCommandBuffers(VkDevice _device,
                          const VkCommandBufferAllocateInfo *pAllocateInfo,
                          VkCommandBuffer *pCommandBuffers)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_cmd_pool, pool, pAllocateInfo->commandPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      if (!list_empty(&pool->free_cmd_buffers)) {
         struct tu_cmd_buffer *cmd_buffer = list_first_entry(
            &pool->free_cmd_buffers, struct tu_cmd_buffer, pool_link);

         list_del(&cmd_buffer->pool_link);
         list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);

         result = tu_reset_cmd_buffer(cmd_buffer);
         cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
         cmd_buffer->level = pAllocateInfo->level;

         pCommandBuffers[i] = tu_cmd_buffer_to_handle(cmd_buffer);
      } else {
         result = tu_create_cmd_buffer(
            device, pool, pAllocateInfo->level, &pCommandBuffers[i]);
      }
      if (result != VK_SUCCESS)
         break;
   }

   if (result != VK_SUCCESS) {
      tu_FreeCommandBuffers(
         _device, pAllocateInfo->commandPool, i, pCommandBuffers);

      /* From the Vulkan 1.0.66 spec:
       *
       *    "vkAllocateCommandBuffers can be used to create multiple
       *     command buffers. If the creation of any of those command
       *     buffers fails, the implementation must destroy all
       *     successfully created command buffer objects from this
       *     command, set all entries of the pCommandBuffers array to
       *     NULL and return the error."
       */
      memset(pCommandBuffers,
             0,
             sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
   }

   return result;
}

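/* "Freed" pooled command buffers are only moved to the pool's free list for
 * later reuse; buffers without a pool are destroyed outright.
 */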
void
tu_FreeCommandBuffers(VkDevice device,
                      VkCommandPool commandPool,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCommandBuffers)
{
   for (uint32_t i = 0; i < commandBufferCount; i++) {
      TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, pCommandBuffers[i]);

      if (cmd_buffer) {
         if (cmd_buffer->pool) {
            list_del(&cmd_buffer->pool_link);
            list_addtail(&cmd_buffer->pool_link,
                         &cmd_buffer->pool->free_cmd_buffers);
         } else
            tu_cmd_buffer_destroy(cmd_buffer);
      }
   }
}

VkResult
tu_ResetCommandBuffer(VkCommandBuffer commandBuffer,
                      VkCommandBufferResetFlags flags)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   return tu_reset_cmd_buffer(cmd_buffer);
}

VkResult
tu_BeginCommandBuffer(VkCommandBuffer commandBuffer,
                      const VkCommandBufferBeginInfo *pBeginInfo)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   VkResult result = VK_SUCCESS;

   if (cmd_buffer->status != TU_CMD_BUFFER_STATUS_INITIAL) {
      /* If the command buffer has already been reset with
       * vkResetCommandBuffer, there is no need to do it again.
       */
      result = tu_reset_cmd_buffer(cmd_buffer);
      if (result != VK_SUCCESS)
         return result;
   }

   memset(&cmd_buffer->state, 0, sizeof(cmd_buffer->state));
   cmd_buffer->usage_flags = pBeginInfo->flags;

   /* Set up the initial configuration for the command buffer. */
   if (cmd_buffer->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
      switch (cmd_buffer->queue_family_index) {
      case TU_QUEUE_GENERAL:
         /* init */
         break;
      default:
         break;
      }
   }

   cmd_buffer->status = TU_CMD_BUFFER_STATUS_RECORDING;

   return result;
}

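/* The state-binding and state-setting entry points below are stubs for now;
 * they record nothing into the command stream yet.
 */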
void
tu_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
                        uint32_t firstBinding,
                        uint32_t bindingCount,
                        const VkBuffer *pBuffers,
                        const VkDeviceSize *pOffsets)
{
}

void
tu_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
                      VkBuffer buffer,
                      VkDeviceSize offset,
                      VkIndexType indexType)
{
}

void
tu_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                         VkPipelineBindPoint pipelineBindPoint,
                         VkPipelineLayout _layout,
                         uint32_t firstSet,
                         uint32_t descriptorSetCount,
                         const VkDescriptorSet *pDescriptorSets,
                         uint32_t dynamicOffsetCount,
                         const uint32_t *pDynamicOffsets)
{
}

void
tu_CmdPushConstants(VkCommandBuffer commandBuffer,
                    VkPipelineLayout layout,
                    VkShaderStageFlags stageFlags,
                    uint32_t offset,
                    uint32_t size,
                    const void *pValues)
{
}

VkResult
tu_EndCommandBuffer(VkCommandBuffer commandBuffer)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->status = TU_CMD_BUFFER_STATUS_EXECUTABLE;

   return cmd_buffer->record_result;
}

void
tu_CmdBindPipeline(VkCommandBuffer commandBuffer,
                   VkPipelineBindPoint pipelineBindPoint,
                   VkPipeline _pipeline)
{
}

void
tu_CmdSetViewport(VkCommandBuffer commandBuffer,
                  uint32_t firstViewport,
                  uint32_t viewportCount,
                  const VkViewport *pViewports)
{
}

void
tu_CmdSetScissor(VkCommandBuffer commandBuffer,
                 uint32_t firstScissor,
                 uint32_t scissorCount,
                 const VkRect2D *pScissors)
{
}

void
tu_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
}

void
tu_CmdSetDepthBias(VkCommandBuffer commandBuffer,
                   float depthBiasConstantFactor,
                   float depthBiasClamp,
                   float depthBiasSlopeFactor)
{
}

void
tu_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
                        const float blendConstants[4])
{
}

void
tu_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
                     float minDepthBounds,
                     float maxDepthBounds)
{
}

void
tu_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
                            VkStencilFaceFlags faceMask,
                            uint32_t compareMask)
{
}

void
tu_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t writeMask)
{
}

void
tu_CmdSetStencilReference(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t reference)
{
}

void
tu_CmdExecuteCommands(VkCommandBuffer commandBuffer,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCmdBuffers)
{
}

VkResult
tu_CreateCommandPool(VkDevice _device,
                     const VkCommandPoolCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkCommandPool *pCmdPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_cmd_pool *pool;

   pool = vk_alloc2(&device->alloc,
                    pAllocator,
                    sizeof(*pool),
                    8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (pool == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (pAllocator)
      pool->alloc = *pAllocator;
   else
      pool->alloc = device->alloc;

   list_inithead(&pool->cmd_buffers);
   list_inithead(&pool->free_cmd_buffers);

   pool->queue_family_index = pCreateInfo->queueFamilyIndex;

   *pCmdPool = tu_cmd_pool_to_handle(pool);

   return VK_SUCCESS;
}

void
tu_DestroyCommandPool(VkDevice _device,
                      VkCommandPool commandPool,
                      const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_cmd_pool, pool, commandPool);

   if (!pool)
      return;

   list_for_each_entry_safe(
      struct tu_cmd_buffer, cmd_buffer, &pool->cmd_buffers, pool_link)
   {
      tu_cmd_buffer_destroy(cmd_buffer);
   }

   list_for_each_entry_safe(
      struct tu_cmd_buffer, cmd_buffer, &pool->free_cmd_buffers, pool_link)
   {
      tu_cmd_buffer_destroy(cmd_buffer);
   }

   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult
tu_ResetCommandPool(VkDevice device,
                    VkCommandPool commandPool,
                    VkCommandPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_cmd_pool, pool, commandPool);
   VkResult result;

   list_for_each_entry(
      struct tu_cmd_buffer, cmd_buffer, &pool->cmd_buffers, pool_link)
   {
      result = tu_reset_cmd_buffer(cmd_buffer);
      if (result != VK_SUCCESS)
         return result;
   }

   return VK_SUCCESS;
}

void
tu_TrimCommandPool(VkDevice device,
                   VkCommandPool commandPool,
                   VkCommandPoolTrimFlagsKHR flags)
{
   TU_FROM_HANDLE(tu_cmd_pool, pool, commandPool);

   if (!pool)
      return;

   list_for_each_entry_safe(
      struct tu_cmd_buffer, cmd_buffer, &pool->free_cmd_buffers, pool_link)
   {
      tu_cmd_buffer_destroy(cmd_buffer);
   }
}

void
tu_CmdBeginRenderPass(VkCommandBuffer commandBuffer,
                      const VkRenderPassBeginInfo *pRenderPassBegin,
                      VkSubpassContents contents)
{
}

void
tu_CmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                          const VkRenderPassBeginInfo *pRenderPassBeginInfo,
                          const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
{
   tu_CmdBeginRenderPass(
      commandBuffer, pRenderPassBeginInfo, pSubpassBeginInfo->contents);
}

void
tu_CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
{
}

void
tu_CmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                      const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                      const VkSubpassEndInfoKHR *pSubpassEndInfo)
{
   tu_CmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

struct tu_draw_info
{
   /**
    * Number of vertices.
    */
   uint32_t count;

   /**
    * Index of the first vertex.
    */
   int32_t vertex_offset;

   /**
    * First instance id.
    */
   uint32_t first_instance;

   /**
    * Number of instances.
    */
   uint32_t instance_count;

   /**
    * First index (indexed draws only).
    */
   uint32_t first_index;

   /**
    * Whether it's an indexed draw.
    */
   bool indexed;

   /**
    * Indirect draw parameters resource.
    */
   struct tu_buffer *indirect;
   uint64_t indirect_offset;
   uint32_t stride;

   /**
    * Draw count parameters resource.
    */
   struct tu_buffer *count_buffer;
   uint64_t count_buffer_offset;
};

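/* Common draw path: every vkCmdDraw* entry point packs its parameters into
 * a tu_draw_info and lands here. Command emission is not implemented yet.
 */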
static void
tu_draw(struct tu_cmd_buffer *cmd_buffer, const struct tu_draw_info *info)
{
}

void
tu_CmdDraw(VkCommandBuffer commandBuffer,
           uint32_t vertexCount,
           uint32_t instanceCount,
           uint32_t firstVertex,
           uint32_t firstInstance)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   struct tu_draw_info info = {};

   info.count = vertexCount;
   info.instance_count = instanceCount;
   info.first_instance = firstInstance;
   info.vertex_offset = firstVertex;

   tu_draw(cmd_buffer, &info);
}

void
tu_CmdDrawIndexed(VkCommandBuffer commandBuffer,
                  uint32_t indexCount,
                  uint32_t instanceCount,
                  uint32_t firstIndex,
                  int32_t vertexOffset,
                  uint32_t firstInstance)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   struct tu_draw_info info = {};

   info.indexed = true;
   info.count = indexCount;
   info.instance_count = instanceCount;
   info.first_index = firstIndex;
   info.vertex_offset = vertexOffset;
   info.first_instance = firstInstance;

   tu_draw(cmd_buffer, &info);
}

void
tu_CmdDrawIndirect(VkCommandBuffer commandBuffer,
                   VkBuffer _buffer,
                   VkDeviceSize offset,
                   uint32_t drawCount,
                   uint32_t stride)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   TU_FROM_HANDLE(tu_buffer, buffer, _buffer);
   struct tu_draw_info info = {};

   info.count = drawCount;
   info.indirect = buffer;
   info.indirect_offset = offset;
   info.stride = stride;

   tu_draw(cmd_buffer, &info);
}

void
tu_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
                          VkBuffer _buffer,
                          VkDeviceSize offset,
                          uint32_t drawCount,
                          uint32_t stride)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   TU_FROM_HANDLE(tu_buffer, buffer, _buffer);
   struct tu_draw_info info = {};

   info.indexed = true;
   info.count = drawCount;
   info.indirect = buffer;
   info.indirect_offset = offset;
   info.stride = stride;

   tu_draw(cmd_buffer, &info);
}

struct tu_dispatch_info
{
   /**
    * Determine the layout of the grid (in block units) to be used.
    */
   uint32_t blocks[3];

   /**
    * A starting offset for the grid. If unaligned is set, the offset
    * must still be aligned.
    */
   uint32_t offsets[3];

   /**
    * Whether it's an unaligned compute dispatch.
    */
   bool unaligned;

   /**
    * Indirect compute parameters resource.
    */
   struct tu_buffer *indirect;
   uint64_t indirect_offset;
};

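/* Common compute path, mirroring tu_draw; command emission is not
 * implemented yet.
 */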
static void
tu_dispatch(struct tu_cmd_buffer *cmd_buffer,
            const struct tu_dispatch_info *info)
{
}

void
tu_CmdDispatchBase(VkCommandBuffer commandBuffer,
                   uint32_t base_x,
                   uint32_t base_y,
                   uint32_t base_z,
                   uint32_t x,
                   uint32_t y,
                   uint32_t z)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   struct tu_dispatch_info info = {};

   info.blocks[0] = x;
   info.blocks[1] = y;
   info.blocks[2] = z;

   info.offsets[0] = base_x;
   info.offsets[1] = base_y;
   info.offsets[2] = base_z;

   tu_dispatch(cmd_buffer, &info);
}

void
tu_CmdDispatch(VkCommandBuffer commandBuffer,
               uint32_t x,
               uint32_t y,
               uint32_t z)
{
   tu_CmdDispatchBase(commandBuffer, 0, 0, 0, x, y, z);
}

void
tu_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
                       VkBuffer _buffer,
                       VkDeviceSize offset)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   TU_FROM_HANDLE(tu_buffer, buffer, _buffer);
   struct tu_dispatch_info info = {};

   info.indirect = buffer;
   info.indirect_offset = offset;

   tu_dispatch(cmd_buffer, &info);
}

void
tu_CmdEndRenderPass(VkCommandBuffer commandBuffer)
{
}

void
tu_CmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                        const VkSubpassEndInfoKHR *pSubpassEndInfo)
{
   tu_CmdEndRenderPass(commandBuffer);
}

struct tu_barrier_info
{
   uint32_t eventCount;
   const VkEvent *pEvents;
   VkPipelineStageFlags srcStageMask;
};

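/* Common barrier path shared by vkCmdPipelineBarrier and vkCmdWaitEvents;
 * a stub for now.
 */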
static void
tu_barrier(struct tu_cmd_buffer *cmd_buffer,
           uint32_t memoryBarrierCount,
           const VkMemoryBarrier *pMemoryBarriers,
           uint32_t bufferMemoryBarrierCount,
           const VkBufferMemoryBarrier *pBufferMemoryBarriers,
           uint32_t imageMemoryBarrierCount,
           const VkImageMemoryBarrier *pImageMemoryBarriers,
           const struct tu_barrier_info *info)
{
}

void
tu_CmdPipelineBarrier(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags destStageMask,
                      VkBool32 byRegion,
                      uint32_t memoryBarrierCount,
                      const VkMemoryBarrier *pMemoryBarriers,
                      uint32_t bufferMemoryBarrierCount,
                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                      uint32_t imageMemoryBarrierCount,
                      const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   struct tu_barrier_info info;

   info.eventCount = 0;
   info.pEvents = NULL;
   info.srcStageMask = srcStageMask;

   tu_barrier(cmd_buffer,
              memoryBarrierCount,
              pMemoryBarriers,
              bufferMemoryBarrierCount,
              pBufferMemoryBarriers,
              imageMemoryBarrierCount,
              pImageMemoryBarriers,
              &info);
}

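/* Helper shared by vkCmdSetEvent (value = 1) and vkCmdResetEvent
 * (value = 0); the event write itself is not implemented yet.
 */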
static void
write_event(struct tu_cmd_buffer *cmd_buffer,
            struct tu_event *event,
            VkPipelineStageFlags stageMask,
            unsigned value)
{
}

void
tu_CmdSetEvent(VkCommandBuffer commandBuffer,
               VkEvent _event,
               VkPipelineStageFlags stageMask)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   TU_FROM_HANDLE(tu_event, event, _event);

   write_event(cmd_buffer, event, stageMask, 1);
}

void
tu_CmdResetEvent(VkCommandBuffer commandBuffer,
                 VkEvent _event,
                 VkPipelineStageFlags stageMask)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   TU_FROM_HANDLE(tu_event, event, _event);

   write_event(cmd_buffer, event, stageMask, 0);
}

void
tu_CmdWaitEvents(VkCommandBuffer commandBuffer,
                 uint32_t eventCount,
                 const VkEvent *pEvents,
                 VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 uint32_t memoryBarrierCount,
                 const VkMemoryBarrier *pMemoryBarriers,
                 uint32_t bufferMemoryBarrierCount,
                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                 uint32_t imageMemoryBarrierCount,
                 const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, commandBuffer);
   struct tu_barrier_info info;

   info.eventCount = eventCount;
   info.pEvents = pEvents;
   info.srcStageMask = 0;

   tu_barrier(cmd_buffer,
              memoryBarrierCount,
              pMemoryBarriers,
              bufferMemoryBarrierCount,
              pBufferMemoryBarriers,
              imageMemoryBarrierCount,
              pImageMemoryBarriers,
              &info);
}

void
tu_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
   /* No-op */
}