/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include "val_private.h"
#include "pipe/p_context.h"
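
/* Command buffers in vallium are software-only: each vkCmd* entry point
 * below simply records a val_cmd_buffer_entry onto the command buffer's
 * linked list. Nothing executes at record time; the queue later walks the
 * list and translates each entry onto the gallium pipe_context when the
 * buffer is submitted (that replay lives outside this file).
 */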

static VkResult val_create_cmd_buffer(
   struct val_device *                         device,
   struct val_cmd_pool *                       pool,
   VkCommandBufferLevel                        level,
   VkCommandBuffer*                            pCommandBuffer)
{
   struct val_cmd_buffer *cmd_buffer;

   cmd_buffer = vk_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,
                         VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (cmd_buffer == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &cmd_buffer->base,
                       VK_OBJECT_TYPE_COMMAND_BUFFER);
   cmd_buffer->device = device;
   cmd_buffer->pool = pool;
   cmd_buffer->level = level;
   list_inithead(&cmd_buffer->cmds);
   cmd_buffer->status = VAL_CMD_BUFFER_STATUS_INITIAL;
   if (pool) {
      list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);
   } else {
      /* Init the pool_link so we can safely call list_del when we destroy
       * the command buffer
       */
      list_inithead(&cmd_buffer->pool_link);
   }
   *pCommandBuffer = val_cmd_buffer_to_handle(cmd_buffer);

   return VK_SUCCESS;
}

static void
val_cmd_buffer_free_all_cmds(struct val_cmd_buffer *cmd_buffer)
{
   struct val_cmd_buffer_entry *tmp, *cmd;
   LIST_FOR_EACH_ENTRY_SAFE(cmd, tmp, &cmd_buffer->cmds, cmd_link) {
      list_del(&cmd->cmd_link);
      vk_free(&cmd_buffer->pool->alloc, cmd);
   }
}

static VkResult
val_reset_cmd_buffer(struct val_cmd_buffer *cmd_buffer)
{
   val_cmd_buffer_free_all_cmds(cmd_buffer);
   list_inithead(&cmd_buffer->cmds);
   cmd_buffer->status = VAL_CMD_BUFFER_STATUS_INITIAL;
   return VK_SUCCESS;
}
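
/* vkAllocateCommandBuffers: reuse a buffer parked on the pool's
 * free_cmd_buffers list when one is available (vkFreeCommandBuffers parks
 * them there rather than freeing), otherwise create a fresh one. On any
 * failure, everything allocated so far is handed back and the output array
 * is zeroed, as the spec requires.
 */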
VkResult val_AllocateCommandBuffers(
   VkDevice                                    _device,
   const VkCommandBufferAllocateInfo*          pAllocateInfo,
   VkCommandBuffer*                            pCommandBuffers)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_cmd_pool, pool, pAllocateInfo->commandPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->commandBufferCount; i++) {

      if (!list_is_empty(&pool->free_cmd_buffers)) {
         struct val_cmd_buffer *cmd_buffer = list_first_entry(&pool->free_cmd_buffers, struct val_cmd_buffer, pool_link);

         list_del(&cmd_buffer->pool_link);
         list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);

         result = val_reset_cmd_buffer(cmd_buffer);
         cmd_buffer->level = pAllocateInfo->level;

         pCommandBuffers[i] = val_cmd_buffer_to_handle(cmd_buffer);
      } else {
         result = val_create_cmd_buffer(device, pool, pAllocateInfo->level,
                                        &pCommandBuffers[i]);
         if (result != VK_SUCCESS)
            break;
      }
   }

   if (result != VK_SUCCESS) {
      val_FreeCommandBuffers(_device, pAllocateInfo->commandPool,
                             i, pCommandBuffers);
      memset(pCommandBuffers, 0,
             sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
   }

   return result;
}

static void
val_cmd_buffer_destroy(struct val_cmd_buffer *cmd_buffer)
{
   val_cmd_buffer_free_all_cmds(cmd_buffer);
   list_del(&cmd_buffer->pool_link);
   vk_object_base_finish(&cmd_buffer->base);
   vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}

void val_FreeCommandBuffers(
   VkDevice                                    device,
   VkCommandPool                               commandPool,
   uint32_t                                    commandBufferCount,
   const VkCommandBuffer*                      pCommandBuffers)
{
   for (uint32_t i = 0; i < commandBufferCount; i++) {
      VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, pCommandBuffers[i]);

      if (cmd_buffer) {
         /* Pooled buffers are parked on the free list for reuse by
          * vkAllocateCommandBuffers; pool-less buffers are destroyed
          * outright.
          */
         if (cmd_buffer->pool) {
            list_del(&cmd_buffer->pool_link);
            list_addtail(&cmd_buffer->pool_link, &cmd_buffer->pool->free_cmd_buffers);
         } else
            val_cmd_buffer_destroy(cmd_buffer);
      }
   }
}
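
/* Lifecycle: buffers start INITIAL, move to RECORDING in
 * vkBeginCommandBuffer (which implicitly resets a reused buffer first) and
 * to EXECUTABLE in vkEndCommandBuffer. Resetting frees every recorded
 * entry and returns the buffer to INITIAL.
 */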
VkResult val_ResetCommandBuffer(
   VkCommandBuffer                             commandBuffer,
   VkCommandBufferResetFlags                   flags)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);

   return val_reset_cmd_buffer(cmd_buffer);
}

VkResult val_BeginCommandBuffer(
   VkCommandBuffer                             commandBuffer,
   const VkCommandBufferBeginInfo*             pBeginInfo)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VkResult result;
   if (cmd_buffer->status != VAL_CMD_BUFFER_STATUS_INITIAL) {
      result = val_reset_cmd_buffer(cmd_buffer);
      if (result != VK_SUCCESS)
         return result;
   }
   cmd_buffer->status = VAL_CMD_BUFFER_STATUS_RECORDING;
   return VK_SUCCESS;
}

VkResult val_EndCommandBuffer(
   VkCommandBuffer                             commandBuffer)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   cmd_buffer->status = VAL_CMD_BUFFER_STATUS_EXECUTABLE;
   return VK_SUCCESS;
}

VkResult val_CreateCommandPool(
   VkDevice                                    _device,
   const VkCommandPoolCreateInfo*              pCreateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkCommandPool*                              pCmdPool)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   struct val_cmd_pool *pool;

   pool = vk_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (pool == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_COMMAND_POOL);
   if (pAllocator)
      pool->alloc = *pAllocator;
   else
      pool->alloc = device->alloc;

   list_inithead(&pool->cmd_buffers);
   list_inithead(&pool->free_cmd_buffers);

   *pCmdPool = val_cmd_pool_to_handle(pool);

   return VK_SUCCESS;
}

void val_DestroyCommandPool(
   VkDevice                                    _device,
   VkCommandPool                               commandPool,
   const VkAllocationCallbacks*                pAllocator)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_cmd_pool, pool, commandPool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct val_cmd_buffer, cmd_buffer,
                            &pool->cmd_buffers, pool_link) {
      val_cmd_buffer_destroy(cmd_buffer);
   }

   list_for_each_entry_safe(struct val_cmd_buffer, cmd_buffer,
                            &pool->free_cmd_buffers, pool_link) {
      val_cmd_buffer_destroy(cmd_buffer);
   }

   vk_object_base_finish(&pool->base);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult val_ResetCommandPool(
   VkDevice                                    device,
   VkCommandPool                               commandPool,
   VkCommandPoolResetFlags                     flags)
{
   VAL_FROM_HANDLE(val_cmd_pool, pool, commandPool);
   VkResult result;

   list_for_each_entry(struct val_cmd_buffer, cmd_buffer,
                       &pool->cmd_buffers, pool_link) {
      result = val_reset_cmd_buffer(cmd_buffer);
      if (result != VK_SUCCESS)
         return result;
   }
   return VK_SUCCESS;
}

void val_TrimCommandPool(
   VkDevice                                    device,
   VkCommandPool                               commandPool,
   VkCommandPoolTrimFlags                      flags)
{
   VAL_FROM_HANDLE(val_cmd_pool, pool, commandPool);

   if (!pool)
      return;

   /* Trimming only needs to release memory not in active use: destroy
    * everything parked on the free list.
    */
   list_for_each_entry_safe(struct val_cmd_buffer, cmd_buffer,
                            &pool->free_cmd_buffers, pool_link) {
      val_cmd_buffer_destroy(cmd_buffer);
   }
}
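
/* Every recorded command is a val_cmd_buffer_entry allocated with
 * extra_size bytes of trailing storage, so variable-length payloads
 * (vertex buffer arrays, copy regions, clear rects, ...) live in the same
 * allocation immediately after the entry, at (cmd + 1). One vk_free in
 * val_cmd_buffer_free_all_cmds releases both.
 */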
static struct val_cmd_buffer_entry *cmd_buf_entry_alloc_size(struct val_cmd_buffer *cmd_buffer,
                                                             uint32_t extra_size,
                                                             enum val_cmds type)
{
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = sizeof(*cmd) + extra_size;
   cmd = vk_alloc(&cmd_buffer->pool->alloc,
                  cmd_size,
                  8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!cmd)
      return NULL;

   cmd->cmd_type = type;
   return cmd;
}

static struct val_cmd_buffer_entry *cmd_buf_entry_alloc(struct val_cmd_buffer *cmd_buffer,
                                                        enum val_cmds type)
{
   return cmd_buf_entry_alloc_size(cmd_buffer, 0, type);
}

static void cmd_buf_queue(struct val_cmd_buffer *cmd_buffer,
                          struct val_cmd_buffer_entry *cmd)
{
   list_addtail(&cmd->cmd_link, &cmd_buffer->cmds);
}
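
/* Every vkCmd* below follows the same three-step shape: allocate an entry
 * (sized for any trailing payload), fill in the matching member of the
 * cmd->u union, and append it to the buffer. A new command would look like
 * this sketch (VAL_CMD_FOO and u.foo are hypothetical, not real entries):
 *
 *    cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_FOO);
 *    if (!cmd)
 *       return;
 *    cmd->u.foo.param = param;
 *    cmd_buf_queue(cmd_buffer, cmd);
 *
 * Allocation failure is silently dropped, matching the fact that these
 * entry points return void; the error surfaces later, if at all.
 */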
static void
state_setup_attachments(struct val_attachment_state *attachments,
                        struct val_render_pass *pass,
                        const VkClearValue *clear_values)
{
   for (uint32_t i = 0; i < pass->attachment_count; ++i) {
      struct val_render_pass_attachment *att = &pass->attachments[i];
      VkImageAspectFlags att_aspects = vk_format_aspects(att->format);
      VkImageAspectFlags clear_aspects = 0;
      if (att_aspects == VK_IMAGE_ASPECT_COLOR_BIT) {
         /* color attachment */
         if (att->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            clear_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;
         }
      } else {
         /* depthstencil attachment */
         if ((att_aspects & VK_IMAGE_ASPECT_DEPTH_BIT) &&
             att->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            clear_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
            if ((att_aspects & VK_IMAGE_ASPECT_STENCIL_BIT) &&
                att->stencil_load_op == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
               clear_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
         }
         if ((att_aspects & VK_IMAGE_ASPECT_STENCIL_BIT) &&
             att->stencil_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            clear_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
         }
      }
      attachments[i].pending_clear_aspects = clear_aspects;
      if (clear_aspects)
         attachments[i].clear_value = clear_values[i];
   }
}

void val_CmdBeginRenderPass(
   VkCommandBuffer                             commandBuffer,
   const VkRenderPassBeginInfo*                pRenderPassBegin,
   VkSubpassContents                           contents)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_render_pass, pass, pRenderPassBegin->renderPass);
   VAL_FROM_HANDLE(val_framebuffer, framebuffer, pRenderPassBegin->framebuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = pass->attachment_count * sizeof(struct val_attachment_state);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_BEGIN_RENDER_PASS);
   if (!cmd)
      return;

   cmd->u.begin_render_pass.render_pass = pass;
   cmd->u.begin_render_pass.framebuffer = framebuffer;
   cmd->u.begin_render_pass.render_area = pRenderPassBegin->renderArea;

   cmd->u.begin_render_pass.attachments = (struct val_attachment_state *)(cmd + 1);
   state_setup_attachments(cmd->u.begin_render_pass.attachments, pass, pRenderPassBegin->pClearValues);

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdNextSubpass(
   VkCommandBuffer                             commandBuffer,
   VkSubpassContents                           contents)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_NEXT_SUBPASS);
   if (!cmd)
      return;

   cmd->u.next_subpass.contents = contents;

   cmd_buf_queue(cmd_buffer, cmd);
}
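
/* The trailing payload here is two back-to-back arrays: bindingCount
 * val_buffer pointers followed by bindingCount VkDeviceSize offsets, both
 * unwrapped/copied out of the caller's transient arrays.
 */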
void val_CmdBindVertexBuffers(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    firstBinding,
   uint32_t                                    bindingCount,
   const VkBuffer*                             pBuffers,
   const VkDeviceSize*                         pOffsets)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   struct val_buffer **buffers;
   VkDeviceSize *offsets;
   uint32_t i;
   uint32_t cmd_size = bindingCount * sizeof(struct val_buffer *) + bindingCount * sizeof(VkDeviceSize);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_BIND_VERTEX_BUFFERS);
   if (!cmd)
      return;

   cmd->u.vertex_buffers.first = firstBinding;
   cmd->u.vertex_buffers.binding_count = bindingCount;

   buffers = (struct val_buffer **)(cmd + 1);
   offsets = (VkDeviceSize *)(buffers + bindingCount);
   for (i = 0; i < bindingCount; i++) {
      buffers[i] = val_buffer_from_handle(pBuffers[i]);
      offsets[i] = pOffsets[i];
   }
   cmd->u.vertex_buffers.buffers = buffers;
   cmd->u.vertex_buffers.offsets = offsets;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdBindPipeline(
   VkCommandBuffer                             commandBuffer,
   VkPipelineBindPoint                         pipelineBindPoint,
   VkPipeline                                  _pipeline)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_pipeline, pipeline, _pipeline);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_BIND_PIPELINE);
   if (!cmd)
      return;

   cmd->u.pipeline.bind_point = pipelineBindPoint;
   cmd->u.pipeline.pipeline = pipeline;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdBindDescriptorSets(
   VkCommandBuffer                             commandBuffer,
   VkPipelineBindPoint                         pipelineBindPoint,
   VkPipelineLayout                            _layout,
   uint32_t                                    firstSet,
   uint32_t                                    descriptorSetCount,
   const VkDescriptorSet*                      pDescriptorSets,
   uint32_t                                    dynamicOffsetCount,
   const uint32_t*                             pDynamicOffsets)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_pipeline_layout, layout, _layout);
   struct val_cmd_buffer_entry *cmd;
   struct val_descriptor_set **sets;
   uint32_t *offsets;
   uint32_t i;
   uint32_t cmd_size = descriptorSetCount * sizeof(struct val_descriptor_set *) + dynamicOffsetCount * sizeof(uint32_t);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_BIND_DESCRIPTOR_SETS);
   if (!cmd)
      return;

   cmd->u.descriptor_sets.bind_point = pipelineBindPoint;
   cmd->u.descriptor_sets.layout = layout;
   cmd->u.descriptor_sets.first = firstSet;
   cmd->u.descriptor_sets.count = descriptorSetCount;

   sets = (struct val_descriptor_set **)(cmd + 1);
   for (i = 0; i < descriptorSetCount; i++) {
      sets[i] = val_descriptor_set_from_handle(pDescriptorSets[i]);
   }
   cmd->u.descriptor_sets.sets = sets;

   cmd->u.descriptor_sets.dynamic_offset_count = dynamicOffsetCount;
   offsets = (uint32_t *)(sets + descriptorSetCount);
   for (i = 0; i < dynamicOffsetCount; i++)
      offsets[i] = pDynamicOffsets[i];
   cmd->u.descriptor_sets.dynamic_offsets = offsets;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdDraw(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    vertexCount,
   uint32_t                                    instanceCount,
   uint32_t                                    firstVertex,
   uint32_t                                    firstInstance)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_DRAW);
   if (!cmd)
      return;

   cmd->u.draw.vertex_count = vertexCount;
   cmd->u.draw.instance_count = instanceCount;
   cmd->u.draw.first_vertex = firstVertex;
   cmd->u.draw.first_instance = firstInstance;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdEndRenderPass(
   VkCommandBuffer                             commandBuffer)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_END_RENDER_PASS);
   if (!cmd)
      return;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetViewport(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    firstViewport,
   uint32_t                                    viewportCount,
   const VkViewport*                           pViewports)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t i;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_VIEWPORT);
   if (!cmd)
      return;

   cmd->u.set_viewport.first_viewport = firstViewport;
   cmd->u.set_viewport.viewport_count = viewportCount;
   for (i = 0; i < viewportCount; i++)
      cmd->u.set_viewport.viewports[i] = pViewports[i];

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetScissor(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    firstScissor,
   uint32_t                                    scissorCount,
   const VkRect2D*                             pScissors)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t i;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_SCISSOR);
   if (!cmd)
      return;

   cmd->u.set_scissor.first_scissor = firstScissor;
   cmd->u.set_scissor.scissor_count = scissorCount;
   for (i = 0; i < scissorCount; i++)
      cmd->u.set_scissor.scissors[i] = pScissors[i];

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetLineWidth(
   VkCommandBuffer                             commandBuffer,
   float                                       lineWidth)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_LINE_WIDTH);
   if (!cmd)
      return;

   cmd->u.set_line_width.line_width = lineWidth;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetDepthBias(
   VkCommandBuffer                             commandBuffer,
   float                                       depthBiasConstantFactor,
   float                                       depthBiasClamp,
   float                                       depthBiasSlopeFactor)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_DEPTH_BIAS);
   if (!cmd)
      return;

   cmd->u.set_depth_bias.constant_factor = depthBiasConstantFactor;
   cmd->u.set_depth_bias.clamp = depthBiasClamp;
   cmd->u.set_depth_bias.slope_factor = depthBiasSlopeFactor;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetBlendConstants(
   VkCommandBuffer                             commandBuffer,
   const float                                 blendConstants[4])
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_BLEND_CONSTANTS);
   if (!cmd)
      return;

   memcpy(cmd->u.set_blend_constants.blend_constants, blendConstants, 4 * sizeof(float));

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetDepthBounds(
   VkCommandBuffer                             commandBuffer,
   float                                       minDepthBounds,
   float                                       maxDepthBounds)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_DEPTH_BOUNDS);
   if (!cmd)
      return;

   cmd->u.set_depth_bounds.min_depth = minDepthBounds;
   cmd->u.set_depth_bounds.max_depth = maxDepthBounds;

   cmd_buf_queue(cmd_buffer, cmd);
}

/* The three stencil setters share the u.stencil_vals payload; face_mask
 * records which faces the value applies to.
 */
void val_CmdSetStencilCompareMask(
   VkCommandBuffer                             commandBuffer,
   VkStencilFaceFlags                          faceMask,
   uint32_t                                    compareMask)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_STENCIL_COMPARE_MASK);
   if (!cmd)
      return;

   cmd->u.stencil_vals.face_mask = faceMask;
   cmd->u.stencil_vals.value = compareMask;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetStencilWriteMask(
   VkCommandBuffer                             commandBuffer,
   VkStencilFaceFlags                          faceMask,
   uint32_t                                    writeMask)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_STENCIL_WRITE_MASK);
   if (!cmd)
      return;

   cmd->u.stencil_vals.face_mask = faceMask;
   cmd->u.stencil_vals.value = writeMask;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetStencilReference(
   VkCommandBuffer                             commandBuffer,
   VkStencilFaceFlags                          faceMask,
   uint32_t                                    reference)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_STENCIL_REFERENCE);
   if (!cmd)
      return;

   cmd->u.stencil_vals.face_mask = faceMask;
   cmd->u.stencil_vals.value = reference;

   cmd_buf_queue(cmd_buffer, cmd);
}
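
/* vkCmdPushConstants: the extra allocation is (size - 4) because, going by
 * the sizing here, the entry's push-constant payload apparently ends in a
 * 4-byte array member, so sizeof(*cmd) already covers the first 4 bytes of
 * the copied values.
 */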
void val_CmdPushConstants(
   VkCommandBuffer                             commandBuffer,
   VkPipelineLayout                            layout,
   VkShaderStageFlags                          stageFlags,
   uint32_t                                    offset,
   uint32_t                                    size,
   const void*                                 pValues)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, (size - 4), VAL_CMD_PUSH_CONSTANTS);
   if (!cmd)
      return;

   cmd->u.push_constants.stage = stageFlags;
   cmd->u.push_constants.offset = offset;
   cmd->u.push_constants.size = size;
   memcpy(cmd->u.push_constants.val, pValues, size);

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdBindIndexBuffer(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    _buffer,
   VkDeviceSize                                offset,
   VkIndexType                                 indexType)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, buffer, _buffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_BIND_INDEX_BUFFER);
   if (!cmd)
      return;

   cmd->u.index_buffer.buffer = buffer;
   cmd->u.index_buffer.offset = offset;
   cmd->u.index_buffer.index_type = indexType;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdDrawIndexed(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    indexCount,
   uint32_t                                    instanceCount,
   uint32_t                                    firstIndex,
   int32_t                                     vertexOffset,
   uint32_t                                    firstInstance)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_DRAW_INDEXED);
   if (!cmd)
      return;

   cmd->u.draw_indexed.index_count = indexCount;
   cmd->u.draw_indexed.instance_count = instanceCount;
   cmd->u.draw_indexed.first_index = firstIndex;
   cmd->u.draw_indexed.vertex_offset = vertexOffset;
   cmd->u.draw_indexed.first_instance = firstInstance;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdDrawIndirect(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    _buffer,
   VkDeviceSize                                offset,
   uint32_t                                    drawCount,
   uint32_t                                    stride)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, buf, _buffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_DRAW_INDIRECT);
   if (!cmd)
      return;

   cmd->u.draw_indirect.offset = offset;
   cmd->u.draw_indirect.buffer = buf;
   cmd->u.draw_indirect.draw_count = drawCount;
   cmd->u.draw_indirect.stride = stride;

   cmd_buf_queue(cmd_buffer, cmd);
}

/* Indexed and non-indexed indirect draws record the same u.draw_indirect
 * payload; only the entry type differs.
 */
void val_CmdDrawIndexedIndirect(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    _buffer,
   VkDeviceSize                                offset,
   uint32_t                                    drawCount,
   uint32_t                                    stride)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, buf, _buffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_DRAW_INDEXED_INDIRECT);
   if (!cmd)
      return;

   cmd->u.draw_indirect.offset = offset;
   cmd->u.draw_indirect.buffer = buf;
   cmd->u.draw_indirect.draw_count = drawCount;
   cmd->u.draw_indirect.stride = stride;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdDispatch(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    x,
   uint32_t                                    y,
   uint32_t                                    z)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_DISPATCH);
   if (!cmd)
      return;

   cmd->u.dispatch.x = x;
   cmd->u.dispatch.y = y;
   cmd->u.dispatch.z = z;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdDispatchIndirect(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    _buffer,
   VkDeviceSize                                offset)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_DISPATCH_INDIRECT);
   if (!cmd)
      return;

   cmd->u.dispatch_indirect.buffer = val_buffer_from_handle(_buffer);
   cmd->u.dispatch_indirect.offset = offset;

   cmd_buf_queue(cmd_buffer, cmd);
}
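
/* vkCmdExecuteCommands only stores pointers to the secondary command
 * buffers; nothing is copied here, so their recorded entry lists are
 * presumably walked in place when the primary buffer is replayed.
 */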
void val_CmdExecuteCommands(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    commandBufferCount,
   const VkCommandBuffer*                      pCmdBuffers)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = commandBufferCount * sizeof(struct val_cmd_buffer *);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_EXECUTE_COMMANDS);
   if (!cmd)
      return;

   cmd->u.execute_commands.command_buffer_count = commandBufferCount;
   for (unsigned i = 0; i < commandBufferCount; i++)
      cmd->u.execute_commands.cmd_buffers[i] = val_cmd_buffer_from_handle(pCmdBuffers[i]);

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdSetEvent(VkCommandBuffer commandBuffer,
                     VkEvent _event,
                     VkPipelineStageFlags stageMask)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_event, event, _event);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_EVENT);
   if (!cmd)
      return;

   cmd->u.event_set.event = event;
   cmd->u.event_set.value = true;
   cmd->u.event_set.flush = !!(stageMask == VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);

   cmd_buf_queue(cmd_buffer, cmd);
}

/* Reset records the same VAL_CMD_SET_EVENT entry as set; only the value
 * flag differs.
 */
void val_CmdResetEvent(VkCommandBuffer commandBuffer,
                       VkEvent _event,
                       VkPipelineStageFlags stageMask)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_event, event, _event);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_SET_EVENT);
   if (!cmd)
      return;

   cmd->u.event_set.event = event;
   cmd->u.event_set.value = false;
   cmd->u.event_set.flush = !!(stageMask == VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);

   cmd_buf_queue(cmd_buffer, cmd);
}
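
/* vkCmdWaitEvents sizes its entry for the event pointers plus all three
 * barrier arrays, but so far only the events are copied in; the barrier
 * copies are still TODO below.
 */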
void val_CmdWaitEvents(VkCommandBuffer commandBuffer,
                       uint32_t eventCount,
                       const VkEvent* pEvents,
                       VkPipelineStageFlags srcStageMask,
                       VkPipelineStageFlags dstStageMask,
                       uint32_t memoryBarrierCount,
                       const VkMemoryBarrier* pMemoryBarriers,
                       uint32_t bufferMemoryBarrierCount,
                       const VkBufferMemoryBarrier* pBufferMemoryBarriers,
                       uint32_t imageMemoryBarrierCount,
                       const VkImageMemoryBarrier* pImageMemoryBarriers)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = 0;

   cmd_size += eventCount * sizeof(struct val_event *);
   cmd_size += memoryBarrierCount * sizeof(VkMemoryBarrier);
   cmd_size += bufferMemoryBarrierCount * sizeof(VkBufferMemoryBarrier);
   cmd_size += imageMemoryBarrierCount * sizeof(VkImageMemoryBarrier);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_WAIT_EVENTS);
   if (!cmd)
      return;

   cmd->u.wait_events.src_stage_mask = srcStageMask;
   cmd->u.wait_events.dst_stage_mask = dstStageMask;
   cmd->u.wait_events.event_count = eventCount;
   cmd->u.wait_events.events = (struct val_event **)(cmd + 1);
   for (unsigned i = 0; i < eventCount; i++)
      cmd->u.wait_events.events[i] = val_event_from_handle(pEvents[i]);
   cmd->u.wait_events.memory_barrier_count = memoryBarrierCount;
   cmd->u.wait_events.buffer_memory_barrier_count = bufferMemoryBarrierCount;
   cmd->u.wait_events.image_memory_barrier_count = imageMemoryBarrierCount;

   /* TODO finish off this */
   cmd_buf_queue(cmd_buffer, cmd);
}
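
/* The copy/blit/clear commands below all snapshot their region arrays with
 * memcpy into the entry's trailing storage: the caller's pRegions/pRanges
 * pointers are only valid for the duration of the call, so nothing may be
 * kept by reference.
 */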
void val_CmdCopyBufferToImage(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    srcBuffer,
   VkImage                                     destImage,
   VkImageLayout                               destImageLayout,
   uint32_t                                    regionCount,
   const VkBufferImageCopy*                    pRegions)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, src_buffer, srcBuffer);
   VAL_FROM_HANDLE(val_image, dst_image, destImage);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = regionCount * sizeof(VkBufferImageCopy);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_COPY_BUFFER_TO_IMAGE);
   if (!cmd)
      return;

   cmd->u.buffer_to_img.src = src_buffer;
   cmd->u.buffer_to_img.dst = dst_image;
   cmd->u.buffer_to_img.dst_layout = destImageLayout;
   cmd->u.buffer_to_img.region_count = regionCount;

   {
      VkBufferImageCopy *regions;

      regions = (VkBufferImageCopy *)(cmd + 1);
      memcpy(regions, pRegions, regionCount * sizeof(VkBufferImageCopy));
      cmd->u.buffer_to_img.regions = regions;
   }

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdCopyImageToBuffer(
   VkCommandBuffer                             commandBuffer,
   VkImage                                     srcImage,
   VkImageLayout                               srcImageLayout,
   VkBuffer                                    destBuffer,
   uint32_t                                    regionCount,
   const VkBufferImageCopy*                    pRegions)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_image, src_image, srcImage);
   VAL_FROM_HANDLE(val_buffer, dst_buffer, destBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = regionCount * sizeof(VkBufferImageCopy);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_COPY_IMAGE_TO_BUFFER);
   if (!cmd)
      return;

   cmd->u.img_to_buffer.src = src_image;
   cmd->u.img_to_buffer.dst = dst_buffer;
   cmd->u.img_to_buffer.src_layout = srcImageLayout;
   cmd->u.img_to_buffer.region_count = regionCount;

   {
      VkBufferImageCopy *regions;

      regions = (VkBufferImageCopy *)(cmd + 1);
      memcpy(regions, pRegions, regionCount * sizeof(VkBufferImageCopy));
      cmd->u.img_to_buffer.regions = regions;
   }

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdCopyImage(
   VkCommandBuffer                             commandBuffer,
   VkImage                                     srcImage,
   VkImageLayout                               srcImageLayout,
   VkImage                                     destImage,
   VkImageLayout                               destImageLayout,
   uint32_t                                    regionCount,
   const VkImageCopy*                          pRegions)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_image, src_image, srcImage);
   VAL_FROM_HANDLE(val_image, dest_image, destImage);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = regionCount * sizeof(VkImageCopy);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_COPY_IMAGE);
   if (!cmd)
      return;

   cmd->u.copy_image.src = src_image;
   cmd->u.copy_image.dst = dest_image;
   cmd->u.copy_image.src_layout = srcImageLayout;
   cmd->u.copy_image.dst_layout = destImageLayout;
   cmd->u.copy_image.region_count = regionCount;

   {
      VkImageCopy *regions;

      regions = (VkImageCopy *)(cmd + 1);
      memcpy(regions, pRegions, regionCount * sizeof(VkImageCopy));
      cmd->u.copy_image.regions = regions;
   }

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdCopyBuffer(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    srcBuffer,
   VkBuffer                                    destBuffer,
   uint32_t                                    regionCount,
   const VkBufferCopy*                         pRegions)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, src_buffer, srcBuffer);
   VAL_FROM_HANDLE(val_buffer, dest_buffer, destBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = regionCount * sizeof(VkBufferCopy);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_COPY_BUFFER);
   if (!cmd)
      return;

   cmd->u.copy_buffer.src = src_buffer;
   cmd->u.copy_buffer.dst = dest_buffer;
   cmd->u.copy_buffer.region_count = regionCount;

   {
      VkBufferCopy *regions;

      regions = (VkBufferCopy *)(cmd + 1);
      memcpy(regions, pRegions, regionCount * sizeof(VkBufferCopy));
      cmd->u.copy_buffer.regions = regions;
   }

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdBlitImage(
   VkCommandBuffer                             commandBuffer,
   VkImage                                     srcImage,
   VkImageLayout                               srcImageLayout,
   VkImage                                     destImage,
   VkImageLayout                               destImageLayout,
   uint32_t                                    regionCount,
   const VkImageBlit*                          pRegions,
   VkFilter                                    filter)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_image, src_image, srcImage);
   VAL_FROM_HANDLE(val_image, dest_image, destImage);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = regionCount * sizeof(VkImageBlit);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_BLIT_IMAGE);
   if (!cmd)
      return;

   cmd->u.blit_image.src = src_image;
   cmd->u.blit_image.dst = dest_image;
   cmd->u.blit_image.src_layout = srcImageLayout;
   cmd->u.blit_image.dst_layout = destImageLayout;
   cmd->u.blit_image.filter = filter;
   cmd->u.blit_image.region_count = regionCount;

   {
      VkImageBlit *regions;

      regions = (VkImageBlit *)(cmd + 1);
      memcpy(regions, pRegions, regionCount * sizeof(VkImageBlit));
      cmd->u.blit_image.regions = regions;
   }

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdClearAttachments(
   VkCommandBuffer                             commandBuffer,
   uint32_t                                    attachmentCount,
   const VkClearAttachment*                    pAttachments,
   uint32_t                                    rectCount,
   const VkClearRect*                          pRects)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = attachmentCount * sizeof(VkClearAttachment) + rectCount * sizeof(VkClearRect);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_CLEAR_ATTACHMENTS);
   if (!cmd)
      return;

   /* Trailing payload: the attachment array first, then the rect array. */
   cmd->u.clear_attachments.attachment_count = attachmentCount;
   cmd->u.clear_attachments.attachments = (VkClearAttachment *)(cmd + 1);
   for (unsigned i = 0; i < attachmentCount; i++)
      cmd->u.clear_attachments.attachments[i] = pAttachments[i];
   cmd->u.clear_attachments.rect_count = rectCount;
   cmd->u.clear_attachments.rects = (VkClearRect *)(cmd->u.clear_attachments.attachments + attachmentCount);
   for (unsigned i = 0; i < rectCount; i++)
      cmd->u.clear_attachments.rects[i] = pRects[i];

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdFillBuffer(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    dstBuffer,
   VkDeviceSize                                dstOffset,
   VkDeviceSize                                fillSize,
   uint32_t                                    data)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, dst_buffer, dstBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_FILL_BUFFER);
   if (!cmd)
      return;

   cmd->u.fill_buffer.buffer = dst_buffer;
   cmd->u.fill_buffer.offset = dstOffset;
   cmd->u.fill_buffer.fill_size = fillSize;
   cmd->u.fill_buffer.data = data;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdUpdateBuffer(
   VkCommandBuffer                             commandBuffer,
   VkBuffer                                    dstBuffer,
   VkDeviceSize                                dstOffset,
   VkDeviceSize                                dataSize,
   const void*                                 pData)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_buffer, dst_buffer, dstBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, dataSize, VAL_CMD_UPDATE_BUFFER);
   if (!cmd)
      return;

   cmd->u.update_buffer.buffer = dst_buffer;
   cmd->u.update_buffer.offset = dstOffset;
   cmd->u.update_buffer.data_size = dataSize;
   memcpy(cmd->u.update_buffer.data, pData, dataSize);

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdClearColorImage(
   VkCommandBuffer                             commandBuffer,
   VkImage                                     image_h,
   VkImageLayout                               imageLayout,
   const VkClearColorValue*                    pColor,
   uint32_t                                    rangeCount,
   const VkImageSubresourceRange*              pRanges)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_image, image, image_h);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = rangeCount * sizeof(VkImageSubresourceRange);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_CLEAR_COLOR_IMAGE);
   if (!cmd)
      return;

   cmd->u.clear_color_image.image = image;
   cmd->u.clear_color_image.layout = imageLayout;
   cmd->u.clear_color_image.clear_val = *pColor;
   cmd->u.clear_color_image.range_count = rangeCount;
   cmd->u.clear_color_image.ranges = (VkImageSubresourceRange *)(cmd + 1);
   for (unsigned i = 0; i < rangeCount; i++)
      cmd->u.clear_color_image.ranges[i] = pRanges[i];

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdClearDepthStencilImage(
   VkCommandBuffer                             commandBuffer,
   VkImage                                     image_h,
   VkImageLayout                               imageLayout,
   const VkClearDepthStencilValue*             pDepthStencil,
   uint32_t                                    rangeCount,
   const VkImageSubresourceRange*              pRanges)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_image, image, image_h);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = rangeCount * sizeof(VkImageSubresourceRange);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_CLEAR_DEPTH_STENCIL_IMAGE);
   if (!cmd)
      return;

   cmd->u.clear_ds_image.image = image;
   cmd->u.clear_ds_image.layout = imageLayout;
   cmd->u.clear_ds_image.clear_val = *pDepthStencil;
   cmd->u.clear_ds_image.range_count = rangeCount;
   cmd->u.clear_ds_image.ranges = (VkImageSubresourceRange *)(cmd + 1);
   for (unsigned i = 0; i < rangeCount; i++)
      cmd->u.clear_ds_image.ranges[i] = pRanges[i];

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdResolveImage(
   VkCommandBuffer                             commandBuffer,
   VkImage                                     srcImage,
   VkImageLayout                               srcImageLayout,
   VkImage                                     destImage,
   VkImageLayout                               destImageLayout,
   uint32_t                                    regionCount,
   const VkImageResolve*                       regions)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_image, src_image, srcImage);
   VAL_FROM_HANDLE(val_image, dst_image, destImage);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = regionCount * sizeof(VkImageResolve);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_RESOLVE_IMAGE);
   if (!cmd)
      return;

   cmd->u.resolve_image.src = src_image;
   cmd->u.resolve_image.dst = dst_image;
   cmd->u.resolve_image.src_layout = srcImageLayout;
   cmd->u.resolve_image.dst_layout = destImageLayout;
   cmd->u.resolve_image.region_count = regionCount;
   cmd->u.resolve_image.regions = (VkImageResolve *)(cmd + 1);
   for (unsigned i = 0; i < regionCount; i++)
      cmd->u.resolve_image.regions[i] = regions[i];

   cmd_buf_queue(cmd_buffer, cmd);
}
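
/* The query commands share the u.query payload. Note two quirks carried in
 * the recording: vkCmdResetQueryPool stores queryCount in the index field,
 * and vkCmdBeginQueryIndexedEXT hardcodes precise = true instead of
 * checking flags for VK_QUERY_CONTROL_PRECISE_BIT.
 */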
void val_CmdResetQueryPool(
   VkCommandBuffer                             commandBuffer,
   VkQueryPool                                 queryPool,
   uint32_t                                    firstQuery,
   uint32_t                                    queryCount)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_query_pool, query_pool, queryPool);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_RESET_QUERY_POOL);
   if (!cmd)
      return;

   cmd->u.query.pool = query_pool;
   cmd->u.query.query = firstQuery;
   cmd->u.query.index = queryCount;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdBeginQueryIndexedEXT(
   VkCommandBuffer                             commandBuffer,
   VkQueryPool                                 queryPool,
   uint32_t                                    query,
   VkQueryControlFlags                         flags,
   uint32_t                                    index)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_query_pool, query_pool, queryPool);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_BEGIN_QUERY);
   if (!cmd)
      return;

   cmd->u.query.pool = query_pool;
   cmd->u.query.query = query;
   cmd->u.query.index = index;
   cmd->u.query.precise = true;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdBeginQuery(
   VkCommandBuffer                             commandBuffer,
   VkQueryPool                                 queryPool,
   uint32_t                                    query,
   VkQueryControlFlags                         flags)
{
   val_CmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, 0);
}

void val_CmdEndQueryIndexedEXT(
   VkCommandBuffer                             commandBuffer,
   VkQueryPool                                 queryPool,
   uint32_t                                    query,
   uint32_t                                    index)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_query_pool, query_pool, queryPool);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_END_QUERY);
   if (!cmd)
      return;

   cmd->u.query.pool = query_pool;
   cmd->u.query.query = query;
   cmd->u.query.index = index;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdEndQuery(
   VkCommandBuffer                             commandBuffer,
   VkQueryPool                                 queryPool,
   uint32_t                                    query)
{
   val_CmdEndQueryIndexedEXT(commandBuffer, queryPool, query, 0);
}

void val_CmdWriteTimestamp(
   VkCommandBuffer                             commandBuffer,
   VkPipelineStageFlagBits                     pipelineStage,
   VkQueryPool                                 queryPool,
   uint32_t                                    query)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_query_pool, query_pool, queryPool);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_WRITE_TIMESTAMP);
   if (!cmd)
      return;

   cmd->u.query.pool = query_pool;
   cmd->u.query.query = query;
   cmd->u.query.flush = !(pipelineStage == VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdCopyQueryPoolResults(
   VkCommandBuffer                             commandBuffer,
   VkQueryPool                                 queryPool,
   uint32_t                                    firstQuery,
   uint32_t                                    queryCount,
   VkBuffer                                    dstBuffer,
   VkDeviceSize                                dstOffset,
   VkDeviceSize                                stride,
   VkQueryResultFlags                          flags)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   VAL_FROM_HANDLE(val_query_pool, query_pool, queryPool);
   VAL_FROM_HANDLE(val_buffer, buffer, dstBuffer);
   struct val_cmd_buffer_entry *cmd;

   cmd = cmd_buf_entry_alloc(cmd_buffer, VAL_CMD_COPY_QUERY_POOL_RESULTS);
   if (!cmd)
      return;

   cmd->u.copy_query_pool_results.pool = query_pool;
   cmd->u.copy_query_pool_results.first_query = firstQuery;
   cmd->u.copy_query_pool_results.query_count = queryCount;
   cmd->u.copy_query_pool_results.dst = buffer;
   cmd->u.copy_query_pool_results.dst_offset = dstOffset;
   cmd->u.copy_query_pool_results.stride = stride;
   cmd->u.copy_query_pool_results.flags = flags;

   cmd_buf_queue(cmd_buffer, cmd);
}

void val_CmdPipelineBarrier(
   VkCommandBuffer                             commandBuffer,
   VkPipelineStageFlags                        srcStageMask,
   VkPipelineStageFlags                        destStageMask,
   VkBool32                                    byRegion,
   uint32_t                                    memoryBarrierCount,
   const VkMemoryBarrier*                      pMemoryBarriers,
   uint32_t                                    bufferMemoryBarrierCount,
   const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
   uint32_t                                    imageMemoryBarrierCount,
   const VkImageMemoryBarrier*                 pImageMemoryBarriers)
{
   VAL_FROM_HANDLE(val_cmd_buffer, cmd_buffer, commandBuffer);
   struct val_cmd_buffer_entry *cmd;
   uint32_t cmd_size = 0;

   cmd_size += memoryBarrierCount * sizeof(VkMemoryBarrier);
   cmd_size += bufferMemoryBarrierCount * sizeof(VkBufferMemoryBarrier);
   cmd_size += imageMemoryBarrierCount * sizeof(VkImageMemoryBarrier);

   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, VAL_CMD_PIPELINE_BARRIER);
   if (!cmd)
      return;

   cmd->u.pipeline_barrier.src_stage_mask = srcStageMask;
   cmd->u.pipeline_barrier.dst_stage_mask = destStageMask;
   cmd->u.pipeline_barrier.by_region = byRegion;
   cmd->u.pipeline_barrier.memory_barrier_count = memoryBarrierCount;
   cmd->u.pipeline_barrier.buffer_memory_barrier_count = bufferMemoryBarrierCount;
   cmd->u.pipeline_barrier.image_memory_barrier_count = imageMemoryBarrierCount;

   /* TODO finish off this */
   cmd_buf_queue(cmd_buffer, cmd);
}