/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
34 #include "util/macros.h"
35 #include "util/list.h"
37 #include "compiler/shader_enums.h"
38 #include "pipe/p_screen.h"
39 #include "pipe/p_state.h"
/* Pre-declarations needed for WSI entrypoints: avoid pulling in the real
 * xcb headers just to spell the handle types used by the VK_KHR_xcb_surface
 * prototypes. xcb_visualid_t/xcb_window_t are plain 32-bit ids in XCB. */
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;
50 #include <vulkan/vulkan.h>
51 #include <vulkan/vk_icd.h>
53 #include "val_extensions.h"
54 #include "val_entrypoints.h"
55 #include "vk_object.h"
57 #include "wsi_common.h"
/* Vulkan guarantees at least 128 bytes of push constants; this driver
 * exposes exactly that minimum. */
#define MAX_PUSH_CONSTANTS_SIZE 128

/* Annotate printf-style functions so the compiler checks format strings
 * (argument `a` is the format string index, `b` the first vararg index). */
#define val_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
/* memcpy `count` elements, sizing from the *source* element type so a
 * mismatched destination type still copies the right number of bytes.
 * Uses a GNU statement expression so it can appear in expression context. */
#define typed_memcpy(dest, src, count) ({ \
   memcpy((dest), (src), (count) * sizeof(*(src))); \
})
/* Generated entrypoint tables (see val_entrypoints.h): map a "vkFoo" name
 * to a table index, and back from an index to its name. A negative index /
 * NULL name means the entrypoint is unknown. */
int val_get_instance_entrypoint_index(const char *name);
int val_get_device_entrypoint_index(const char *name);
int val_get_physical_device_entrypoint_index(const char *name);

const char *val_get_instance_entry_name(int index);
const char *val_get_physical_device_entry_name(int index);
const char *val_get_device_entry_name(int index);
/* Query whether an entrypoint (by generated table index) is exposed given
 * the instance's core version and the enabled extension sets. Device-level
 * entrypoints additionally depend on the device extension table. */
bool val_instance_entrypoint_is_enabled(int index, uint32_t core_version,
                                        const struct val_instance_extension_table *instance);
bool val_physical_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                               const struct val_instance_extension_table *instance);
bool val_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                      const struct val_instance_extension_table *instance,
                                      const struct val_device_extension_table *device);

/* Resolve an entrypoint name to its function pointer (NULL if unknown). */
void *val_lookup_entrypoint(const char *name);
/* Define from_handle()/to_handle() helpers for a *dispatchable* Vulkan
 * handle. Dispatchable handles are pointers, so a plain cast is enough. */
#define VAL_DEFINE_HANDLE_CASTS(__val_type, __VkType)                      \
                                                                           \
   static inline struct __val_type *                                       \
   __val_type ## _from_handle(__VkType _handle)                            \
   {                                                                       \
      return (struct __val_type *) _handle;                                \
   }                                                                       \
                                                                           \
   static inline __VkType                                                  \
   __val_type ## _to_handle(struct __val_type *_obj)                       \
   {                                                                       \
      return (__VkType) _obj;                                              \
   }
/* Define from_handle()/to_handle() helpers for a *non-dispatchable* Vulkan
 * handle. Non-dispatchable handles may be 64-bit integers on 32-bit builds,
 * so round-trip through uintptr_t to keep the casts well-defined. */
#define VAL_DEFINE_NONDISP_HANDLE_CASTS(__val_type, __VkType)              \
                                                                           \
   static inline struct __val_type *                                       \
   __val_type ## _from_handle(__VkType _handle)                            \
   {                                                                       \
      return (struct __val_type *)(uintptr_t) _handle;                     \
   }                                                                       \
                                                                           \
   static inline __VkType                                                  \
   __val_type ## _to_handle(struct __val_type *_obj)                       \
   {                                                                       \
      return (__VkType)(uintptr_t) _obj;                                   \
   }
/* Declare a local `struct __val_type *__name` initialized from a Vulkan
 * handle — the standard entry-point prologue for unwrapping parameters. */
#define VAL_FROM_HANDLE(__val_type, __name, __handle) \
   struct __val_type *__name = __val_type ## _from_handle(__handle)
122 VAL_DEFINE_HANDLE_CASTS(val_cmd_buffer
, VkCommandBuffer
)
123 VAL_DEFINE_HANDLE_CASTS(val_device
, VkDevice
)
124 VAL_DEFINE_HANDLE_CASTS(val_instance
, VkInstance
)
125 VAL_DEFINE_HANDLE_CASTS(val_physical_device
, VkPhysicalDevice
)
126 VAL_DEFINE_HANDLE_CASTS(val_queue
, VkQueue
)
128 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_cmd_pool
, VkCommandPool
)
129 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_buffer
, VkBuffer
)
130 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_buffer_view
, VkBufferView
)
131 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_descriptor_pool
, VkDescriptorPool
)
132 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_descriptor_set
, VkDescriptorSet
)
133 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_descriptor_set_layout
, VkDescriptorSetLayout
)
134 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_device_memory
, VkDeviceMemory
)
135 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_event
, VkEvent
)
136 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_framebuffer
, VkFramebuffer
)
137 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_image
, VkImage
)
138 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_image_view
, VkImageView
);
139 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_pipeline_cache
, VkPipelineCache
)
140 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_pipeline
, VkPipeline
)
141 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_pipeline_layout
, VkPipelineLayout
)
142 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_query_pool
, VkQueryPool
)
143 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_render_pass
, VkRenderPass
)
144 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_sampler
, VkSampler
)
145 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_shader_module
, VkShaderModule
)
146 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_fence
, VkFence
);
147 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_semaphore
, VkSemaphore
);
149 /* Whenever we generate an error, pass it through this function. Useful for
150 * debugging, where we can break on it. Only call at error site, not when
151 * propagating errors. Might be useful to plug in a stack trace here.
154 VkResult
__vk_errorf(struct val_instance
*instance
, VkResult error
, const char *file
, int line
, const char *format
, ...);
/* Debug flag bits for val_instance::debug_flags. */
#define VAL_DEBUG_ALL_ENTRYPOINTS (1 << 0)

/* Report an error with source location attached. No trailing semicolon in
 * the expansion: the caller supplies it, so these stay usable inside
 * `return vk_error(...)` and `if (...) vk_error(...); else ...` forms. */
#define vk_error(instance, error) __vk_errorf(instance, error, __FILE__, __LINE__, NULL)
#define vk_errorf(instance, error, format, ...) __vk_errorf(instance, error, __FILE__, __LINE__, format, ## __VA_ARGS__)
161 void __val_finishme(const char *file
, int line
, const char *format
, ...)
162 val_printflike(3, 4);
164 #define val_finishme(format, ...) \
165 __val_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);
167 #define stub_return(v) \
169 val_finishme("stub %s", __func__); \
175 val_finishme("stub %s", __func__); \
179 struct val_shader_module
{
180 struct vk_object_base base
;
185 static inline gl_shader_stage
186 vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage
)
188 assert(__builtin_popcount(vk_stage
) == 1);
189 return ffs(vk_stage
) - 1;
192 static inline VkShaderStageFlagBits
193 mesa_to_vk_shader_stage(gl_shader_stage mesa_stage
)
195 return (1 << mesa_stage
);
/* Mask covering every valid gl_shader_stage bit. */
#define VAL_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate `stage` over each stage bit set in `stage_bits`, lowest first.
 * `__tmp` holds the remaining bits; each iteration extracts the lowest set
 * bit into `stage` and clears it from `__tmp`. */
#define val_foreach_stage(stage, stage_bits)                         \
   for (gl_shader_stage stage,                                       \
        __tmp = (gl_shader_stage)((stage_bits) & VAL_STAGE_MASK);    \
        stage = __builtin_ffs(__tmp) - 1, __tmp;                     \
        __tmp &= ~(1 << (stage)))
206 struct val_physical_device
{
207 VK_LOADER_DATA _loader_data
;
208 struct val_instance
* instance
;
210 struct pipe_loader_device
*pld
;
211 struct pipe_screen
*pscreen
;
214 struct wsi_device wsi_device
;
215 struct val_device_extension_table supported_extensions
;
218 struct val_instance
{
219 struct vk_object_base base
;
221 VkAllocationCallbacks alloc
;
224 int physicalDeviceCount
;
225 struct val_physical_device physicalDevice
;
227 uint64_t debug_flags
;
229 struct pipe_loader_device
*devs
;
232 struct val_instance_extension_table enabled_extensions
;
233 struct val_instance_dispatch_table dispatch
;
234 struct val_physical_device_dispatch_table physical_device_dispatch
;
235 struct val_device_dispatch_table device_dispatch
;
238 VkResult
val_init_wsi(struct val_physical_device
*physical_device
);
239 void val_finish_wsi(struct val_physical_device
*physical_device
);
241 bool val_instance_extension_supported(const char *name
);
242 uint32_t val_physical_device_api_version(struct val_physical_device
*dev
);
243 bool val_physical_device_extension_supported(struct val_physical_device
*dev
,
247 VK_LOADER_DATA _loader_data
;
248 VkDeviceQueueCreateFlags flags
;
249 struct val_device
* device
;
250 struct pipe_context
*ctx
;
255 struct list_head workqueue
;
259 struct val_queue_work
{
260 struct list_head list
;
261 uint32_t cmd_buffer_count
;
262 struct val_cmd_buffer
**cmd_buffers
;
263 struct val_fence
*fence
;
266 struct val_pipeline_cache
{
267 struct vk_object_base base
;
268 struct val_device
* device
;
269 VkAllocationCallbacks alloc
;
275 VkAllocationCallbacks alloc
;
277 struct val_queue queue
;
278 struct val_instance
* instance
;
279 struct val_physical_device
*physical_device
;
280 struct pipe_screen
*pscreen
;
283 struct val_device_extension_table enabled_extensions
;
284 struct val_device_dispatch_table dispatch
;
287 void val_device_get_cache_uuid(void *uuid
);
289 struct val_device_memory
{
290 struct vk_object_base base
;
291 struct pipe_memory_allocation
*pmem
;
293 VkDeviceSize map_size
;
298 struct vk_object_base base
;
303 struct pipe_resource
*bo
;
306 static inline uint32_t
307 val_get_layerCount(const struct val_image
*image
,
308 const VkImageSubresourceRange
*range
)
310 return range
->layerCount
== VK_REMAINING_ARRAY_LAYERS
?
311 image
->bo
->array_size
- range
->baseArrayLayer
: range
->layerCount
;
314 static inline uint32_t
315 val_get_levelCount(const struct val_image
*image
,
316 const VkImageSubresourceRange
*range
)
318 return range
->levelCount
== VK_REMAINING_MIP_LEVELS
?
319 (image
->bo
->last_level
+ 1) - range
->baseMipLevel
: range
->levelCount
;
322 struct val_image_create_info
{
323 const VkImageCreateInfo
*vk_info
;
329 val_image_create(VkDevice _device
,
330 const struct val_image_create_info
*create_info
,
331 const VkAllocationCallbacks
* alloc
,
334 struct val_image_view
{
335 struct vk_object_base base
;
336 const struct val_image
*image
; /**< VkImageViewCreateInfo::image */
338 VkImageViewType view_type
;
340 enum pipe_format pformat
;
341 VkComponentMapping components
;
342 VkImageSubresourceRange subresourceRange
;
344 struct pipe_surface
*surface
; /* have we created a pipe surface for this? */
347 struct val_subpass_attachment
{
349 VkImageLayout layout
;
354 uint32_t attachment_count
;
355 struct val_subpass_attachment
* attachments
;
357 uint32_t input_count
;
358 uint32_t color_count
;
359 struct val_subpass_attachment
* input_attachments
;
360 struct val_subpass_attachment
* color_attachments
;
361 struct val_subpass_attachment
* resolve_attachments
;
362 struct val_subpass_attachment
* depth_stencil_attachment
;
363 struct val_subpass_attachment
* ds_resolve_attachment
;
365 /** Subpass has at least one color resolve attachment */
366 bool has_color_resolve
;
368 /** Subpass has at least one color attachment */
371 VkSampleCountFlagBits max_sample_count
;
374 struct val_render_pass_attachment
{
377 VkAttachmentLoadOp load_op
;
378 VkAttachmentLoadOp stencil_load_op
;
379 VkImageLayout initial_layout
;
380 VkImageLayout final_layout
;
382 /* The subpass id in which the attachment will be used first/last. */
383 uint32_t first_subpass_idx
;
384 uint32_t last_subpass_idx
;
387 struct val_render_pass
{
388 struct vk_object_base base
;
389 uint32_t attachment_count
;
390 uint32_t subpass_count
;
391 struct val_subpass_attachment
* subpass_attachments
;
392 struct val_render_pass_attachment
* attachments
;
393 struct val_subpass subpasses
[0];
397 struct vk_object_base base
;
398 VkSamplerCreateInfo create_info
;
402 struct val_framebuffer
{
403 struct vk_object_base base
;
408 uint32_t attachment_count
;
409 struct val_image_view
* attachments
[0];
412 struct val_descriptor_set_binding_layout
{
413 uint16_t descriptor_index
;
414 /* Number of array elements in this binding */
415 VkDescriptorType type
;
419 int16_t dynamic_index
;
421 int16_t const_buffer_index
;
422 int16_t shader_buffer_index
;
423 int16_t sampler_index
;
424 int16_t sampler_view_index
;
426 } stage
[MESA_SHADER_STAGES
];
428 /* Immutable samplers (or NULL if no immutable samplers) */
429 struct val_sampler
**immutable_samplers
;
432 struct val_descriptor_set_layout
{
433 struct vk_object_base base
;
434 /* Number of bindings in this descriptor set */
435 uint16_t binding_count
;
437 /* Total size of the descriptor set with room for all array entries */
440 /* Shader stages affected by this descriptor set */
441 uint16_t shader_stages
;
444 uint16_t const_buffer_count
;
445 uint16_t shader_buffer_count
;
446 uint16_t sampler_count
;
447 uint16_t sampler_view_count
;
448 uint16_t image_count
;
449 } stage
[MESA_SHADER_STAGES
];
451 /* Number of dynamic offsets used by this descriptor set */
452 uint16_t dynamic_offset_count
;
454 /* Bindings in this descriptor set */
455 struct val_descriptor_set_binding_layout binding
[0];
458 struct val_descriptor
{
459 VkDescriptorType type
;
463 struct val_image_view
*image_view
;
464 struct val_sampler
*sampler
;
469 struct val_buffer
*buffer
;
471 struct val_buffer_view
*buffer_view
;
475 struct val_descriptor_set
{
476 struct vk_object_base base
;
477 const struct val_descriptor_set_layout
*layout
;
478 struct list_head link
;
479 struct val_descriptor descriptors
[0];
482 struct val_descriptor_pool
{
483 struct vk_object_base base
;
484 VkDescriptorPoolCreateFlags flags
;
487 struct list_head sets
;
491 val_descriptor_set_create(struct val_device
*device
,
492 const struct val_descriptor_set_layout
*layout
,
493 struct val_descriptor_set
**out_set
);
496 val_descriptor_set_destroy(struct val_device
*device
,
497 struct val_descriptor_set
*set
);
499 struct val_pipeline_layout
{
500 struct vk_object_base base
;
502 struct val_descriptor_set_layout
*layout
;
503 uint32_t dynamic_offset_start
;
507 uint32_t push_constant_size
;
509 bool has_dynamic_offsets
;
510 } stage
[MESA_SHADER_STAGES
];
513 struct val_pipeline
{
514 struct vk_object_base base
;
515 struct val_device
* device
;
516 struct val_pipeline_layout
* layout
;
518 bool is_compute_pipeline
;
519 bool force_min_sample
;
520 nir_shader
*pipeline_nir
[MESA_SHADER_STAGES
];
521 void *shader_cso
[PIPE_SHADER_TYPES
];
522 VkGraphicsPipelineCreateInfo graphics_create_info
;
523 VkComputePipelineCreateInfo compute_create_info
;
527 struct vk_object_base base
;
528 uint64_t event_storage
;
532 struct vk_object_base base
;
534 struct pipe_fence_handle
*handle
;
537 struct val_semaphore
{
538 struct vk_object_base base
;
543 struct vk_object_base base
;
544 struct val_device
* device
;
547 VkBufferUsageFlags usage
;
550 struct pipe_resource
*bo
;
554 struct val_buffer_view
{
555 struct vk_object_base base
;
557 enum pipe_format pformat
;
558 struct val_buffer
*buffer
;
563 struct val_query_pool
{
564 struct vk_object_base base
;
567 enum pipe_query_type base_type
;
568 struct pipe_query
*queries
[0];
571 struct val_cmd_pool
{
572 struct vk_object_base base
;
573 VkAllocationCallbacks alloc
;
574 struct list_head cmd_buffers
;
575 struct list_head free_cmd_buffers
;
579 enum val_cmd_buffer_status
{
580 VAL_CMD_BUFFER_STATUS_INVALID
,
581 VAL_CMD_BUFFER_STATUS_INITIAL
,
582 VAL_CMD_BUFFER_STATUS_RECORDING
,
583 VAL_CMD_BUFFER_STATUS_EXECUTABLE
,
584 VAL_CMD_BUFFER_STATUS_PENDING
,
587 struct val_cmd_buffer
{
588 struct vk_object_base base
;
590 struct val_device
* device
;
592 VkCommandBufferLevel level
;
593 enum val_cmd_buffer_status status
;
594 struct val_cmd_pool
* pool
;
595 struct list_head pool_link
;
597 struct list_head cmds
;
599 uint8_t push_constants
[MAX_PUSH_CONSTANTS_SIZE
];
602 /* in same order and buffer building commands in spec. */
604 VAL_CMD_BIND_PIPELINE
,
605 VAL_CMD_SET_VIEWPORT
,
607 VAL_CMD_SET_LINE_WIDTH
,
608 VAL_CMD_SET_DEPTH_BIAS
,
609 VAL_CMD_SET_BLEND_CONSTANTS
,
610 VAL_CMD_SET_DEPTH_BOUNDS
,
611 VAL_CMD_SET_STENCIL_COMPARE_MASK
,
612 VAL_CMD_SET_STENCIL_WRITE_MASK
,
613 VAL_CMD_SET_STENCIL_REFERENCE
,
614 VAL_CMD_BIND_DESCRIPTOR_SETS
,
615 VAL_CMD_BIND_INDEX_BUFFER
,
616 VAL_CMD_BIND_VERTEX_BUFFERS
,
618 VAL_CMD_DRAW_INDEXED
,
619 VAL_CMD_DRAW_INDIRECT
,
620 VAL_CMD_DRAW_INDEXED_INDIRECT
,
622 VAL_CMD_DISPATCH_INDIRECT
,
626 VAL_CMD_COPY_BUFFER_TO_IMAGE
,
627 VAL_CMD_COPY_IMAGE_TO_BUFFER
,
628 VAL_CMD_UPDATE_BUFFER
,
630 VAL_CMD_CLEAR_COLOR_IMAGE
,
631 VAL_CMD_CLEAR_DEPTH_STENCIL_IMAGE
,
632 VAL_CMD_CLEAR_ATTACHMENTS
,
633 VAL_CMD_RESOLVE_IMAGE
,
637 VAL_CMD_PIPELINE_BARRIER
,
640 VAL_CMD_RESET_QUERY_POOL
,
641 VAL_CMD_WRITE_TIMESTAMP
,
642 VAL_CMD_COPY_QUERY_POOL_RESULTS
,
643 VAL_CMD_PUSH_CONSTANTS
,
644 VAL_CMD_BEGIN_RENDER_PASS
,
645 VAL_CMD_NEXT_SUBPASS
,
646 VAL_CMD_END_RENDER_PASS
,
647 VAL_CMD_EXECUTE_COMMANDS
,
650 struct val_cmd_bind_pipeline
{
651 VkPipelineBindPoint bind_point
;
652 struct val_pipeline
*pipeline
;
655 struct val_cmd_set_viewport
{
656 uint32_t first_viewport
;
657 uint32_t viewport_count
;
658 VkViewport viewports
[16];
661 struct val_cmd_set_scissor
{
662 uint32_t first_scissor
;
663 uint32_t scissor_count
;
664 VkRect2D scissors
[16];
667 struct val_cmd_set_line_width
{
671 struct val_cmd_set_depth_bias
{
672 float constant_factor
;
677 struct val_cmd_set_blend_constants
{
678 float blend_constants
[4];
681 struct val_cmd_set_depth_bounds
{
686 struct val_cmd_set_stencil_vals
{
687 VkStencilFaceFlags face_mask
;
691 struct val_cmd_bind_descriptor_sets
{
692 VkPipelineBindPoint bind_point
;
693 struct val_pipeline_layout
*layout
;
696 struct val_descriptor_set
**sets
;
697 uint32_t dynamic_offset_count
;
698 const uint32_t *dynamic_offsets
;
701 struct val_cmd_bind_index_buffer
{
702 const struct val_buffer
*buffer
;
704 VkIndexType index_type
;
707 struct val_cmd_bind_vertex_buffers
{
709 uint32_t binding_count
;
710 struct val_buffer
**buffers
;
711 const VkDeviceSize
*offsets
;
714 struct val_cmd_draw
{
715 uint32_t vertex_count
;
716 uint32_t instance_count
;
717 uint32_t first_vertex
;
718 uint32_t first_instance
;
721 struct val_cmd_draw_indexed
{
722 uint32_t index_count
;
723 uint32_t instance_count
;
724 uint32_t first_index
;
725 uint32_t vertex_offset
;
726 uint32_t first_instance
;
729 struct val_cmd_draw_indirect
{
731 struct val_buffer
*buffer
;
736 struct val_cmd_dispatch
{
742 struct val_cmd_dispatch_indirect
{
743 const struct val_buffer
*buffer
;
747 struct val_cmd_copy_buffer
{
748 struct val_buffer
*src
;
749 struct val_buffer
*dst
;
750 uint32_t region_count
;
751 const VkBufferCopy
*regions
;
754 struct val_cmd_copy_image
{
755 struct val_image
*src
;
756 struct val_image
*dst
;
757 VkImageLayout src_layout
;
758 VkImageLayout dst_layout
;
759 uint32_t region_count
;
760 const VkImageCopy
*regions
;
763 struct val_cmd_blit_image
{
764 struct val_image
*src
;
765 struct val_image
*dst
;
766 VkImageLayout src_layout
;
767 VkImageLayout dst_layout
;
768 uint32_t region_count
;
769 const VkImageBlit
*regions
;
773 struct val_cmd_copy_buffer_to_image
{
774 struct val_buffer
*src
;
775 struct val_image
*dst
;
776 VkImageLayout dst_layout
;
777 uint32_t region_count
;
778 const VkBufferImageCopy
*regions
;
781 struct val_cmd_copy_image_to_buffer
{
782 struct val_image
*src
;
783 struct val_buffer
*dst
;
784 VkImageLayout src_layout
;
785 uint32_t region_count
;
786 const VkBufferImageCopy
*regions
;
789 struct val_cmd_update_buffer
{
790 struct val_buffer
*buffer
;
792 VkDeviceSize data_size
;
796 struct val_cmd_fill_buffer
{
797 struct val_buffer
*buffer
;
799 VkDeviceSize fill_size
;
803 struct val_cmd_clear_color_image
{
804 struct val_image
*image
;
805 VkImageLayout layout
;
806 VkClearColorValue clear_val
;
807 uint32_t range_count
;
808 VkImageSubresourceRange
*ranges
;
811 struct val_cmd_clear_ds_image
{
812 struct val_image
*image
;
813 VkImageLayout layout
;
814 VkClearDepthStencilValue clear_val
;
815 uint32_t range_count
;
816 VkImageSubresourceRange
*ranges
;
819 struct val_cmd_clear_attachments
{
820 uint32_t attachment_count
;
821 VkClearAttachment
*attachments
;
826 struct val_cmd_resolve_image
{
827 struct val_image
*src
;
828 struct val_image
*dst
;
829 VkImageLayout src_layout
;
830 VkImageLayout dst_layout
;
831 uint32_t region_count
;
832 VkImageResolve
*regions
;
835 struct val_cmd_event_set
{
836 struct val_event
*event
;
841 struct val_cmd_wait_events
{
842 uint32_t event_count
;
843 struct val_event
**events
;
844 VkPipelineStageFlags src_stage_mask
;
845 VkPipelineStageFlags dst_stage_mask
;
846 uint32_t memory_barrier_count
;
847 VkMemoryBarrier
*memory_barriers
;
848 uint32_t buffer_memory_barrier_count
;
849 VkBufferMemoryBarrier
*buffer_memory_barriers
;
850 uint32_t image_memory_barrier_count
;
851 VkImageMemoryBarrier
*image_memory_barriers
;
854 struct val_cmd_pipeline_barrier
{
855 VkPipelineStageFlags src_stage_mask
;
856 VkPipelineStageFlags dst_stage_mask
;
858 uint32_t memory_barrier_count
;
859 VkMemoryBarrier
*memory_barriers
;
860 uint32_t buffer_memory_barrier_count
;
861 VkBufferMemoryBarrier
*buffer_memory_barriers
;
862 uint32_t image_memory_barrier_count
;
863 VkImageMemoryBarrier
*image_memory_barriers
;
866 struct val_cmd_query_cmd
{
867 struct val_query_pool
*pool
;
874 struct val_cmd_copy_query_pool_results
{
875 struct val_query_pool
*pool
;
876 uint32_t first_query
;
877 uint32_t query_count
;
878 struct val_buffer
*dst
;
879 VkDeviceSize dst_offset
;
881 VkQueryResultFlags flags
;
884 struct val_cmd_push_constants
{
885 VkShaderStageFlags stage
;
891 struct val_attachment_state
{
892 VkImageAspectFlags pending_clear_aspects
;
893 VkClearValue clear_value
;
896 struct val_cmd_begin_render_pass
{
897 struct val_framebuffer
*framebuffer
;
898 struct val_render_pass
*render_pass
;
899 VkRect2D render_area
;
900 struct val_attachment_state
*attachments
;
903 struct val_cmd_next_subpass
{
904 VkSubpassContents contents
;
907 struct val_cmd_execute_commands
{
908 uint32_t command_buffer_count
;
909 struct val_cmd_buffer
*cmd_buffers
[0];
912 struct val_cmd_buffer_entry
{
913 struct list_head cmd_link
;
916 struct val_cmd_bind_pipeline pipeline
;
917 struct val_cmd_set_viewport set_viewport
;
918 struct val_cmd_set_scissor set_scissor
;
919 struct val_cmd_set_line_width set_line_width
;
920 struct val_cmd_set_depth_bias set_depth_bias
;
921 struct val_cmd_set_blend_constants set_blend_constants
;
922 struct val_cmd_set_depth_bounds set_depth_bounds
;
923 struct val_cmd_set_stencil_vals stencil_vals
;
924 struct val_cmd_bind_descriptor_sets descriptor_sets
;
925 struct val_cmd_bind_vertex_buffers vertex_buffers
;
926 struct val_cmd_bind_index_buffer index_buffer
;
927 struct val_cmd_draw draw
;
928 struct val_cmd_draw_indexed draw_indexed
;
929 struct val_cmd_draw_indirect draw_indirect
;
930 struct val_cmd_dispatch dispatch
;
931 struct val_cmd_dispatch_indirect dispatch_indirect
;
932 struct val_cmd_copy_buffer copy_buffer
;
933 struct val_cmd_copy_image copy_image
;
934 struct val_cmd_blit_image blit_image
;
935 struct val_cmd_copy_buffer_to_image buffer_to_img
;
936 struct val_cmd_copy_image_to_buffer img_to_buffer
;
937 struct val_cmd_update_buffer update_buffer
;
938 struct val_cmd_fill_buffer fill_buffer
;
939 struct val_cmd_clear_color_image clear_color_image
;
940 struct val_cmd_clear_ds_image clear_ds_image
;
941 struct val_cmd_clear_attachments clear_attachments
;
942 struct val_cmd_resolve_image resolve_image
;
943 struct val_cmd_event_set event_set
;
944 struct val_cmd_wait_events wait_events
;
945 struct val_cmd_pipeline_barrier pipeline_barrier
;
946 struct val_cmd_query_cmd query
;
947 struct val_cmd_copy_query_pool_results copy_query_pool_results
;
948 struct val_cmd_push_constants push_constants
;
949 struct val_cmd_begin_render_pass begin_render_pass
;
950 struct val_cmd_next_subpass next_subpass
;
951 struct val_cmd_execute_commands execute_commands
;
955 VkResult
val_execute_cmds(struct val_device
*device
,
956 struct val_queue
*queue
,
957 struct val_fence
*fence
,
958 struct val_cmd_buffer
*cmd_buffer
);
960 enum pipe_format
vk_format_to_pipe(VkFormat format
);
962 static inline VkImageAspectFlags
963 vk_format_aspects(VkFormat format
)
966 case VK_FORMAT_UNDEFINED
:
969 case VK_FORMAT_S8_UINT
:
970 return VK_IMAGE_ASPECT_STENCIL_BIT
;
972 case VK_FORMAT_D16_UNORM_S8_UINT
:
973 case VK_FORMAT_D24_UNORM_S8_UINT
:
974 case VK_FORMAT_D32_SFLOAT_S8_UINT
:
975 return VK_IMAGE_ASPECT_DEPTH_BIT
| VK_IMAGE_ASPECT_STENCIL_BIT
;
977 case VK_FORMAT_D16_UNORM
:
978 case VK_FORMAT_X8_D24_UNORM_PACK32
:
979 case VK_FORMAT_D32_SFLOAT
:
980 return VK_IMAGE_ASPECT_DEPTH_BIT
;
983 return VK_IMAGE_ASPECT_COLOR_BIT
;