Added a few more stubs so that control reaches DestroyDevice().
[mesa.git] / src / gallium / frontends / vallium / val_private.h
1 /*
2 * Copyright © 2019 Red Hat.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #pragma once
25 #include <stdlib.h>
26 #include <stdio.h>
27 #include <stdbool.h>
28 #include <string.h>
29 #include <strings.h>
30 #include <pthread.h>
31 #include <assert.h>
32 #include <stdint.h>
33
34 #include "util/macros.h"
35 #include "util/list.h"
36
37 #include "compiler/shader_enums.h"
38 #include "pipe/p_screen.h"
39 #include "pipe/p_state.h"
40 #include "nir.h"
41
42 /* Pre-declarations needed for WSI entrypoints */
43 struct wl_surface;
44 struct wl_display;
45 typedef struct xcb_connection_t xcb_connection_t;
46 typedef uint32_t xcb_visualid_t;
47 typedef uint32_t xcb_window_t;
48
49 #define VK_PROTOTYPES
50 #include <vulkan/vulkan.h>
51 #include <vulkan/vk_icd.h>
52
53 #include "val_extensions.h"
54 #include "val_entrypoints.h"
55 #include "vk_object.h"
56
57 #include "wsi_common.h"
58
59 #include <assert.h>
60 #ifdef __cplusplus
61 extern "C" {
62 #endif
63
#define MAX_SETS                8
#define MAX_PUSH_CONSTANTS_SIZE 128

/* printf-style format checking for the error/debug helpers below. */
#define val_printflike(a, b) __attribute__((__format__(__printf__, a, b)))

/* Copy `count` elements, element size inferred from *src. */
#define typed_memcpy(dest, src, count) ({ \
   memcpy((dest), (src), (count) * sizeof(*(src))); \
})

/* Entrypoint-table lookups (implemented in generated val_entrypoints code). */
int val_get_instance_entrypoint_index(const char *name);
int val_get_device_entrypoint_index(const char *name);
int val_get_physical_device_entrypoint_index(const char *name);

const char *val_get_instance_entry_name(int index);
const char *val_get_physical_device_entry_name(int index);
const char *val_get_device_entry_name(int index);

bool val_instance_entrypoint_is_enabled(int index, uint32_t core_version,
                                        const struct val_instance_extension_table *instance);
bool val_physical_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                               const struct val_instance_extension_table *instance);
bool val_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                      const struct val_instance_extension_table *instance,
                                      const struct val_device_extension_table *device);

void *val_lookup_entrypoint(const char *name);
90
/* Generate to/from-handle casts for a dispatchable handle type, which is
 * pointer-sized, so a plain pointer cast suffices. */
#define VAL_DEFINE_HANDLE_CASTS(__val_type, __VkType)       \
                                                            \
   static inline struct __val_type *                        \
   __val_type ## _from_handle(__VkType _handle)             \
   {                                                        \
      return (struct __val_type *) _handle;                 \
   }                                                        \
                                                            \
   static inline __VkType                                   \
   __val_type ## _to_handle(struct __val_type *_obj)        \
   {                                                        \
      return (__VkType) _obj;                               \
   }

/* Same for non-dispatchable handles; these may not be pointer-sized, hence
 * the round-trip through uintptr_t. */
#define VAL_DEFINE_NONDISP_HANDLE_CASTS(__val_type, __VkType)  \
                                                               \
   static inline struct __val_type *                           \
   __val_type ## _from_handle(__VkType _handle)                \
   {                                                           \
      return (struct __val_type *)(uintptr_t) _handle;         \
   }                                                           \
                                                               \
   static inline __VkType                                      \
   __val_type ## _to_handle(struct __val_type *_obj)           \
   {                                                           \
      return (__VkType)(uintptr_t) _obj;                       \
   }

/* Declare and initialize a local from a Vulkan handle parameter. */
#define VAL_FROM_HANDLE(__val_type, __name, __handle) \
   struct __val_type *__name = __val_type ## _from_handle(__handle)
121
122 VAL_DEFINE_HANDLE_CASTS(val_cmd_buffer, VkCommandBuffer)
123 VAL_DEFINE_HANDLE_CASTS(val_device, VkDevice)
124 VAL_DEFINE_HANDLE_CASTS(val_instance, VkInstance)
125 VAL_DEFINE_HANDLE_CASTS(val_physical_device, VkPhysicalDevice)
126 VAL_DEFINE_HANDLE_CASTS(val_queue, VkQueue)
127
128 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_cmd_pool, VkCommandPool)
129 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_buffer, VkBuffer)
130 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_buffer_view, VkBufferView)
131 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_descriptor_pool, VkDescriptorPool)
132 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_descriptor_set, VkDescriptorSet)
133 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_descriptor_set_layout, VkDescriptorSetLayout)
134 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_device_memory, VkDeviceMemory)
135 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_event, VkEvent)
136 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_framebuffer, VkFramebuffer)
137 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_image, VkImage)
138 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_image_view, VkImageView);
139 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_pipeline_cache, VkPipelineCache)
140 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_pipeline, VkPipeline)
141 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_pipeline_layout, VkPipelineLayout)
142 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_query_pool, VkQueryPool)
143 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_render_pass, VkRenderPass)
144 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_sampler, VkSampler)
145 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_shader_module, VkShaderModule)
146 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_fence, VkFence);
147 VAL_DEFINE_NONDISP_HANDLE_CASTS(val_semaphore, VkSemaphore);
148
149 /* Whenever we generate an error, pass it through this function. Useful for
150 * debugging, where we can break on it. Only call at error site, not when
151 * propagating errors. Might be useful to plug in a stack trace here.
152 */
153
154 VkResult __vk_errorf(struct val_instance *instance, VkResult error, const char *file, int line, const char *format, ...);
155
156 #define VAL_DEBUG_ALL_ENTRYPOINTS (1 << 0)
157
158 #define vk_error(instance, error) __vk_errorf(instance, error, __FILE__, __LINE__, NULL);
159 #define vk_errorf(instance, error, format, ...) __vk_errorf(instance, error, __FILE__, __LINE__, format, ## __VA_ARGS__);
160
161 void __val_finishme(const char *file, int line, const char *format, ...)
162 val_printflike(3, 4);
163
164 #define val_finishme(format, ...) \
165 __val_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);
166
167 #define stub_return(v) \
168 do { \
169 val_finishme("stub %s", __func__); \
170 return (v); \
171 } while (0)
172
173 #define stub() \
174 do { \
175 val_finishme("stub %s", __func__); \
176 return; \
177 } while (0)
178
179 struct val_shader_module {
180 struct vk_object_base base;
181 uint32_t size;
182 char data[0];
183 };
184
185 static inline gl_shader_stage
186 vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
187 {
188 assert(__builtin_popcount(vk_stage) == 1);
189 return ffs(vk_stage) - 1;
190 }
191
192 static inline VkShaderStageFlagBits
193 mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
194 {
195 return (1 << mesa_stage);
196 }
197
#define VAL_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate `stage` over each stage bit set in `stage_bits` (lowest first),
 * clearing each bit from the temporary as it is visited. */
#define val_foreach_stage(stage, stage_bits)                         \
   for (gl_shader_stage stage,                                       \
        __tmp = (gl_shader_stage)((stage_bits) & VAL_STAGE_MASK);    \
        stage = __builtin_ffs(__tmp) - 1, __tmp;                     \
        __tmp &= ~(1 << (stage)))
205
206 struct val_physical_device {
207 VK_LOADER_DATA _loader_data;
208 struct val_instance * instance;
209
210 struct pipe_loader_device *pld;
211 struct pipe_screen *pscreen;
212 uint32_t max_images;
213
214 struct wsi_device wsi_device;
215 struct val_device_extension_table supported_extensions;
216 };
217
218 struct val_instance {
219 struct vk_object_base base;
220
221 VkAllocationCallbacks alloc;
222
223 uint32_t apiVersion;
224 int physicalDeviceCount;
225 struct val_physical_device physicalDevice;
226
227 uint64_t debug_flags;
228
229 struct pipe_loader_device *devs;
230 int num_devices;
231
232 struct val_instance_extension_table enabled_extensions;
233 struct val_instance_dispatch_table dispatch;
234 struct val_physical_device_dispatch_table physical_device_dispatch;
235 struct val_device_dispatch_table device_dispatch;
236 };
237
238 VkResult val_init_wsi(struct val_physical_device *physical_device);
239 void val_finish_wsi(struct val_physical_device *physical_device);
240
241 bool val_instance_extension_supported(const char *name);
242 uint32_t val_physical_device_api_version(struct val_physical_device *dev);
243 bool val_physical_device_extension_supported(struct val_physical_device *dev,
244 const char *name);
245
246 struct val_queue {
247 VK_LOADER_DATA _loader_data;
248 VkDeviceQueueCreateFlags flags;
249 struct val_device * device;
250 struct pipe_context *ctx;
251 bool shutdown;
252 thrd_t exec_thread;
253 mtx_t m;
254 cnd_t new_work;
255 struct list_head workqueue;
256 uint32_t count;
257 };
258
259 struct val_queue_work {
260 struct list_head list;
261 uint32_t cmd_buffer_count;
262 struct val_cmd_buffer **cmd_buffers;
263 struct val_fence *fence;
264 };
265
266 struct val_pipeline_cache {
267 struct vk_object_base base;
268 struct val_device * device;
269 VkAllocationCallbacks alloc;
270 };
271
272 struct val_device {
273 struct vk_device vk;
274
275 VkAllocationCallbacks alloc;
276
277 struct val_queue queue;
278 struct val_instance * instance;
279 struct val_physical_device *physical_device;
280 struct pipe_screen *pscreen;
281
282 mtx_t fence_lock;
283 struct val_device_extension_table enabled_extensions;
284 struct val_device_dispatch_table dispatch;
285 };
286
287 void val_device_get_cache_uuid(void *uuid);
288
289 struct val_device_memory {
290 struct vk_object_base base;
291 struct pipe_memory_allocation *pmem;
292 uint32_t type_index;
293 VkDeviceSize map_size;
294 void * map;
295 };
296
297 struct val_image {
298 struct vk_object_base base;
299 VkImageType type;
300 VkFormat vk_format;
301 VkDeviceSize size;
302 uint32_t alignment;
303 struct pipe_resource *bo;
304 };
305
306 static inline uint32_t
307 val_get_layerCount(const struct val_image *image,
308 const VkImageSubresourceRange *range)
309 {
310 return range->layerCount == VK_REMAINING_ARRAY_LAYERS ?
311 image->bo->array_size - range->baseArrayLayer : range->layerCount;
312 }
313
314 static inline uint32_t
315 val_get_levelCount(const struct val_image *image,
316 const VkImageSubresourceRange *range)
317 {
318 return range->levelCount == VK_REMAINING_MIP_LEVELS ?
319 (image->bo->last_level + 1) - range->baseMipLevel : range->levelCount;
320 }
321
322 struct val_image_create_info {
323 const VkImageCreateInfo *vk_info;
324 uint32_t bind_flags;
325 uint32_t stride;
326 };
327
328 VkResult
329 val_image_create(VkDevice _device,
330 const struct val_image_create_info *create_info,
331 const VkAllocationCallbacks* alloc,
332 VkImage *pImage);
333
334 struct val_image_view {
335 struct vk_object_base base;
336 const struct val_image *image; /**< VkImageViewCreateInfo::image */
337
338 VkImageViewType view_type;
339 VkFormat format;
340 enum pipe_format pformat;
341 VkComponentMapping components;
342 VkImageSubresourceRange subresourceRange;
343
344 struct pipe_surface *surface; /* have we created a pipe surface for this? */
345 };
346
347 struct val_subpass_attachment {
348 uint32_t attachment;
349 VkImageLayout layout;
350 bool in_render_loop;
351 };
352
353 struct val_subpass {
354 uint32_t attachment_count;
355 struct val_subpass_attachment * attachments;
356
357 uint32_t input_count;
358 uint32_t color_count;
359 struct val_subpass_attachment * input_attachments;
360 struct val_subpass_attachment * color_attachments;
361 struct val_subpass_attachment * resolve_attachments;
362 struct val_subpass_attachment * depth_stencil_attachment;
363 struct val_subpass_attachment * ds_resolve_attachment;
364
365 /** Subpass has at least one color resolve attachment */
366 bool has_color_resolve;
367
368 /** Subpass has at least one color attachment */
369 bool has_color_att;
370
371 VkSampleCountFlagBits max_sample_count;
372 };
373
374 struct val_render_pass_attachment {
375 VkFormat format;
376 uint32_t samples;
377 VkAttachmentLoadOp load_op;
378 VkAttachmentLoadOp stencil_load_op;
379 VkImageLayout initial_layout;
380 VkImageLayout final_layout;
381
382 /* The subpass id in which the attachment will be used first/last. */
383 uint32_t first_subpass_idx;
384 uint32_t last_subpass_idx;
385 };
386
387 struct val_render_pass {
388 struct vk_object_base base;
389 uint32_t attachment_count;
390 uint32_t subpass_count;
391 struct val_subpass_attachment * subpass_attachments;
392 struct val_render_pass_attachment * attachments;
393 struct val_subpass subpasses[0];
394 };
395
396 struct val_sampler {
397 struct vk_object_base base;
398 VkSamplerCreateInfo create_info;
399 uint32_t state[4];
400 };
401
402 struct val_framebuffer {
403 struct vk_object_base base;
404 uint32_t width;
405 uint32_t height;
406 uint32_t layers;
407
408 uint32_t attachment_count;
409 struct val_image_view * attachments[0];
410 };
411
412 struct val_descriptor_set_binding_layout {
413 uint16_t descriptor_index;
414 /* Number of array elements in this binding */
415 VkDescriptorType type;
416 uint16_t array_size;
417 bool valid;
418
419 int16_t dynamic_index;
420 struct {
421 int16_t const_buffer_index;
422 int16_t shader_buffer_index;
423 int16_t sampler_index;
424 int16_t sampler_view_index;
425 int16_t image_index;
426 } stage[MESA_SHADER_STAGES];
427
428 /* Immutable samplers (or NULL if no immutable samplers) */
429 struct val_sampler **immutable_samplers;
430 };
431
432 struct val_descriptor_set_layout {
433 struct vk_object_base base;
434 /* Number of bindings in this descriptor set */
435 uint16_t binding_count;
436
437 /* Total size of the descriptor set with room for all array entries */
438 uint16_t size;
439
440 /* Shader stages affected by this descriptor set */
441 uint16_t shader_stages;
442
443 struct {
444 uint16_t const_buffer_count;
445 uint16_t shader_buffer_count;
446 uint16_t sampler_count;
447 uint16_t sampler_view_count;
448 uint16_t image_count;
449 } stage[MESA_SHADER_STAGES];
450
451 /* Number of dynamic offsets used by this descriptor set */
452 uint16_t dynamic_offset_count;
453
454 /* Bindings in this descriptor set */
455 struct val_descriptor_set_binding_layout binding[0];
456 };
457
458 struct val_descriptor {
459 VkDescriptorType type;
460
461 union {
462 struct {
463 struct val_image_view *image_view;
464 struct val_sampler *sampler;
465 };
466 struct {
467 uint64_t offset;
468 uint64_t range;
469 struct val_buffer *buffer;
470 } buf;
471 struct val_buffer_view *buffer_view;
472 };
473 };
474
475 struct val_descriptor_set {
476 struct vk_object_base base;
477 const struct val_descriptor_set_layout *layout;
478 struct list_head link;
479 struct val_descriptor descriptors[0];
480 };
481
482 struct val_descriptor_pool {
483 struct vk_object_base base;
484 VkDescriptorPoolCreateFlags flags;
485 uint32_t max_sets;
486
487 struct list_head sets;
488 };
489
490 VkResult
491 val_descriptor_set_create(struct val_device *device,
492 const struct val_descriptor_set_layout *layout,
493 struct val_descriptor_set **out_set);
494
495 void
496 val_descriptor_set_destroy(struct val_device *device,
497 struct val_descriptor_set *set);
498
499 struct val_pipeline_layout {
500 struct vk_object_base base;
501 struct {
502 struct val_descriptor_set_layout *layout;
503 uint32_t dynamic_offset_start;
504 } set[MAX_SETS];
505
506 uint32_t num_sets;
507 uint32_t push_constant_size;
508 struct {
509 bool has_dynamic_offsets;
510 } stage[MESA_SHADER_STAGES];
511 };
512
513 struct val_pipeline {
514 struct vk_object_base base;
515 struct val_device * device;
516 struct val_pipeline_layout * layout;
517
518 bool is_compute_pipeline;
519 bool force_min_sample;
520 nir_shader *pipeline_nir[MESA_SHADER_STAGES];
521 void *shader_cso[PIPE_SHADER_TYPES];
522 VkGraphicsPipelineCreateInfo graphics_create_info;
523 VkComputePipelineCreateInfo compute_create_info;
524 };
525
526 struct val_event {
527 struct vk_object_base base;
528 uint64_t event_storage;
529 };
530
531 struct val_fence {
532 struct vk_object_base base;
533 bool signaled;
534 struct pipe_fence_handle *handle;
535 };
536
537 struct val_semaphore {
538 struct vk_object_base base;
539 bool dummy;
540 };
541
542 struct val_buffer {
543 struct vk_object_base base;
544 struct val_device * device;
545 VkDeviceSize size;
546
547 VkBufferUsageFlags usage;
548 VkDeviceSize offset;
549
550 struct pipe_resource *bo;
551 uint64_t total_size;
552 };
553
554 struct val_buffer_view {
555 struct vk_object_base base;
556 VkFormat format;
557 enum pipe_format pformat;
558 struct val_buffer *buffer;
559 uint32_t offset;
560 uint64_t range;
561 };
562
563 struct val_query_pool {
564 struct vk_object_base base;
565 VkQueryType type;
566 uint32_t count;
567 enum pipe_query_type base_type;
568 struct pipe_query *queries[0];
569 };
570
571 struct val_cmd_pool {
572 struct vk_object_base base;
573 VkAllocationCallbacks alloc;
574 struct list_head cmd_buffers;
575 struct list_head free_cmd_buffers;
576 };
577
578
579 enum val_cmd_buffer_status {
580 VAL_CMD_BUFFER_STATUS_INVALID,
581 VAL_CMD_BUFFER_STATUS_INITIAL,
582 VAL_CMD_BUFFER_STATUS_RECORDING,
583 VAL_CMD_BUFFER_STATUS_EXECUTABLE,
584 VAL_CMD_BUFFER_STATUS_PENDING,
585 };
586
587 struct val_cmd_buffer {
588 struct vk_object_base base;
589
590 struct val_device * device;
591
592 VkCommandBufferLevel level;
593 enum val_cmd_buffer_status status;
594 struct val_cmd_pool * pool;
595 struct list_head pool_link;
596
597 struct list_head cmds;
598
599 uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
600 };
601
/* Recorded command kinds, in the same order as the command-buffer building
 * commands in the Vulkan spec. */
enum val_cmds {
   VAL_CMD_BIND_PIPELINE,
   VAL_CMD_SET_VIEWPORT,
   VAL_CMD_SET_SCISSOR,
   VAL_CMD_SET_LINE_WIDTH,
   VAL_CMD_SET_DEPTH_BIAS,
   VAL_CMD_SET_BLEND_CONSTANTS,
   VAL_CMD_SET_DEPTH_BOUNDS,
   VAL_CMD_SET_STENCIL_COMPARE_MASK,
   VAL_CMD_SET_STENCIL_WRITE_MASK,
   VAL_CMD_SET_STENCIL_REFERENCE,
   VAL_CMD_BIND_DESCRIPTOR_SETS,
   VAL_CMD_BIND_INDEX_BUFFER,
   VAL_CMD_BIND_VERTEX_BUFFERS,
   VAL_CMD_DRAW,
   VAL_CMD_DRAW_INDEXED,
   VAL_CMD_DRAW_INDIRECT,
   VAL_CMD_DRAW_INDEXED_INDIRECT,
   VAL_CMD_DISPATCH,
   VAL_CMD_DISPATCH_INDIRECT,
   VAL_CMD_COPY_BUFFER,
   VAL_CMD_COPY_IMAGE,
   VAL_CMD_BLIT_IMAGE,
   VAL_CMD_COPY_BUFFER_TO_IMAGE,
   VAL_CMD_COPY_IMAGE_TO_BUFFER,
   VAL_CMD_UPDATE_BUFFER,
   VAL_CMD_FILL_BUFFER,
   VAL_CMD_CLEAR_COLOR_IMAGE,
   VAL_CMD_CLEAR_DEPTH_STENCIL_IMAGE,
   VAL_CMD_CLEAR_ATTACHMENTS,
   VAL_CMD_RESOLVE_IMAGE,
   VAL_CMD_SET_EVENT,
   VAL_CMD_RESET_EVENT,
   VAL_CMD_WAIT_EVENTS,
   VAL_CMD_PIPELINE_BARRIER,
   VAL_CMD_BEGIN_QUERY,
   VAL_CMD_END_QUERY,
   VAL_CMD_RESET_QUERY_POOL,
   VAL_CMD_WRITE_TIMESTAMP,
   VAL_CMD_COPY_QUERY_POOL_RESULTS,
   VAL_CMD_PUSH_CONSTANTS,
   VAL_CMD_BEGIN_RENDER_PASS,
   VAL_CMD_NEXT_SUBPASS,
   VAL_CMD_END_RENDER_PASS,
   VAL_CMD_EXECUTE_COMMANDS,
};
649
650 struct val_cmd_bind_pipeline {
651 VkPipelineBindPoint bind_point;
652 struct val_pipeline *pipeline;
653 };
654
655 struct val_cmd_set_viewport {
656 uint32_t first_viewport;
657 uint32_t viewport_count;
658 VkViewport viewports[16];
659 };
660
661 struct val_cmd_set_scissor {
662 uint32_t first_scissor;
663 uint32_t scissor_count;
664 VkRect2D scissors[16];
665 };
666
667 struct val_cmd_set_line_width {
668 float line_width;
669 };
670
671 struct val_cmd_set_depth_bias {
672 float constant_factor;
673 float clamp;
674 float slope_factor;
675 };
676
677 struct val_cmd_set_blend_constants {
678 float blend_constants[4];
679 };
680
681 struct val_cmd_set_depth_bounds {
682 float min_depth;
683 float max_depth;
684 };
685
686 struct val_cmd_set_stencil_vals {
687 VkStencilFaceFlags face_mask;
688 uint32_t value;
689 };
690
691 struct val_cmd_bind_descriptor_sets {
692 VkPipelineBindPoint bind_point;
693 struct val_pipeline_layout *layout;
694 uint32_t first;
695 uint32_t count;
696 struct val_descriptor_set **sets;
697 uint32_t dynamic_offset_count;
698 const uint32_t *dynamic_offsets;
699 };
700
701 struct val_cmd_bind_index_buffer {
702 const struct val_buffer *buffer;
703 VkDeviceSize offset;
704 VkIndexType index_type;
705 };
706
707 struct val_cmd_bind_vertex_buffers {
708 uint32_t first;
709 uint32_t binding_count;
710 struct val_buffer **buffers;
711 const VkDeviceSize *offsets;
712 };
713
714 struct val_cmd_draw {
715 uint32_t vertex_count;
716 uint32_t instance_count;
717 uint32_t first_vertex;
718 uint32_t first_instance;
719 };
720
721 struct val_cmd_draw_indexed {
722 uint32_t index_count;
723 uint32_t instance_count;
724 uint32_t first_index;
725 uint32_t vertex_offset;
726 uint32_t first_instance;
727 };
728
729 struct val_cmd_draw_indirect {
730 VkDeviceSize offset;
731 struct val_buffer *buffer;
732 uint32_t draw_count;
733 uint32_t stride;
734 };
735
736 struct val_cmd_dispatch {
737 uint32_t x;
738 uint32_t y;
739 uint32_t z;
740 };
741
742 struct val_cmd_dispatch_indirect {
743 const struct val_buffer *buffer;
744 VkDeviceSize offset;
745 };
746
747 struct val_cmd_copy_buffer {
748 struct val_buffer *src;
749 struct val_buffer *dst;
750 uint32_t region_count;
751 const VkBufferCopy *regions;
752 };
753
754 struct val_cmd_copy_image {
755 struct val_image *src;
756 struct val_image *dst;
757 VkImageLayout src_layout;
758 VkImageLayout dst_layout;
759 uint32_t region_count;
760 const VkImageCopy *regions;
761 };
762
763 struct val_cmd_blit_image {
764 struct val_image *src;
765 struct val_image *dst;
766 VkImageLayout src_layout;
767 VkImageLayout dst_layout;
768 uint32_t region_count;
769 const VkImageBlit *regions;
770 VkFilter filter;
771 };
772
773 struct val_cmd_copy_buffer_to_image {
774 struct val_buffer *src;
775 struct val_image *dst;
776 VkImageLayout dst_layout;
777 uint32_t region_count;
778 const VkBufferImageCopy *regions;
779 };
780
781 struct val_cmd_copy_image_to_buffer {
782 struct val_image *src;
783 struct val_buffer *dst;
784 VkImageLayout src_layout;
785 uint32_t region_count;
786 const VkBufferImageCopy *regions;
787 };
788
789 struct val_cmd_update_buffer {
790 struct val_buffer *buffer;
791 VkDeviceSize offset;
792 VkDeviceSize data_size;
793 char data[0];
794 };
795
796 struct val_cmd_fill_buffer {
797 struct val_buffer *buffer;
798 VkDeviceSize offset;
799 VkDeviceSize fill_size;
800 uint32_t data;
801 };
802
803 struct val_cmd_clear_color_image {
804 struct val_image *image;
805 VkImageLayout layout;
806 VkClearColorValue clear_val;
807 uint32_t range_count;
808 VkImageSubresourceRange *ranges;
809 };
810
811 struct val_cmd_clear_ds_image {
812 struct val_image *image;
813 VkImageLayout layout;
814 VkClearDepthStencilValue clear_val;
815 uint32_t range_count;
816 VkImageSubresourceRange *ranges;
817 };
818
819 struct val_cmd_clear_attachments {
820 uint32_t attachment_count;
821 VkClearAttachment *attachments;
822 uint32_t rect_count;
823 VkClearRect *rects;
824 };
825
826 struct val_cmd_resolve_image {
827 struct val_image *src;
828 struct val_image *dst;
829 VkImageLayout src_layout;
830 VkImageLayout dst_layout;
831 uint32_t region_count;
832 VkImageResolve *regions;
833 };
834
835 struct val_cmd_event_set {
836 struct val_event *event;
837 bool value;
838 bool flush;
839 };
840
841 struct val_cmd_wait_events {
842 uint32_t event_count;
843 struct val_event **events;
844 VkPipelineStageFlags src_stage_mask;
845 VkPipelineStageFlags dst_stage_mask;
846 uint32_t memory_barrier_count;
847 VkMemoryBarrier *memory_barriers;
848 uint32_t buffer_memory_barrier_count;
849 VkBufferMemoryBarrier *buffer_memory_barriers;
850 uint32_t image_memory_barrier_count;
851 VkImageMemoryBarrier *image_memory_barriers;
852 };
853
854 struct val_cmd_pipeline_barrier {
855 VkPipelineStageFlags src_stage_mask;
856 VkPipelineStageFlags dst_stage_mask;
857 bool by_region;
858 uint32_t memory_barrier_count;
859 VkMemoryBarrier *memory_barriers;
860 uint32_t buffer_memory_barrier_count;
861 VkBufferMemoryBarrier *buffer_memory_barriers;
862 uint32_t image_memory_barrier_count;
863 VkImageMemoryBarrier *image_memory_barriers;
864 };
865
866 struct val_cmd_query_cmd {
867 struct val_query_pool *pool;
868 uint32_t query;
869 uint32_t index;
870 bool precise;
871 bool flush;
872 };
873
874 struct val_cmd_copy_query_pool_results {
875 struct val_query_pool *pool;
876 uint32_t first_query;
877 uint32_t query_count;
878 struct val_buffer *dst;
879 VkDeviceSize dst_offset;
880 VkDeviceSize stride;
881 VkQueryResultFlags flags;
882 };
883
884 struct val_cmd_push_constants {
885 VkShaderStageFlags stage;
886 uint32_t offset;
887 uint32_t size;
888 uint32_t val[1];
889 };
890
891 struct val_attachment_state {
892 VkImageAspectFlags pending_clear_aspects;
893 VkClearValue clear_value;
894 };
895
896 struct val_cmd_begin_render_pass {
897 struct val_framebuffer *framebuffer;
898 struct val_render_pass *render_pass;
899 VkRect2D render_area;
900 struct val_attachment_state *attachments;
901 };
902
903 struct val_cmd_next_subpass {
904 VkSubpassContents contents;
905 };
906
907 struct val_cmd_execute_commands {
908 uint32_t command_buffer_count;
909 struct val_cmd_buffer *cmd_buffers[0];
910 };
911
912 struct val_cmd_buffer_entry {
913 struct list_head cmd_link;
914 uint32_t cmd_type;
915 union {
916 struct val_cmd_bind_pipeline pipeline;
917 struct val_cmd_set_viewport set_viewport;
918 struct val_cmd_set_scissor set_scissor;
919 struct val_cmd_set_line_width set_line_width;
920 struct val_cmd_set_depth_bias set_depth_bias;
921 struct val_cmd_set_blend_constants set_blend_constants;
922 struct val_cmd_set_depth_bounds set_depth_bounds;
923 struct val_cmd_set_stencil_vals stencil_vals;
924 struct val_cmd_bind_descriptor_sets descriptor_sets;
925 struct val_cmd_bind_vertex_buffers vertex_buffers;
926 struct val_cmd_bind_index_buffer index_buffer;
927 struct val_cmd_draw draw;
928 struct val_cmd_draw_indexed draw_indexed;
929 struct val_cmd_draw_indirect draw_indirect;
930 struct val_cmd_dispatch dispatch;
931 struct val_cmd_dispatch_indirect dispatch_indirect;
932 struct val_cmd_copy_buffer copy_buffer;
933 struct val_cmd_copy_image copy_image;
934 struct val_cmd_blit_image blit_image;
935 struct val_cmd_copy_buffer_to_image buffer_to_img;
936 struct val_cmd_copy_image_to_buffer img_to_buffer;
937 struct val_cmd_update_buffer update_buffer;
938 struct val_cmd_fill_buffer fill_buffer;
939 struct val_cmd_clear_color_image clear_color_image;
940 struct val_cmd_clear_ds_image clear_ds_image;
941 struct val_cmd_clear_attachments clear_attachments;
942 struct val_cmd_resolve_image resolve_image;
943 struct val_cmd_event_set event_set;
944 struct val_cmd_wait_events wait_events;
945 struct val_cmd_pipeline_barrier pipeline_barrier;
946 struct val_cmd_query_cmd query;
947 struct val_cmd_copy_query_pool_results copy_query_pool_results;
948 struct val_cmd_push_constants push_constants;
949 struct val_cmd_begin_render_pass begin_render_pass;
950 struct val_cmd_next_subpass next_subpass;
951 struct val_cmd_execute_commands execute_commands;
952 } u;
953 };
954
955 VkResult val_execute_cmds(struct val_device *device,
956 struct val_queue *queue,
957 struct val_fence *fence,
958 struct val_cmd_buffer *cmd_buffer);
959
960 enum pipe_format vk_format_to_pipe(VkFormat format);
961
962 static inline VkImageAspectFlags
963 vk_format_aspects(VkFormat format)
964 {
965 switch (format) {
966 case VK_FORMAT_UNDEFINED:
967 return 0;
968
969 case VK_FORMAT_S8_UINT:
970 return VK_IMAGE_ASPECT_STENCIL_BIT;
971
972 case VK_FORMAT_D16_UNORM_S8_UINT:
973 case VK_FORMAT_D24_UNORM_S8_UINT:
974 case VK_FORMAT_D32_SFLOAT_S8_UINT:
975 return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
976
977 case VK_FORMAT_D16_UNORM:
978 case VK_FORMAT_X8_D24_UNORM_PACK32:
979 case VK_FORMAT_D32_SFLOAT:
980 return VK_IMAGE_ASPECT_DEPTH_BIT;
981
982 default:
983 return VK_IMAGE_ASPECT_COLOR_BIT;
984 }
985 }
986
987 #ifdef __cplusplus
988 }
989 #endif