/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#ifndef TU_PRIVATE_H
#define TU_PRIVATE_H
#include "c11/threads.h"
#include "compiler/shader_enums.h"
#include "main/macros.h"
#include "util/list.h"
#include "util/macros.h"

#include "vk_debug_report.h"

#include "drm/msm_drm.h"
#include "tu_descriptor_set.h"
#include "tu_extensions.h"
/* Pre-declarations needed for WSI entrypoints */
struct wl_display;
struct wl_surface;
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <vulkan/vulkan.h>
#include <vulkan/vulkan_intel.h>

#include "tu_entrypoints.h"
#define MAX_VBS 32
#define MAX_VERTEX_ATTRIBS 32
#define MAX_VIEWPORTS 16
#define MAX_SCISSORS 16
#define MAX_DISCARD_RECTANGLES 4
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_PUSH_DESCRIPTORS 32
#define MAX_DYNAMIC_UNIFORM_BUFFERS 16
#define MAX_DYNAMIC_STORAGE_BUFFERS 8
#define MAX_DYNAMIC_BUFFERS                                                  \
   (MAX_DYNAMIC_UNIFORM_BUFFERS + MAX_DYNAMIC_STORAGE_BUFFERS)
#define MAX_SAMPLES_LOG2 4
#define NUM_META_FS_KEYS 13
#define TU_MAX_DRM_DEVICES 8
#define NUM_DEPTH_CLEAR_PIPELINES 3
/*
 * This is the point at which we switch from using CP to compute shader
 * for certain buffer operations.
 */
#define TU_BUFFER_OPS_CS_THRESHOLD 4096
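
/* Illustrative sketch only: how such a threshold is typically consulted.
 * use_cs_path()/use_cp_path() are hypothetical helpers, not driver API.
 *
 *    if (fill_size >= TU_BUFFER_OPS_CS_THRESHOLD)
 *       use_cs_path(cmd, dst, fill_size);   // large: compute shader
 *    else
 *       use_cp_path(cmd, dst, fill_size);   // small: command processor
 */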
enum tu_mem_heap
{
   TU_MEM_HEAP_VRAM,
   TU_MEM_HEAP_VRAM_CPU_ACCESS,
   TU_MEM_HEAP_GTT,
   TU_MEM_HEAP_COUNT
};

enum tu_mem_type
{
   TU_MEM_TYPE_VRAM,
   TU_MEM_TYPE_GTT_WRITE_COMBINE,
   TU_MEM_TYPE_VRAM_CPU_ACCESS,
   TU_MEM_TYPE_GTT_CACHED,
   TU_MEM_TYPE_COUNT
};
#define tu_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}
static inline uint32_t
align_u32_npot(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a * a;
}
static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}
static inline int32_t
align_i32(int32_t v, int32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}
/** Alignment must be a power of 2. */
static inline bool
tu_is_aligned(uintmax_t n, uintmax_t a)
{
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}
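
/* Worked examples (illustrative only). The asserts above rely on the
 * identity a == (a & -a), which holds exactly for powers of two.
 *
 *    align_u32(13, 8)        == 16     // round up to a multiple of 8
 *    align_u32(16, 8)        == 16     // already aligned
 *    align_u32_npot(10, 6)   == 12     // npot variant allows any alignment
 *    tu_is_aligned(4096, 16) == true
 */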
static inline uint32_t
round_up_u32(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a;
}
static inline uint64_t
round_up_u64(uint64_t v, uint64_t a)
{
   return (v + a - 1) / a;
}
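
/* Illustrative: round_up_* is a ceiling division, e.g. the number of
 * 256-byte blocks needed to cover a 1000-byte buffer.
 *
 *    round_up_u32(1000, 256) == 4     // (1000 + 255) / 256
 *    round_up_u32(1024, 256) == 4
 */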
static inline uint32_t
tu_minify(uint32_t n, uint32_t levels)
{
   if (unlikely(n == 0))
      return 0;
   else
      return MAX2(n >> levels, 1);
}
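
/* Illustrative: mip-level sizing for a 100-texel dimension.
 *
 *    tu_minify(100, 0) == 100
 *    tu_minify(100, 2) == 25
 *    tu_minify(100, 7) == 1      // clamped, a mip level is never 0-sized
 */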
static inline float
tu_clamp_f(float f, float min, float max)
{
   assert(min < max);

   if (f > max)
      return max;
   else if (f < min)
      return min;
   else
      return f;
}
static inline bool
tu_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   } else {
      return false;
   }
}
#define for_each_bit(b, dword)                                               \
   for (uint32_t __dword = (dword);                                          \
        (b) = __builtin_ffs(__dword) - 1, __dword; __dword &= ~(1 << (b)))
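
/* Usage sketch (illustrative): visits each set bit, lowest first.
 *
 *    uint32_t b;
 *    for_each_bit(b, 0x8001u) {
 *       // first iteration: b == 0; second: b == 15
 *    }
 */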
#define typed_memcpy(dest, src, count)                                       \
   ({                                                                        \
      STATIC_ASSERT(sizeof(*src) == sizeof(*dest));                          \
      memcpy((dest), (src), (count) * sizeof(*(src)));                       \
   })
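
/* Illustrative: copies `count` elements while statically rejecting
 * mismatched element sizes.
 *
 *    VkViewport dst[4], src[4];
 *    typed_memcpy(dst, src, 4);   // compiles: sizeof(*src) == sizeof(*dst)
 */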
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */

VkResult
__vk_errorf(struct tu_instance *instance,
            VkResult error,
            const char *file,
            int line,
            const char *format,
            ...);

#define vk_error(instance, error)                                            \
   __vk_errorf(instance, error, __FILE__, __LINE__, NULL);
#define vk_errorf(instance, error, format, ...)                              \
   __vk_errorf(instance, error, __FILE__, __LINE__, format, ##__VA_ARGS__);
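
/* Usage sketch (illustrative): report an error where it is first
 * generated, then return it to the caller.
 *
 *    if (!mem)
 *       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 *
 *    return vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
 *                     "queue %d: %s", idx, reason);
 */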
void
__tu_finishme(const char *file, int line, const char *format, ...)
   tu_printflike(3, 4);
void
tu_loge(const char *format, ...) tu_printflike(1, 2);
void
tu_loge_v(const char *format, va_list va);
void
tu_logi(const char *format, ...) tu_printflike(1, 2);
void
tu_logi_v(const char *format, va_list va);
/**
 * Print a FINISHME message, including its source location.
 */
#define tu_finishme(format, ...)                                             \
   do {                                                                      \
      static bool reported = false;                                          \
      if (!reported) {                                                       \
         __tu_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);           \
         reported = true;                                                    \
      }                                                                      \
   } while (0)
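
/* Usage sketch (illustrative): reported once per call site, thanks to the
 * static `reported` flag above.
 *
 *    tu_finishme("%d-sample multisample resolve", samples);
 */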
/* A non-fatal assert. Useful for debugging. */
#ifdef DEBUG
#define tu_assert(x)                                                         \
   ({                                                                        \
      if (unlikely(!(x)))                                                    \
         fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x);      \
   })
#else
#define tu_assert(x)
#endif
/* Suppress -Wunused in stub functions */
#define tu_use_args(...) __tu_use_args(0, ##__VA_ARGS__)
static inline void
__tu_use_args(int ignore, ...)
{
}

#define tu_stub()                                                            \
   do {                                                                      \
      tu_finishme("stub %s", __func__);                                      \
   } while (0)
void *
tu_lookup_entrypoint_unchecked(const char *name);
void *
tu_lookup_entrypoint_checked(
   const char *name,
   uint32_t core_version,
   const struct tu_instance_extension_table *instance,
   const struct tu_device_extension_table *device);
struct tu_physical_device
{
   VK_LOADER_DATA _loader_data;

   struct tu_instance *instance;

   char name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   uint8_t driver_uuid[VK_UUID_SIZE];
   uint8_t device_uuid[VK_UUID_SIZE];
   uint8_t cache_uuid[VK_UUID_SIZE];

   uint32_t tile_align_w;
   uint32_t tile_align_h;

   /* This is the driver's on-disk cache, used as a fallback as opposed to
    * the pipeline cache defined by apps.
    */
   struct disk_cache *disk_cache;

   struct tu_device_extension_table supported_extensions;
};
enum tu_debug_flags
{
   TU_DEBUG_STARTUP = 1 << 0,
};
struct tu_instance
{
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   uint32_t api_version;
   int physical_device_count;
   struct tu_physical_device physical_devices[TU_MAX_DRM_DEVICES];

   enum tu_debug_flags debug_flags;

   struct vk_debug_report_instance debug_report_callbacks;

   struct tu_instance_extension_table enabled_extensions;
};
bool
tu_instance_extension_supported(const char *name);
uint32_t
tu_physical_device_api_version(struct tu_physical_device *dev);
bool
tu_physical_device_extension_supported(struct tu_physical_device *dev,
                                       const char *name);
struct tu_pipeline_cache
{
   struct tu_device *device;
   pthread_mutex_t mutex;

   uint32_t kernel_count;
   struct cache_entry **hash_table;

   VkAllocationCallbacks alloc;
};
struct tu_pipeline_key
{
};
void
tu_pipeline_cache_init(struct tu_pipeline_cache *cache,
                       struct tu_device *device);
void
tu_pipeline_cache_finish(struct tu_pipeline_cache *cache);
void
tu_pipeline_cache_load(struct tu_pipeline_cache *cache,
                       const void *data,
                       size_t size);
struct tu_shader_variant;

bool
tu_create_shader_variants_from_pipeline_cache(
   struct tu_device *device,
   struct tu_pipeline_cache *cache,
   const unsigned char *sha1,
   struct tu_shader_variant **variants);

void
tu_pipeline_cache_insert_shaders(struct tu_device *device,
                                 struct tu_pipeline_cache *cache,
                                 const unsigned char *sha1,
                                 struct tu_shader_variant **variants,
                                 const void *const *codes,
                                 const unsigned *code_sizes);
struct tu_meta_state
{
   VkAllocationCallbacks alloc;

   struct tu_pipeline_cache cache;
};
#define TU_QUEUE_GENERAL 0

#define TU_MAX_QUEUE_FAMILIES 1
struct tu_queue
{
   VK_LOADER_DATA _loader_data;
   struct tu_device *device;
   uint32_t queue_family_index;

   VkDeviceQueueCreateFlags flags;

   uint32_t msm_queue_id;
};
struct tu_device
{
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   struct tu_instance *instance;

   struct tu_meta_state meta_state;

   struct tu_queue *queues[TU_MAX_QUEUE_FAMILIES];
   int queue_count[TU_MAX_QUEUE_FAMILIES];

   struct tu_physical_device *physical_device;

   /* Backup in-memory cache to be used if the app doesn't provide one */
   struct tu_pipeline_cache *mem_cache;

   struct list_head shader_slabs;
   mtx_t shader_slab_mutex;

   struct tu_device_extension_table enabled_extensions;
};
VkResult
tu_bo_init_new(struct tu_device *dev, struct tu_bo *bo, uint64_t size);
void
tu_bo_finish(struct tu_device *dev, struct tu_bo *bo);
VkResult
tu_bo_map(struct tu_device *dev, struct tu_bo *bo);
struct tu_device_memory
{
   /* for dedicated allocations */
   struct tu_image *image;
   struct tu_buffer *buffer;
};
struct tu_descriptor_range
{
   uint64_t va;
   uint32_t size;
};
struct tu_descriptor_set
{
   const struct tu_descriptor_set_layout *layout;

   uint32_t *mapped_ptr;
   struct tu_descriptor_range *dynamic_descriptors;
};
struct tu_push_descriptor_set
{
   struct tu_descriptor_set set;
   uint32_t capacity;
};
struct tu_descriptor_pool_entry
{
   uint32_t offset;
   uint32_t size;
   struct tu_descriptor_set *set;
};
struct tu_descriptor_pool
{
   uint64_t current_offset;

   uint8_t *host_memory_base;
   uint8_t *host_memory_ptr;
   uint8_t *host_memory_end;

   uint32_t entry_count;
   uint32_t max_entry_count;
   struct tu_descriptor_pool_entry entries[0];
};
struct tu_descriptor_update_template_entry
{
   VkDescriptorType descriptor_type;

   /* The number of descriptors to update */
   uint32_t descriptor_count;

   /* Into mapped_ptr or dynamic_descriptors, in units of the respective array
    */
   uint32_t dst_offset;

   /* In dwords. Not valid/used for dynamic descriptors */
   uint32_t dst_stride;

   uint32_t buffer_offset;

   /* Only valid for combined image samplers and samplers */
   uint16_t has_sampler;

   /* For push descriptors */
   const uint32_t *immutable_samplers;
};
struct tu_descriptor_update_template
{
   uint32_t entry_count;
   VkPipelineBindPoint bind_point;
   struct tu_descriptor_update_template_entry entry[0];
};
struct tu_buffer
{
   VkBufferUsageFlags usage;
   VkBufferCreateFlags flags;
};
enum tu_dynamic_state_bits
{
   TU_DYNAMIC_VIEWPORT = 1 << 0,
   TU_DYNAMIC_SCISSOR = 1 << 1,
   TU_DYNAMIC_LINE_WIDTH = 1 << 2,
   TU_DYNAMIC_DEPTH_BIAS = 1 << 3,
   TU_DYNAMIC_BLEND_CONSTANTS = 1 << 4,
   TU_DYNAMIC_DEPTH_BOUNDS = 1 << 5,
   TU_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6,
   TU_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7,
   TU_DYNAMIC_STENCIL_REFERENCE = 1 << 8,
   TU_DYNAMIC_DISCARD_RECTANGLE = 1 << 9,
   TU_DYNAMIC_ALL = (1 << 10) - 1,
};
struct tu_vertex_binding
{
   struct tu_buffer *buffer;
   VkDeviceSize offset;
};
struct tu_viewport_state
{
   uint32_t count;
   VkViewport viewports[MAX_VIEWPORTS];
};
struct tu_scissor_state
{
   uint32_t count;
   VkRect2D scissors[MAX_SCISSORS];
};
struct tu_discard_rectangle_state
{
   uint32_t count;
   VkRect2D rectangles[MAX_DISCARD_RECTANGLES];
};
struct tu_dynamic_state
{
   /**
    * Bitmask of (1 << VK_DYNAMIC_STATE_*).
    * Defines the set of saved dynamic state.
    */
   uint32_t mask;

   struct tu_viewport_state viewport;

   struct tu_scissor_state scissor;

   float blend_constants[4];

   struct
   {
      uint32_t front;
      uint32_t back;
   } stencil_compare_mask;

   struct
   {
      uint32_t front;
      uint32_t back;
   } stencil_write_mask;

   struct tu_discard_rectangle_state discard_rectangle;
};
extern const struct tu_dynamic_state default_dynamic_state;

const char *
tu_get_debug_option_name(int id);

const char *
tu_get_perftest_option_name(int id);
/**
 * Attachment state when recording a renderpass instance.
 *
 * The clear value is valid only if there exists a pending clear.
 */
struct tu_attachment_state
{
   VkImageAspectFlags pending_clear_aspects;
   uint32_t cleared_views;
   VkClearValue clear_value;
   VkImageLayout current_layout;
};
struct tu_descriptor_state
{
   struct tu_descriptor_set *sets[MAX_SETS];
   struct tu_push_descriptor_set push_set;
   uint32_t dynamic_buffers[4 * MAX_DYNAMIC_BUFFERS];
};
struct tu_cmd_state
{
   /* Vertex descriptors */
   uint64_t vb_va;
   unsigned vb_size;

   struct tu_dynamic_state dynamic;

   struct tu_buffer *index_buffer;
   uint64_t index_offset;
   uint32_t index_type;
   uint32_t max_index_count;

   const struct tu_render_pass *pass;
   const struct tu_subpass *subpass;
   const struct tu_framebuffer *framebuffer;
   struct tu_attachment_state *attachments;
};
struct tu_cmd_pool
{
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
   struct list_head free_cmd_buffers;
   uint32_t queue_family_index;
};
struct tu_cmd_buffer_upload
{
   uint8_t *map;
   unsigned offset;
   uint64_t size;
   struct list_head list;
};
enum tu_cmd_buffer_status
{
   TU_CMD_BUFFER_STATUS_INVALID,
   TU_CMD_BUFFER_STATUS_INITIAL,
   TU_CMD_BUFFER_STATUS_RECORDING,
   TU_CMD_BUFFER_STATUS_EXECUTABLE,
   TU_CMD_BUFFER_STATUS_PENDING,
};
struct tu_bo_list
{
   uint32_t count;
   uint32_t capacity;
   struct drm_msm_gem_submit_bo *bo_infos;
};

#define TU_BO_LIST_FAILED (~0)
void
tu_bo_list_init(struct tu_bo_list *list);
void
tu_bo_list_destroy(struct tu_bo_list *list);
void
tu_bo_list_reset(struct tu_bo_list *list);
uint32_t
tu_bo_list_add(struct tu_bo_list *list,
               const struct tu_bo *bo,
               uint32_t flags);
VkResult
tu_bo_list_merge(struct tu_bo_list *list, const struct tu_bo_list *other);
struct tu_cs_entry
{
   /* No ownership */
   const struct tu_bo *bo;

   uint32_t size;
   uint32_t offset;
};
struct tu_cs
{
   uint32_t *start;
   uint32_t *cur;
   uint32_t *end;

   /* for tu_cs_reserve_space_assert */
   uint32_t *reserved_end;

   struct tu_cs_entry *entries;
   uint32_t entry_count;
   uint32_t entry_capacity;

   struct tu_bo **bos;
   uint32_t bo_count;
   uint32_t bo_capacity;
};
struct tu_cmd_buffer
{
   VK_LOADER_DATA _loader_data;

   struct tu_device *device;

   struct tu_cmd_pool *pool;
   struct list_head pool_link;

   VkCommandBufferUsageFlags usage_flags;
   VkCommandBufferLevel level;
   enum tu_cmd_buffer_status status;

   struct tu_cmd_state state;
   struct tu_vertex_binding vertex_bindings[MAX_VBS];
   uint32_t queue_family_index;

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
   VkShaderStageFlags push_constant_stages;
   struct tu_descriptor_set meta_push_descriptors;

   struct tu_descriptor_state descriptors[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

   struct tu_cmd_buffer_upload upload;

   VkResult record_result;

   struct tu_bo_list bo_list;

   uint32_t marker_seqno;

   struct tu_bo scratch_bo;
   uint32_t scratch_seqno;

   /* current cs; command packets are always emitted to it */
   struct tu_cs *cur_cs;
};
int
tu_get_memory_fd(struct tu_device *device,
                 struct tu_device_memory *memory,
                 int *pFD);
/*
 * Takes x, y and z as exact numbers of invocations, instead of blocks.
 *
 * Limitations: Can't call normal dispatch functions without binding or
 * rebinding the compute pipeline.
 */
void
tu_unaligned_dispatch(struct tu_cmd_buffer *cmd_buffer,
                      uint32_t x,
                      uint32_t y,
                      uint32_t z);
struct tu_shader_module;

#define TU_HASH_SHADER_IS_GEOM_COPY_SHADER (1 << 0)
#define TU_HASH_SHADER_SISCHED (1 << 1)
#define TU_HASH_SHADER_UNSAFE_MATH (1 << 2)
void
tu_hash_shaders(unsigned char *hash,
                const VkPipelineShaderStageCreateInfo **stages,
                const struct tu_pipeline_layout *layout,
                const struct tu_pipeline_key *key,
                uint32_t flags);
static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}
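
/* Illustrative: VkShaderStageFlagBits values are one-hot encodings of
 * gl_shader_stage, so the conversion is a find-first-set / shift.
 *
 *    vk_to_mesa_shader_stage(VK_SHADER_STAGE_FRAGMENT_BIT)
 *       == MESA_SHADER_FRAGMENT
 *    mesa_to_vk_shader_stage(MESA_SHADER_VERTEX)
 *       == VK_SHADER_STAGE_VERTEX_BIT
 */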
#define TU_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define tu_foreach_stage(stage, stage_bits)                                  \
   for (gl_shader_stage stage,                                               \
        __tmp = (gl_shader_stage)((stage_bits) & TU_STAGE_MASK);             \
        stage = __builtin_ffs(__tmp) - 1, __tmp; __tmp &= ~(1 << (stage)))
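
/* Usage sketch (illustrative): iterate only the stages present in a mask.
 *
 *    tu_foreach_stage(s, VK_SHADER_STAGE_VERTEX_BIT |
 *                        VK_SHADER_STAGE_FRAGMENT_BIT)
 *    {
 *       // s == MESA_SHADER_VERTEX, then MESA_SHADER_FRAGMENT
 *    }
 */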
struct tu_shader_module
{
   struct nir_shader *nir;
   unsigned char sha1[20];
   uint32_t size;
   char data[0];
};
struct tu_pipeline
{
   struct tu_device *device;
   struct tu_dynamic_state dynamic_state;

   struct tu_pipeline_layout *layout;

   bool need_indirect_descriptor_sets;
   VkShaderStageFlags active_stages;
};
struct tu_userdata_info *
tu_lookup_user_sgpr(struct tu_pipeline *pipeline,
                    gl_shader_stage stage,
                    int idx);

struct tu_shader_variant *
tu_get_shader(struct tu_pipeline *pipeline, gl_shader_stage stage);
struct tu_graphics_pipeline_create_info
{
   bool db_stencil_clear;
   bool db_depth_disable_expclear;
   bool db_stencil_disable_expclear;
   bool db_flush_depth_inplace;
   bool db_flush_stencil_inplace;

   uint32_t custom_blend_mode;
};
VkResult
tu_graphics_pipeline_create(
   VkDevice device,
   VkPipelineCache cache,
   const VkGraphicsPipelineCreateInfo *pCreateInfo,
   const struct tu_graphics_pipeline_create_info *extra,
   const VkAllocationCallbacks *alloc,
   VkPipeline *pPipeline);
struct vk_format_description;
uint32_t
tu_translate_buffer_dataformat(const struct vk_format_description *desc,
                               int first_non_void);
uint32_t
tu_translate_buffer_numformat(const struct vk_format_description *desc,
                              int first_non_void);
uint32_t
tu_translate_colorformat(VkFormat format);
uint32_t
tu_translate_color_numformat(VkFormat format,
                             const struct vk_format_description *desc,
                             int first_non_void);
uint32_t
tu_colorformat_endian_swap(uint32_t colorformat);
unsigned
tu_translate_colorswap(VkFormat format, bool do_endian_swap);
uint32_t
tu_translate_dbformat(VkFormat format);
uint32_t
tu_translate_tex_dataformat(VkFormat format,
                            const struct vk_format_description *desc,
                            int first_non_void);
uint32_t
tu_translate_tex_numformat(VkFormat format,
                           const struct vk_format_description *desc,
                           int first_non_void);
bool
tu_format_pack_clear_color(VkFormat format,
                           uint32_t clear_vals[2],
                           VkClearColorValue *value);
bool
tu_is_colorbuffer_format_supported(VkFormat format, bool *blendable);
bool
tu_dcc_formats_compatible(VkFormat format1, VkFormat format2);
struct tu_image_level
{
   VkDeviceSize offset;
   VkDeviceSize size;
   uint32_t pitch;
};
struct tu_image
{
   /* The original VkFormat provided by the client. This may not match any
    * of the actual surface formats.
    */
   VkFormat vk_format;
   VkImageAspectFlags aspects;
   VkImageUsageFlags usage;  /**< Superset of VkImageCreateInfo::usage. */
   VkImageTiling tiling;     /**< VkImageCreateInfo::tiling */
   VkImageCreateFlags flags; /**< VkImageCreateInfo::flags */

   uint32_t level_count;
   uint32_t layer_count;

   VkDeviceSize layer_size;
   struct tu_image_level levels[15];

   unsigned queue_family_mask;

   /* For VK_ANDROID_native_buffer, the WSI image owns the memory. */
   VkDeviceMemory owned_memory;

   const struct tu_bo *bo;
   VkDeviceSize bo_offset;
};
unsigned
tu_image_queue_family_mask(const struct tu_image *image,
                           uint32_t family,
                           uint32_t queue_family);
static inline uint32_t
tu_get_layerCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->layerCount == VK_REMAINING_ARRAY_LAYERS
             ? image->layer_count - range->baseArrayLayer
             : range->layerCount;
}
static inline uint32_t
tu_get_levelCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->levelCount == VK_REMAINING_MIP_LEVELS
             ? image->level_count - range->baseMipLevel
             : range->levelCount;
}
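
/* Illustrative: resolving VK_REMAINING_MIP_LEVELS for an image with
 * level_count == 10.
 *
 *    VkImageSubresourceRange r = { .baseMipLevel = 2,
 *                                  .levelCount = VK_REMAINING_MIP_LEVELS };
 *    tu_get_levelCount(image, &r) == 8   // 10 - 2
 */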
struct tu_image_view
{
   struct tu_image *image; /**< VkImageViewCreateInfo::image */

   VkImageViewType type;
   VkImageAspectFlags aspect_mask;

   uint32_t base_layer;
   uint32_t layer_count;

   uint32_t level_count;
   VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */

   uint32_t descriptor[16];

   /* Descriptor for use as a storage image as opposed to a sampled image.
    * This has a few differences for cube maps (e.g. type).
    */
   uint32_t storage_descriptor[16];
};
struct tu_image_create_info
{
   const VkImageCreateInfo *vk_info;

   bool no_metadata_planes;
};
VkResult
tu_image_create(VkDevice _device,
                const struct tu_image_create_info *info,
                const VkAllocationCallbacks *alloc,
                VkImage *pImage);

VkResult
tu_image_from_gralloc(VkDevice device_h,
                      const VkImageCreateInfo *base_info,
                      const VkNativeBufferANDROID *gralloc_info,
                      const VkAllocationCallbacks *alloc,
                      VkImage *out_image_h);
void
tu_image_view_init(struct tu_image_view *view,
                   struct tu_device *device,
                   const VkImageViewCreateInfo *pCreateInfo);
struct tu_buffer_view
{
   uint64_t range; /**< VkBufferViewCreateInfo::range */
};
void
tu_buffer_view_init(struct tu_buffer_view *view,
                    struct tu_device *device,
                    const VkBufferViewCreateInfo *pCreateInfo);
static inline struct VkExtent3D
tu_sanitize_image_extent(const VkImageType imageType,
                         const struct VkExtent3D imageExtent)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkExtent3D) { imageExtent.width, 1, 1 };
   case VK_IMAGE_TYPE_2D:
      return (VkExtent3D) { imageExtent.width, imageExtent.height, 1 };
   case VK_IMAGE_TYPE_3D:
      return imageExtent;
   default:
      unreachable("invalid image type");
   }
}
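
/* Illustrative: a 2D image ignores any depth the client passed.
 *
 *    tu_sanitize_image_extent(VK_IMAGE_TYPE_2D,
 *                             (VkExtent3D) { 64, 64, 7 })
 *       == (VkExtent3D) { 64, 64, 1 }
 */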
static inline struct VkOffset3D
tu_sanitize_image_offset(const VkImageType imageType,
                         const struct VkOffset3D imageOffset)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkOffset3D) { imageOffset.x, 0, 0 };
   case VK_IMAGE_TYPE_2D:
      return (VkOffset3D) { imageOffset.x, imageOffset.y, 0 };
   case VK_IMAGE_TYPE_3D:
      return imageOffset;
   default:
      unreachable("invalid image type");
   }
}
struct tu_attachment_info
{
   struct tu_image_view *attachment;
};
struct tu_framebuffer
{
   uint32_t width;
   uint32_t height;
   uint32_t layers;

   uint32_t attachment_count;
   struct tu_attachment_info attachments[0];
};
struct tu_subpass_barrier
{
   VkPipelineStageFlags src_stage_mask;
   VkAccessFlags src_access_mask;
   VkAccessFlags dst_access_mask;
};
void
tu_subpass_barrier(struct tu_cmd_buffer *cmd_buffer,
                   const struct tu_subpass_barrier *barrier);
struct tu_subpass_attachment
{
   uint32_t attachment;
   VkImageLayout layout;
};
struct tu_subpass
{
   uint32_t input_count;
   uint32_t color_count;
   struct tu_subpass_attachment *input_attachments;
   struct tu_subpass_attachment *color_attachments;
   struct tu_subpass_attachment *resolve_attachments;
   struct tu_subpass_attachment depth_stencil_attachment;

   /** Subpass has at least one resolve attachment */
   bool has_resolve;

   struct tu_subpass_barrier start_barrier;

   uint32_t view_mask;
   VkSampleCountFlagBits max_sample_count;
};
struct tu_render_pass_attachment
{
   VkFormat format;
   uint32_t samples;
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
   VkImageLayout initial_layout;
   VkImageLayout final_layout;
};
struct tu_render_pass
{
   uint32_t attachment_count;
   uint32_t subpass_count;
   struct tu_subpass_attachment *subpass_attachments;
   struct tu_render_pass_attachment *attachments;
   struct tu_subpass_barrier end_barrier;
   struct tu_subpass subpasses[0];
};
VkResult
tu_device_init_meta(struct tu_device *device);
void
tu_device_finish_meta(struct tu_device *device);
struct tu_query_pool
{
   uint32_t stride;
   uint32_t availability_offset;
   uint64_t size;
   char *ptr;
   VkQueryType type;
   uint32_t pipeline_stats_mask;
};
struct tu_semaphore
{
   uint32_t syncobj;
   uint32_t temp_syncobj;
};
void
tu_set_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                      VkPipelineBindPoint bind_point,
                      struct tu_descriptor_set *set,
                      unsigned idx);

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet overrideSet,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies);
void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
   const void *pData);
void
tu_meta_push_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                            VkPipelineBindPoint pipelineBindPoint,
                            VkPipelineLayout _layout,
                            uint32_t set,
                            uint32_t descriptorWriteCount,
                            const VkWriteDescriptorSet *pDescriptorWrites);
struct tu_fence
{
   uint32_t syncobj;
   uint32_t temp_syncobj;
};
int
tu_drm_get_gpu_id(const struct tu_physical_device *dev, uint32_t *id);

int
tu_drm_get_gmem_size(const struct tu_physical_device *dev, uint32_t *size);

int
tu_drm_submitqueue_new(const struct tu_device *dev,
                       int priority,
                       uint32_t *queue_id);

void
tu_drm_submitqueue_close(const struct tu_device *dev, uint32_t queue_id);

uint32_t
tu_gem_new(const struct tu_device *dev, uint64_t size, uint32_t flags);
void
tu_gem_close(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_offset(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_iova(const struct tu_device *dev, uint32_t gem_handle);
#define TU_DEFINE_HANDLE_CASTS(__tu_type, __VkType)                          \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) _handle;                                   \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType) _obj;                                                \
   }

#define TU_DEFINE_NONDISP_HANDLE_CASTS(__tu_type, __VkType)                  \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) (uintptr_t) _handle;                       \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType)(uintptr_t) _obj;                                     \
   }

#define TU_FROM_HANDLE(__tu_type, __name, __handle)                          \
   struct __tu_type *__name = __tu_type##_from_handle(__handle)
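
/* Usage sketch (illustrative; tu_SomeEntrypoint is hypothetical): unwrap a
 * Vulkan handle at the top of an entrypoint.
 *
 *    VkResult
 *    tu_SomeEntrypoint(VkDevice _device, ...)
 *    {
 *       TU_FROM_HANDLE(tu_device, device, _device);
 *       // `device` is now a struct tu_device *
 *    }
 */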
TU_DEFINE_HANDLE_CASTS(tu_cmd_buffer, VkCommandBuffer)
TU_DEFINE_HANDLE_CASTS(tu_device, VkDevice)
TU_DEFINE_HANDLE_CASTS(tu_instance, VkInstance)
TU_DEFINE_HANDLE_CASTS(tu_physical_device, VkPhysicalDevice)
TU_DEFINE_HANDLE_CASTS(tu_queue, VkQueue)

TU_DEFINE_NONDISP_HANDLE_CASTS(tu_cmd_pool, VkCommandPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer, VkBuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer_view, VkBufferView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_pool, VkDescriptorPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set, VkDescriptorSet)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set_layout,
                               VkDescriptorSetLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_update_template,
                               VkDescriptorUpdateTemplateKHR)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_device_memory, VkDeviceMemory)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_fence, VkFence)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_event, VkEvent)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_framebuffer, VkFramebuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image, VkImage)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image_view, VkImageView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_cache, VkPipelineCache)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline, VkPipeline)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_layout, VkPipelineLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_query_pool, VkQueryPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_render_pass, VkRenderPass)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_sampler, VkSampler)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_shader_module, VkShaderModule)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_semaphore, VkSemaphore)
#endif /* TU_PRIVATE_H */