/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#ifndef TU_PRIVATE_H
#define TU_PRIVATE_H

#include "c11/threads.h"
#include "compiler/shader_enums.h"
#include "main/macros.h"
#include "util/list.h"
#include "util/macros.h"

#include "vk_debug_report.h"

#include "drm/msm_drm.h"
#include "tu_descriptor_set.h"
#include "tu_extensions.h"
/* Pre-declarations needed for WSI entrypoints */
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <vulkan/vulkan.h>
#include <vulkan/vulkan_intel.h>

#include "tu_entrypoints.h"
#define MAX_VERTEX_ATTRIBS 32
#define MAX_VIEWPORTS 16
#define MAX_SCISSORS 16
#define MAX_DISCARD_RECTANGLES 4
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_PUSH_DESCRIPTORS 32
#define MAX_DYNAMIC_UNIFORM_BUFFERS 16
#define MAX_DYNAMIC_STORAGE_BUFFERS 8
#define MAX_DYNAMIC_BUFFERS                                                  \
   (MAX_DYNAMIC_UNIFORM_BUFFERS + MAX_DYNAMIC_STORAGE_BUFFERS)
#define MAX_SAMPLES_LOG2 4
#define NUM_META_FS_KEYS 13
#define TU_MAX_DRM_DEVICES 8
#define NUM_DEPTH_CLEAR_PIPELINES 3

/*
 * This is the point we switch from using CP to compute shader
 * for certain buffer operations.
 */
#define TU_BUFFER_OPS_CS_THRESHOLD 4096
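/*
 * Illustrative sketch (not the driver's actual dispatch code): a buffer
 * fill/copy path would compare the operation size against this threshold
 * to pick an engine:
 *
 *    if (size >= TU_BUFFER_OPS_CS_THRESHOLD)
 *       ...   // large op: go through a compute shader
 *    else
 *       ...   // small op: emit CP packets directly
 */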
enum tu_mem_heap
{
   TU_MEM_HEAP_VRAM,
   TU_MEM_HEAP_VRAM_CPU_ACCESS,
   TU_MEM_HEAP_COUNT
};

enum tu_mem_type
{
   TU_MEM_TYPE_VRAM,
   TU_MEM_TYPE_GTT_WRITE_COMBINE,
   TU_MEM_TYPE_VRAM_CPU_ACCESS,
   TU_MEM_TYPE_GTT_CACHED,
   TU_MEM_TYPE_COUNT
};
#define tu_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
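/*
 * Example (illustrative): applied to a declaration, the attribute lets the
 * compiler type-check call sites like printf. With a hypothetical logger
 *
 *    void log_err(const char *format, ...) tu_printflike(1, 2);
 *
 * a call such as log_err("%s", 42) is diagnosed at compile time.
 */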
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}
static inline uint32_t
align_u32_npot(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a * a;
}
static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}
static inline int32_t
align_i32(int32_t v, int32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}
/** Alignment must be a power of 2. */
static inline bool
tu_is_aligned(uintmax_t n, uintmax_t a)
{
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}
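/*
 * Example (illustrative): the power-of-two helpers reduce to bit masking,
 * while the _npot variant divides and multiplies:
 *
 *    align_u32(13, 8)      == 16
 *    align_u32_npot(13, 6) == 18
 *    tu_is_aligned(16, 8)  == true
 */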
static inline uint32_t
round_up_u32(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a;
}
static inline uint64_t
round_up_u64(uint64_t v, uint64_t a)
{
   return (v + a - 1) / a;
}
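/*
 * Note the difference from align_u32(): the round_up_* helpers return the
 * number of a-sized blocks needed, not a rounded value. For example,
 * round_up_u32(10, 4) == 3 blocks, whereas align_u32(10, 4) == 12.
 */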
static inline uint32_t
tu_minify(uint32_t n, uint32_t levels)
{
   if (unlikely(n == 0))
      return 0;
   else
      return MAX2(n >> levels, 1);
}
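/*
 * Example (illustrative): walking the mip chain of a 35-texel-wide image,
 * tu_minify(35, level) for levels 0..5 yields 35, 17, 8, 4, 2, 1, and
 * stays clamped at 1 for deeper levels.
 */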
static inline float
tu_clamp_f(float f, float min, float max)
{
   assert(min < max);

   if (f > max)
      return max;
   else if (f < min)
      return min;
   else
      return f;
}
static inline bool
tu_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   } else
      return false;
}
#define for_each_bit(b, dword)                                               \
   for (uint32_t __dword = (dword);                                          \
        (b) = __builtin_ffs(__dword) - 1, __dword; __dword &= ~(1 << (b)))
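/*
 * Example (illustrative): `b` receives the index of each set bit, lowest
 * first:
 *
 *    uint32_t b;
 *    for_each_bit(b, 0x0a)
 *       handle_bit(b);   // called with b == 1, then b == 3
 *
 * (handle_bit is a hypothetical callback, not part of this header.)
 */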
#define typed_memcpy(dest, src, count)                                       \
   ({                                                                        \
      STATIC_ASSERT(sizeof(*src) == sizeof(*dest));                          \
      memcpy((dest), (src), (count) * sizeof(*(src)));                       \
   })
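/*
 * Example (illustrative): copies `count` elements while statically
 * rejecting mismatched element sizes:
 *
 *    VkViewport dst[MAX_VIEWPORTS], src[MAX_VIEWPORTS];
 *    typed_memcpy(dst, src, MAX_VIEWPORTS);   // ok, same element type
 *    // typed_memcpy(dst, a_vk_rect2d_array, 1) fails the STATIC_ASSERT
 */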
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */

VkResult
__vk_errorf(struct tu_instance *instance,
            VkResult error,
            const char *file,
            int line,
            const char *format,
            ...);

#define vk_error(instance, error)                                            \
   __vk_errorf(instance, error, __FILE__, __LINE__, NULL);
#define vk_errorf(instance, error, format, ...)                              \
   __vk_errorf(instance, error, __FILE__, __LINE__, format, ##__VA_ARGS__);
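/*
 * Example (illustrative): return failures through vk_error() so a
 * breakpoint in __vk_errorf() stops at the site that first generated the
 * error rather than where it is propagated:
 *
 *    if (mem == NULL)
 *       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 */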
void
__tu_finishme(const char *file, int line, const char *format, ...)
   tu_printflike(3, 4);
void
tu_loge(const char *format, ...) tu_printflike(1, 2);
void
tu_loge_v(const char *format, va_list va);
void
tu_logi(const char *format, ...) tu_printflike(1, 2);
void
tu_logi_v(const char *format, va_list va);
/**
 * Print a FINISHME message, including its source location.
 */
#define tu_finishme(format, ...)                                             \
   do {                                                                      \
      static bool reported = false;                                          \
      if (!reported) {                                                       \
         __tu_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);           \
         reported = true;                                                    \
      }                                                                      \
   } while (0)
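/*
 * Example (illustrative): the static `reported` flag means each call site
 * logs at most once, so a tu_finishme("unhandled case") in a hot path does
 * not flood the log.
 */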
/* A non-fatal assert. Useful for debugging. */
#ifdef DEBUG
#define tu_assert(x)                                                         \
   ({                                                                        \
      if (unlikely(!(x)))                                                    \
         fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x);      \
   })
#else
#define tu_assert(x)
#endif
/* Suppress -Wunused in stub functions */
#define tu_use_args(...) __tu_use_args(0, ##__VA_ARGS__)
static inline void
__tu_use_args(int ignore, ...)
{
}

#define stub()                                                               \
   do {                                                                      \
      tu_finishme("stub %s", __func__);                                      \
      return;                                                                \
   } while (0)
void *
tu_lookup_entrypoint_unchecked(const char *name);
void *
tu_lookup_entrypoint_checked(
   const char *name,
   uint32_t core_version,
   const struct tu_instance_extension_table *instance,
   const struct tu_device_extension_table *device);
struct tu_physical_device
{
   VK_LOADER_DATA _loader_data;

   struct tu_instance *instance;

   char name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   uint8_t driver_uuid[VK_UUID_SIZE];
   uint8_t device_uuid[VK_UUID_SIZE];
   uint8_t cache_uuid[VK_UUID_SIZE];

   /* This is the driver's on-disk cache, used as a fallback as opposed to
    * the pipeline cache defined by apps.
    */
   struct disk_cache *disk_cache;

   struct tu_device_extension_table supported_extensions;
};
enum tu_debug_flags
{
   TU_DEBUG_STARTUP = 1 << 0,
};
struct tu_instance
{
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   uint32_t api_version;
   int physical_device_count;
   struct tu_physical_device physical_devices[TU_MAX_DRM_DEVICES];

   enum tu_debug_flags debug_flags;

   struct vk_debug_report_instance debug_report_callbacks;

   struct tu_instance_extension_table enabled_extensions;
};
bool
tu_instance_extension_supported(const char *name);
uint32_t
tu_physical_device_api_version(struct tu_physical_device *dev);
bool
tu_physical_device_extension_supported(struct tu_physical_device *dev,
                                       const char *name);
struct tu_pipeline_cache
{
   struct tu_device *device;
   pthread_mutex_t mutex;

   uint32_t kernel_count;
   struct cache_entry **hash_table;

   VkAllocationCallbacks alloc;
};

struct tu_pipeline_key
{
};
void
tu_pipeline_cache_init(struct tu_pipeline_cache *cache,
                       struct tu_device *device);
void
tu_pipeline_cache_finish(struct tu_pipeline_cache *cache);
bool
tu_pipeline_cache_load(struct tu_pipeline_cache *cache,
                       const void *data,
                       size_t size);

struct tu_shader_variant;
bool
tu_create_shader_variants_from_pipeline_cache(
   struct tu_device *device,
   struct tu_pipeline_cache *cache,
   const unsigned char *sha1,
   struct tu_shader_variant **variants);

void
tu_pipeline_cache_insert_shaders(struct tu_device *device,
                                 struct tu_pipeline_cache *cache,
                                 const unsigned char *sha1,
                                 struct tu_shader_variant **variants,
                                 const void *const *codes,
                                 const unsigned *code_sizes);
struct tu_meta_state
{
   VkAllocationCallbacks alloc;

   struct tu_pipeline_cache cache;
};
#define TU_QUEUE_GENERAL 0

#define TU_MAX_QUEUE_FAMILIES 1

struct tu_queue
{
   VK_LOADER_DATA _loader_data;
   struct tu_device *device;
   uint32_t queue_family_index;

   VkDeviceQueueCreateFlags flags;

   uint32_t msm_queue_id;
};
struct tu_device
{
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   struct tu_instance *instance;

   struct tu_meta_state meta_state;

   struct tu_queue *queues[TU_MAX_QUEUE_FAMILIES];
   int queue_count[TU_MAX_QUEUE_FAMILIES];

   struct tu_physical_device *physical_device;

   /* Backup in-memory cache to be used if the app doesn't provide one */
   struct tu_pipeline_cache *mem_cache;

   struct list_head shader_slabs;
   mtx_t shader_slab_mutex;

   struct tu_device_extension_table enabled_extensions;
};
VkResult
tu_bo_init_new(struct tu_device *dev, struct tu_bo *bo, uint64_t size);
void
tu_bo_finish(struct tu_device *dev, struct tu_bo *bo);
VkResult
tu_bo_map(struct tu_device *dev, struct tu_bo *bo);
struct tu_device_memory
{
   /* for dedicated allocations */
   struct tu_image *image;
   struct tu_buffer *buffer;
};
struct tu_descriptor_range
{
   uint64_t va;
   uint32_t size;
};

struct tu_descriptor_set
{
   const struct tu_descriptor_set_layout *layout;

   uint32_t *mapped_ptr;
   struct tu_descriptor_range *dynamic_descriptors;
};

struct tu_push_descriptor_set
{
   struct tu_descriptor_set set;
};
struct tu_descriptor_pool_entry
{
   struct tu_descriptor_set *set;
};

struct tu_descriptor_pool
{
   uint64_t current_offset;

   uint8_t *host_memory_base;
   uint8_t *host_memory_ptr;
   uint8_t *host_memory_end;

   uint32_t entry_count;
   uint32_t max_entry_count;
   struct tu_descriptor_pool_entry entries[0];
};
struct tu_descriptor_update_template_entry
{
   VkDescriptorType descriptor_type;

   /* The number of descriptors to update */
   uint32_t descriptor_count;

   /* Into mapped_ptr or dynamic_descriptors, in units of the respective array
    */
   uint32_t dst_offset;

   /* In dwords. Not valid/used for dynamic descriptors */
   uint32_t dst_stride;

   uint32_t buffer_offset;

   /* Only valid for combined image samplers and samplers */
   uint16_t has_sampler;

   /* For push descriptors */
   const uint32_t *immutable_samplers;
};

struct tu_descriptor_update_template
{
   uint32_t entry_count;
   VkPipelineBindPoint bind_point;
   struct tu_descriptor_update_template_entry entry[0];
};
struct tu_buffer
{
   VkBufferUsageFlags usage;
   VkBufferCreateFlags flags;
};
enum tu_dynamic_state_bits
{
   TU_DYNAMIC_VIEWPORT = 1 << 0,
   TU_DYNAMIC_SCISSOR = 1 << 1,
   TU_DYNAMIC_LINE_WIDTH = 1 << 2,
   TU_DYNAMIC_DEPTH_BIAS = 1 << 3,
   TU_DYNAMIC_BLEND_CONSTANTS = 1 << 4,
   TU_DYNAMIC_DEPTH_BOUNDS = 1 << 5,
   TU_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6,
   TU_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7,
   TU_DYNAMIC_STENCIL_REFERENCE = 1 << 8,
   TU_DYNAMIC_DISCARD_RECTANGLE = 1 << 9,
   TU_DYNAMIC_ALL = (1 << 10) - 1,
};
struct tu_vertex_binding
{
   struct tu_buffer *buffer;
};

struct tu_viewport_state
{
   VkViewport viewports[MAX_VIEWPORTS];
};

struct tu_scissor_state
{
   VkRect2D scissors[MAX_SCISSORS];
};

struct tu_discard_rectangle_state
{
   VkRect2D rectangles[MAX_DISCARD_RECTANGLES];
};
struct tu_dynamic_state
{
   /**
    * Bitmask of (1 << VK_DYNAMIC_STATE_*).
    * Defines the set of saved dynamic state.
    */
   uint32_t mask;

   struct tu_viewport_state viewport;

   struct tu_scissor_state scissor;

   float blend_constants[4];

   struct
   {
      uint32_t front;
      uint32_t back;
   } stencil_compare_mask;

   struct
   {
      uint32_t front;
      uint32_t back;
   } stencil_write_mask;

   struct tu_discard_rectangle_state discard_rectangle;
};

extern const struct tu_dynamic_state default_dynamic_state;
const char *
tu_get_debug_option_name(int id);

const char *
tu_get_perftest_option_name(int id);
/**
 * Attachment state when recording a renderpass instance.
 *
 * The clear value is valid only if there exists a pending clear.
 */
struct tu_attachment_state
{
   VkImageAspectFlags pending_clear_aspects;
   uint32_t cleared_views;
   VkClearValue clear_value;
   VkImageLayout current_layout;
};
struct tu_descriptor_state
{
   struct tu_descriptor_set *sets[MAX_SETS];
   struct tu_push_descriptor_set push_set;
   uint32_t dynamic_buffers[4 * MAX_DYNAMIC_BUFFERS];
};

struct tu_cmd_state
{
   /* Vertex descriptors */

   struct tu_dynamic_state dynamic;

   /* Index buffer */
   struct tu_buffer *index_buffer;
   uint64_t index_offset;
   uint32_t max_index_count;

   const struct tu_render_pass *pass;
   const struct tu_subpass *subpass;
   const struct tu_framebuffer *framebuffer;
   struct tu_attachment_state *attachments;
};
struct tu_cmd_pool
{
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
   struct list_head free_cmd_buffers;
   uint32_t queue_family_index;
};
struct tu_cmd_buffer_upload
{
   struct list_head list;
};
enum tu_cmd_buffer_status
{
   TU_CMD_BUFFER_STATUS_INVALID,
   TU_CMD_BUFFER_STATUS_INITIAL,
   TU_CMD_BUFFER_STATUS_RECORDING,
   TU_CMD_BUFFER_STATUS_EXECUTABLE,
   TU_CMD_BUFFER_STATUS_PENDING,
};
struct tu_bo_list
{
   struct drm_msm_gem_submit_bo *bo_infos;
};

#define TU_BO_LIST_FAILED (~0)
void
tu_bo_list_init(struct tu_bo_list *list);
void
tu_bo_list_destroy(struct tu_bo_list *list);
void
tu_bo_list_reset(struct tu_bo_list *list);
uint32_t
tu_bo_list_add(struct tu_bo_list *list,
               const struct tu_bo *bo,
               uint32_t flags);
VkResult
tu_bo_list_merge(struct tu_bo_list *list, const struct tu_bo_list *other);
struct tu_cs_entry
{
   const struct tu_bo *bo;
};

struct tu_cs
{
   /* for tu_cs_reserve_space_assert */
   uint32_t *reserved_end;

   struct tu_cs_entry *entries;
   uint32_t entry_count;
   uint32_t entry_capacity;

   uint32_t bo_capacity;
};
struct tu_cmd_buffer
{
   VK_LOADER_DATA _loader_data;

   struct tu_device *device;

   struct tu_cmd_pool *pool;
   struct list_head pool_link;

   VkCommandBufferUsageFlags usage_flags;
   VkCommandBufferLevel level;
   enum tu_cmd_buffer_status status;

   struct tu_cmd_state state;
   struct tu_vertex_binding vertex_bindings[MAX_VBS];
   uint32_t queue_family_index;

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
   VkShaderStageFlags push_constant_stages;
   struct tu_descriptor_set meta_push_descriptors;

   struct tu_descriptor_state descriptors[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

   struct tu_cmd_buffer_upload upload;

   VkResult record_result;

   struct tu_bo_list bo_list;

   uint32_t marker_seqno;

   struct tu_bo scratch_bo;
   uint32_t scratch_seqno;

   /* current cs; command packets are always emitted to it */
   struct tu_cs *cur_cs;
};
VkResult
tu_get_memory_fd(struct tu_device *device,
                 struct tu_device_memory *memory,
                 int *pFD);

/*
 * Takes x, y, z as exact numbers of invocations, instead of blocks.
 *
 * Limitations: Can't call normal dispatch functions without binding or
 * rebinding the compute pipeline.
 */
void
tu_unaligned_dispatch(struct tu_cmd_buffer *cmd_buffer,
                      uint32_t x,
                      uint32_t y,
                      uint32_t z);
struct tu_shader_module;

#define TU_HASH_SHADER_IS_GEOM_COPY_SHADER (1 << 0)
#define TU_HASH_SHADER_SISCHED (1 << 1)
#define TU_HASH_SHADER_UNSAFE_MATH (1 << 2)
void
tu_hash_shaders(unsigned char *hash,
                const VkPipelineShaderStageCreateInfo **stages,
                const struct tu_pipeline_layout *layout,
                const struct tu_pipeline_key *key,
                uint32_t flags);
static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}
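/*
 * Example (illustrative): these helpers rely on Vulkan's stage bits being
 * laid out in gl_shader_stage order, e.g. VK_SHADER_STAGE_FRAGMENT_BIT
 * (1 << 4) maps to MESA_SHADER_FRAGMENT (4) and back.
 */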
#define TU_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define tu_foreach_stage(stage, stage_bits)                                  \
   for (gl_shader_stage stage,                                               \
        __tmp = (gl_shader_stage)((stage_bits) & TU_STAGE_MASK);             \
        stage = __builtin_ffs(__tmp) - 1, __tmp; __tmp &= ~(1 << (stage)))
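/*
 * Example (illustrative): iterate the stages set in a VkShaderStageFlags
 * mask, lowest stage first:
 *
 *    tu_foreach_stage(stage, VK_SHADER_STAGE_VERTEX_BIT |
 *                               VK_SHADER_STAGE_FRAGMENT_BIT)
 *    {
 *       // stage is MESA_SHADER_VERTEX, then MESA_SHADER_FRAGMENT
 *    }
 */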
struct tu_shader_module
{
   struct nir_shader *nir;
   unsigned char sha1[20];
};
struct tu_pipeline
{
   struct tu_device *device;
   struct tu_dynamic_state dynamic_state;

   struct tu_pipeline_layout *layout;

   bool need_indirect_descriptor_sets;
   VkShaderStageFlags active_stages;
};
struct tu_userdata_info *
tu_lookup_user_sgpr(struct tu_pipeline *pipeline,
                    gl_shader_stage stage,
                    int idx);

struct tu_shader_variant *
tu_get_shader(struct tu_pipeline *pipeline, gl_shader_stage stage);
struct tu_graphics_pipeline_create_info
{
   bool db_stencil_clear;
   bool db_depth_disable_expclear;
   bool db_stencil_disable_expclear;
   bool db_flush_depth_inplace;
   bool db_flush_stencil_inplace;

   uint32_t custom_blend_mode;
};

VkResult
tu_graphics_pipeline_create(
   VkDevice _device,
   VkPipelineCache cache,
   const VkGraphicsPipelineCreateInfo *pCreateInfo,
   const struct tu_graphics_pipeline_create_info *extra,
   const VkAllocationCallbacks *alloc,
   VkPipeline *pPipeline);
struct vk_format_description;
uint32_t
tu_translate_buffer_dataformat(const struct vk_format_description *desc,
                               int first_non_void);
uint32_t
tu_translate_buffer_numformat(const struct vk_format_description *desc,
                              int first_non_void);
uint32_t
tu_translate_colorformat(VkFormat format);
uint32_t
tu_translate_color_numformat(VkFormat format,
                             const struct vk_format_description *desc,
                             int first_non_void);
uint32_t
tu_colorformat_endian_swap(uint32_t colorformat);
unsigned
tu_translate_colorswap(VkFormat format, bool do_endian_swap);
uint32_t
tu_translate_dbformat(VkFormat format);
uint32_t
tu_translate_tex_dataformat(VkFormat format,
                            const struct vk_format_description *desc,
                            int first_non_void);
uint32_t
tu_translate_tex_numformat(VkFormat format,
                           const struct vk_format_description *desc,
                           int first_non_void);
bool
tu_format_pack_clear_color(VkFormat format,
                           uint32_t clear_vals[2],
                           VkClearColorValue *value);
bool
tu_is_colorbuffer_format_supported(VkFormat format, bool *blendable);
bool
tu_dcc_formats_compatible(VkFormat format1, VkFormat format2);
struct tu_image_level
{
   VkDeviceSize offset;
   VkDeviceSize size;
   uint32_t pitch;
};

struct tu_image
{
   /* The original VkFormat provided by the client. This may not match any
    * of the actual surface formats.
    */
   VkFormat vk_format;
   VkImageAspectFlags aspects;
   VkImageUsageFlags usage;  /**< Superset of VkImageCreateInfo::usage. */
   VkImageTiling tiling;     /**< VkImageCreateInfo::tiling */
   VkImageCreateFlags flags; /**< VkImageCreateInfo::flags */

   uint32_t level_count;
   uint32_t layer_count;

   VkDeviceSize layer_size;
   struct tu_image_level levels[15];

   unsigned queue_family_mask;

   /* For VK_ANDROID_native_buffer, the WSI image owns the memory. */
   VkDeviceMemory owned_memory;

   const struct tu_bo *bo;
   VkDeviceSize bo_offset;
};
unsigned
tu_image_queue_family_mask(const struct tu_image *image,
                           uint32_t family,
                           uint32_t queue_family);
static inline uint32_t
tu_get_layerCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->layerCount == VK_REMAINING_ARRAY_LAYERS
             ? image->layer_count - range->baseArrayLayer
             : range->layerCount;
}

static inline uint32_t
tu_get_levelCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->levelCount == VK_REMAINING_MIP_LEVELS
             ? image->level_count - range->baseMipLevel
             : range->levelCount;
}
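/*
 * Example (illustrative): resolving VK_REMAINING_ARRAY_LAYERS against a
 * six-layer image:
 *
 *    const VkImageSubresourceRange range = {
 *       .baseArrayLayer = 2,
 *       .layerCount = VK_REMAINING_ARRAY_LAYERS,
 *    };
 *    uint32_t layers = tu_get_layerCount(image, &range);   // 6 - 2 == 4
 */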
struct tu_image_view
{
   struct tu_image *image; /**< VkImageViewCreateInfo::image */

   VkImageViewType type;
   VkImageAspectFlags aspect_mask;

   uint32_t base_layer;
   uint32_t layer_count;

   uint32_t level_count;
   VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */

   uint32_t descriptor[16];

   /* Descriptor for use as a storage image as opposed to a sampled image.
    * This has a few differences for cube maps (e.g. type).
    */
   uint32_t storage_descriptor[16];
};
struct tu_image_create_info
{
   const VkImageCreateInfo *vk_info;
   bool no_metadata_planes;
};

VkResult
tu_image_create(VkDevice _device,
                const struct tu_image_create_info *info,
                const VkAllocationCallbacks *alloc,
                VkImage *pImage);

VkResult
tu_image_from_gralloc(VkDevice device_h,
                      const VkImageCreateInfo *base_info,
                      const VkNativeBufferANDROID *gralloc_info,
                      const VkAllocationCallbacks *alloc,
                      VkImage *out_image_h);

void
tu_image_view_init(struct tu_image_view *view,
                   struct tu_device *device,
                   const VkImageViewCreateInfo *pCreateInfo);
struct tu_buffer_view
{
   uint64_t range; /**< VkBufferViewCreateInfo::range */
};

void
tu_buffer_view_init(struct tu_buffer_view *view,
                    struct tu_device *device,
                    const VkBufferViewCreateInfo *pCreateInfo);
static inline struct VkExtent3D
tu_sanitize_image_extent(const VkImageType imageType,
                         const struct VkExtent3D imageExtent)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkExtent3D) { imageExtent.width, 1, 1 };
   case VK_IMAGE_TYPE_2D:
      return (VkExtent3D) { imageExtent.width, imageExtent.height, 1 };
   case VK_IMAGE_TYPE_3D:
      return imageExtent;
   default:
      unreachable("invalid image type");
   }
}
static inline struct VkOffset3D
tu_sanitize_image_offset(const VkImageType imageType,
                         const struct VkOffset3D imageOffset)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkOffset3D) { imageOffset.x, 0, 0 };
   case VK_IMAGE_TYPE_2D:
      return (VkOffset3D) { imageOffset.x, imageOffset.y, 0 };
   case VK_IMAGE_TYPE_3D:
      return imageOffset;
   default:
      unreachable("invalid image type");
   }
}
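/*
 * Example (illustrative): for a 1D image, an extent of { 64, 32, 8 }
 * sanitizes to { 64, 1, 1 } and an offset of { 4, 2, 1 } to { 4, 0, 0 },
 * so stale height/depth values cannot leak into address calculations.
 */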
struct tu_attachment_info
{
   struct tu_image_view *attachment;
};

struct tu_framebuffer
{
   uint32_t attachment_count;
   struct tu_attachment_info attachments[0];
};
struct tu_subpass_barrier
{
   VkPipelineStageFlags src_stage_mask;
   VkAccessFlags src_access_mask;
   VkAccessFlags dst_access_mask;
};

void
tu_subpass_barrier(struct tu_cmd_buffer *cmd_buffer,
                   const struct tu_subpass_barrier *barrier);
struct tu_subpass_attachment
{
   uint32_t attachment;
   VkImageLayout layout;
};

struct tu_subpass
{
   uint32_t input_count;
   uint32_t color_count;
   struct tu_subpass_attachment *input_attachments;
   struct tu_subpass_attachment *color_attachments;
   struct tu_subpass_attachment *resolve_attachments;
   struct tu_subpass_attachment depth_stencil_attachment;

   /** Subpass has at least one resolve attachment */
   bool has_resolve;

   struct tu_subpass_barrier start_barrier;

   VkSampleCountFlagBits max_sample_count;
};
struct tu_render_pass_attachment
{
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
   VkImageLayout initial_layout;
   VkImageLayout final_layout;
};
struct tu_render_pass
{
   uint32_t attachment_count;
   uint32_t subpass_count;
   struct tu_subpass_attachment *subpass_attachments;
   struct tu_render_pass_attachment *attachments;
   struct tu_subpass_barrier end_barrier;
   struct tu_subpass subpasses[0];
};
VkResult
tu_device_init_meta(struct tu_device *device);
void
tu_device_finish_meta(struct tu_device *device);
struct tu_query_pool
{
   uint32_t availability_offset;
   uint32_t pipeline_stats_mask;
};

struct tu_semaphore
{
   uint32_t syncobj;
   uint32_t temp_syncobj;
};
void
tu_set_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                      VkPipelineBindPoint bind_point,
                      struct tu_descriptor_set *set,
                      unsigned idx);

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet overrideSet,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies);

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
   const void *pData);

void
tu_meta_push_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                            VkPipelineBindPoint pipelineBindPoint,
                            VkPipelineLayout _layout,
                            uint32_t set,
                            uint32_t descriptorWriteCount,
                            const VkWriteDescriptorSet *pDescriptorWrites);
struct tu_fence
{
   uint32_t syncobj;
   uint32_t temp_syncobj;
};
int
tu_drm_get_gpu_id(const struct tu_physical_device *dev, uint32_t *id);

int
tu_drm_get_gmem_size(const struct tu_physical_device *dev, uint32_t *size);

int
tu_drm_submitqueue_new(const struct tu_device *dev,
                       int priority,
                       uint32_t *queue_id);

void
tu_drm_submitqueue_close(const struct tu_device *dev, uint32_t queue_id);

uint32_t
tu_gem_new(const struct tu_device *dev, uint64_t size, uint32_t flags);
void
tu_gem_close(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_offset(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_iova(const struct tu_device *dev, uint32_t gem_handle);
#define TU_DEFINE_HANDLE_CASTS(__tu_type, __VkType)                          \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) _handle;                                   \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType) _obj;                                                \
   }

#define TU_DEFINE_NONDISP_HANDLE_CASTS(__tu_type, __VkType)                  \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) (uintptr_t) _handle;                       \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType)(uintptr_t) _obj;                                     \
   }

#define TU_FROM_HANDLE(__tu_type, __name, __handle)                          \
   struct __tu_type *__name = __tu_type##_from_handle(__handle)
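/*
 * Example (illustrative, tu_SomeEntrypoint is hypothetical): unwrap a
 * handle at the top of an entrypoint:
 *
 *    VkResult
 *    tu_SomeEntrypoint(VkDevice _device)
 *    {
 *       TU_FROM_HANDLE(tu_device, device, _device);
 *       ...
 *    }
 */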
TU_DEFINE_HANDLE_CASTS(tu_cmd_buffer, VkCommandBuffer)
TU_DEFINE_HANDLE_CASTS(tu_device, VkDevice)
TU_DEFINE_HANDLE_CASTS(tu_instance, VkInstance)
TU_DEFINE_HANDLE_CASTS(tu_physical_device, VkPhysicalDevice)
TU_DEFINE_HANDLE_CASTS(tu_queue, VkQueue)

TU_DEFINE_NONDISP_HANDLE_CASTS(tu_cmd_pool, VkCommandPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer, VkBuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer_view, VkBufferView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_pool, VkDescriptorPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set, VkDescriptorSet)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set_layout,
                               VkDescriptorSetLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_update_template,
                               VkDescriptorUpdateTemplateKHR)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_device_memory, VkDeviceMemory)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_fence, VkFence)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_event, VkEvent)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_framebuffer, VkFramebuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image, VkImage)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image_view, VkImageView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_cache, VkPipelineCache)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline, VkPipeline)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_layout, VkPipelineLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_query_pool, VkQueryPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_render_pass, VkRenderPass)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_sampler, VkSampler)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_shader_module, VkShaderModule)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_semaphore, VkSemaphore)

#endif /* TU_PRIVATE_H */