/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#ifndef TU_PRIVATE_H
#define TU_PRIVATE_H

#include "c11/threads.h"
#include "compiler/shader_enums.h"
#include "main/macros.h"
#include "util/list.h"
#include "util/macros.h"

#include "vk_debug_report.h"

#include "drm/msm_drm.h"

#include "adreno_common.xml.h"
#include "adreno_pm4.xml.h"

#include "tu_descriptor_set.h"
#include "tu_extensions.h"

/* Pre-declarations needed for WSI entrypoints */
struct wl_surface;
struct wl_display;
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <vulkan/vulkan.h>
#include <vulkan/vulkan_intel.h>

#include "tu_entrypoints.h"

#define MAX_VBS 32
#define MAX_VERTEX_ATTRIBS 32
#define MAX_RTS 8
#define MAX_VSC_PIPES 32
#define MAX_VIEWPORTS 1
#define MAX_SCISSORS 16
#define MAX_DISCARD_RECTANGLES 4
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_PUSH_DESCRIPTORS 32
#define MAX_DYNAMIC_UNIFORM_BUFFERS 16
#define MAX_DYNAMIC_STORAGE_BUFFERS 8
#define MAX_DYNAMIC_BUFFERS                                                  \
   (MAX_DYNAMIC_UNIFORM_BUFFERS + MAX_DYNAMIC_STORAGE_BUFFERS)
#define MAX_SAMPLES_LOG2 4
#define NUM_META_FS_KEYS 13
#define TU_MAX_DRM_DEVICES 8

#define NUM_DEPTH_CLEAR_PIPELINES 3

/*
 * This is the point we switch from using CP to compute shader
 * for certain buffer operations.
 */
#define TU_BUFFER_OPS_CS_THRESHOLD 4096

enum tu_mem_heap
{
   TU_MEM_HEAP_VRAM,
   TU_MEM_HEAP_VRAM_CPU_ACCESS,
   TU_MEM_HEAP_GTT,
   TU_MEM_HEAP_COUNT
};

enum tu_mem_type
{
   TU_MEM_TYPE_VRAM,
   TU_MEM_TYPE_GTT_WRITE_COMBINE,
   TU_MEM_TYPE_VRAM_CPU_ACCESS,
   TU_MEM_TYPE_GTT_CACHED,
   TU_MEM_TYPE_COUNT
};

#define tu_printflike(a, b) __attribute__((__format__(__printf__, a, b)))

static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline uint32_t
align_u32_npot(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a * a;
}

static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline int32_t
align_i32(int32_t v, int32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

/** Alignment must be a power of 2. */
static inline bool
tu_is_aligned(uintmax_t n, uintmax_t a)
{
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}

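/* Usage sketch (illustrative only; the sizes and alignments below are
 * assumed example values, not driver constants). align_u32/align_u64
 * require a power-of-two alignment and use the mask form; align_u32_npot
 * divides, so it accepts any non-zero alignment.
 *
 *    uint32_t bo_size  = align_u32(size, 4096);      - power-of-two only
 *    uint32_t row_size = align_u32_npot(width, 96);  - any non-zero value
 *    assert(tu_is_aligned(bo_size, 4096));
 */
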
static inline uint32_t
round_up_u32(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a;
}

static inline uint64_t
round_up_u64(uint64_t v, uint64_t a)
{
   return (v + a - 1) / a;
}

static inline uint32_t
tu_minify(uint32_t n, uint32_t levels)
{
   if (unlikely(n == 0))
      return 0;
   else
      return MAX2(n >> levels, 1);
}

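/* Usage sketch (illustrative only; 1024x768 is an assumed base size):
 * tu_minify clamps to 1, so iterating past the point where one dimension
 * bottoms out stays safe.
 *
 *    for (uint32_t l = 0; l < level_count; l++) {
 *       uint32_t w = tu_minify(1024, l);   - 1024, 512, 256, ...
 *       uint32_t h = tu_minify(768, l);    - 768, 384, 192, ...
 *    }
 */
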
static inline float
tu_clamp_f(float f, float min, float max)
{
   assert(min < max);
   return (f > max) ? max : (f < min) ? min : f;
}

static inline bool
tu_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   } else
      return false;
}

#define for_each_bit(b, dword)                                               \
   for (uint32_t __dword = (dword);                                          \
        (b) = __builtin_ffs(__dword) - 1, __dword; __dword &= ~(1 << (b)))

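/* Usage sketch (illustrative only; handle_aspect is hypothetical): `b` is
 * caller-declared and receives the index of each set bit, lowest first,
 * while the macro's private mask copy is consumed as the loop advances.
 *
 *    uint32_t b;
 *    for_each_bit(b, aspect_mask)
 *       handle_aspect(1u << b);
 */
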
#define typed_memcpy(dest, src, count)                                       \
   ({                                                                        \
      STATIC_ASSERT(sizeof(*src) == sizeof(*dest));                          \
      memcpy((dest), (src), (count) * sizeof(*(src)));                       \
   })

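/* Usage sketch (illustrative only): the STATIC_ASSERT rejects copies
 * between arrays whose element sizes differ, which a bare memcpy would
 * silently accept.
 *
 *    VkViewport dst[MAX_VIEWPORTS], src[MAX_VIEWPORTS];
 *    typed_memcpy(dst, src, MAX_VIEWPORTS);
 */
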
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */
VkResult
__vk_errorf(struct tu_instance *instance,
            VkResult error,
            const char *file,
            int line,
            const char *format,
            ...);

#define vk_error(instance, error)                                            \
   __vk_errorf(instance, error, __FILE__, __LINE__, NULL);
#define vk_errorf(instance, error, format, ...)                              \
   __vk_errorf(instance, error, __FILE__, __LINE__, format, ##__VA_ARGS__);

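/* Usage sketch (illustrative only; the error values are just examples):
 * returning through vk_error/vk_errorf funnels every failure past
 * __vk_errorf, so one breakpoint there catches errors where they
 * originate.
 *
 *    if (!mem)
 *       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 *    if (fd < 0)
 *       return vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
 *                        "failed to open device: fd %d", fd);
 */
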
void
__tu_finishme(const char *file, int line, const char *format, ...)
   tu_printflike(3, 4);
void
tu_loge(const char *format, ...) tu_printflike(1, 2);
void
tu_loge_v(const char *format, va_list va);
void
tu_logi(const char *format, ...) tu_printflike(1, 2);
void
tu_logi_v(const char *format, va_list va);

/**
 * Print a FINISHME message, including its source location.
 */
#define tu_finishme(format, ...)                                             \
   do {                                                                      \
      static bool reported = false;                                          \
      if (!reported) {                                                       \
         __tu_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);           \
         reported = true;                                                    \
      }                                                                      \
   } while (0)

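/* Usage sketch (illustrative only): the static `reported` flag makes each
 * call site print at most once per process, no matter how often it runs.
 *
 *    tu_finishme("tiling mode %d not implemented", tile_mode);
 */
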
/* A non-fatal assert. Useful for debugging. */
#ifdef DEBUG
#define tu_assert(x)                                                         \
   ({                                                                        \
      if (unlikely(!(x)))                                                    \
         fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x);      \
   })
#else
#define tu_assert(x)
#endif

/* Suppress -Wunused in stub functions */
#define tu_use_args(...) __tu_use_args(0, ##__VA_ARGS__)
static inline void
__tu_use_args(int ignore, ...)
{
}

#define tu_stub()                                                            \
   do {                                                                      \
      tu_finishme("stub %s", __func__);                                      \
   } while (0)

void *
tu_lookup_entrypoint_unchecked(const char *name);
void *
tu_lookup_entrypoint_checked(
   const char *name,
   uint32_t core_version,
   const struct tu_instance_extension_table *instance,
   const struct tu_device_extension_table *device);

struct tu_physical_device
{
   VK_LOADER_DATA _loader_data;

   struct tu_instance *instance;

   char name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   uint8_t driver_uuid[VK_UUID_SIZE];
   uint8_t device_uuid[VK_UUID_SIZE];
   uint8_t cache_uuid[VK_UUID_SIZE];

   uint32_t tile_align_w;
   uint32_t tile_align_h;

   /* This is the driver's on-disk cache used as a fallback as opposed to
    * the pipeline cache defined by apps.
    */
   struct disk_cache *disk_cache;

   struct tu_device_extension_table supported_extensions;
};

enum tu_debug_flags
{
   TU_DEBUG_STARTUP = 1 << 0,
};

struct tu_instance
{
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   uint32_t api_version;
   int physical_device_count;
   struct tu_physical_device physical_devices[TU_MAX_DRM_DEVICES];

   enum tu_debug_flags debug_flags;

   struct vk_debug_report_instance debug_report_callbacks;

   struct tu_instance_extension_table enabled_extensions;
};

bool
tu_instance_extension_supported(const char *name);
uint32_t
tu_physical_device_api_version(struct tu_physical_device *dev);
bool
tu_physical_device_extension_supported(struct tu_physical_device *dev,
                                       const char *name);

struct tu_pipeline_cache
{
   struct tu_device *device;
   pthread_mutex_t mutex;

   uint32_t kernel_count;
   struct cache_entry **hash_table;

   VkAllocationCallbacks alloc;
};

struct tu_pipeline_key
{
};

void
tu_pipeline_cache_init(struct tu_pipeline_cache *cache,
                       struct tu_device *device);
void
tu_pipeline_cache_finish(struct tu_pipeline_cache *cache);
void
tu_pipeline_cache_load(struct tu_pipeline_cache *cache,
                       const void *data,
                       size_t size);

struct tu_shader_variant;

bool
tu_create_shader_variants_from_pipeline_cache(
   struct tu_device *device,
   struct tu_pipeline_cache *cache,
   const unsigned char *sha1,
   struct tu_shader_variant **variants);

void
tu_pipeline_cache_insert_shaders(struct tu_device *device,
                                 struct tu_pipeline_cache *cache,
                                 const unsigned char *sha1,
                                 struct tu_shader_variant **variants,
                                 const void *const *codes,
                                 const unsigned *code_sizes);

struct tu_meta_state
{
   VkAllocationCallbacks alloc;

   struct tu_pipeline_cache cache;
};

#define TU_QUEUE_GENERAL 0

#define TU_MAX_QUEUE_FAMILIES 1

void
tu_fence_init(struct tu_fence *fence, bool signaled);
void
tu_fence_finish(struct tu_fence *fence);
void
tu_fence_update_fd(struct tu_fence *fence, int fd);
void
tu_fence_copy(struct tu_fence *fence, const struct tu_fence *src);
void
tu_fence_signal(struct tu_fence *fence);
void
tu_fence_wait_idle(struct tu_fence *fence);

struct tu_queue
{
   VK_LOADER_DATA _loader_data;
   struct tu_device *device;
   uint32_t queue_family_index;
   VkDeviceQueueCreateFlags flags;

   uint32_t msm_queue_id;
   struct tu_fence submit_fence;
};

struct tu_device
{
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   struct tu_instance *instance;

   struct tu_meta_state meta_state;

   struct tu_queue *queues[TU_MAX_QUEUE_FAMILIES];
   int queue_count[TU_MAX_QUEUE_FAMILIES];

   struct tu_physical_device *physical_device;

   /* Backup in-memory cache to be used if the app doesn't provide one */
   struct tu_pipeline_cache *mem_cache;

   struct list_head shader_slabs;
   mtx_t shader_slab_mutex;

   struct tu_device_extension_table enabled_extensions;
};

VkResult
tu_bo_init_new(struct tu_device *dev, struct tu_bo *bo, uint64_t size);
VkResult
tu_bo_init_dmabuf(struct tu_device *dev,
                  struct tu_bo *bo,
                  uint64_t size,
                  int fd);
int
tu_bo_export_dmabuf(struct tu_device *dev, struct tu_bo *bo);
void
tu_bo_finish(struct tu_device *dev, struct tu_bo *bo);
VkResult
tu_bo_map(struct tu_device *dev, struct tu_bo *bo);

struct tu_cs_entry
{
   const struct tu_bo *bo;
   uint32_t size;
   uint32_t offset;
};

enum tu_cs_mode
{
   /*
    * A command stream in TU_CS_MODE_GROW mode grows automatically whenever it
    * is full. tu_cs_begin must be called before command packet emission and
    * tu_cs_end must be called after.
    *
    * This mode may create multiple entries internally. The entries must be
    * submitted together.
    */
   TU_CS_MODE_GROW,

   /*
    * A command stream in TU_CS_MODE_EXTERNAL mode wraps an external,
    * fixed-size buffer. tu_cs_begin and tu_cs_end are optional and have no
    * effect on it.
    *
    * This mode does not create any entry or any BO.
    */
   TU_CS_MODE_EXTERNAL,

   /*
    * A command stream in TU_CS_MODE_SUB_STREAM mode does not support direct
    * command packet emission. tu_cs_begin_sub_stream must be called to get a
    * sub-stream to emit command packets to. When done with the sub-stream,
    * tu_cs_end_sub_stream must be called.
    *
    * This mode does not create any entry internally.
    */
   TU_CS_MODE_SUB_STREAM,
};

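/* Usage sketch (illustrative only; the emission step is elided and the
 * exact prototypes live in the tu_cs implementation): in TU_CS_MODE_GROW a
 * recording looks like
 *
 *    tu_cs_begin(...);      - stream allocates new BOs as it fills up
 *       ... emit command packets ...
 *    tu_cs_end(...);        - resulting entries are submitted together
 *
 * while TU_CS_MODE_SUB_STREAM instead hands out nested streams through
 * tu_cs_begin_sub_stream()/tu_cs_end_sub_stream().
 */
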
struct tu_cs
{
   uint32_t *start;
   uint32_t *cur;
   uint32_t *reserved_end;
   uint32_t *end;

   enum tu_cs_mode mode;
   uint32_t next_bo_size;

   struct tu_cs_entry *entries;
   uint32_t entry_count;
   uint32_t entry_capacity;

   struct tu_bo **bos;
   uint32_t bo_count;
   uint32_t bo_capacity;
};

struct tu_device_memory
{
   struct tu_bo bo;

   /* for dedicated allocations */
   struct tu_image *image;
   struct tu_buffer *buffer;
};

struct tu_descriptor_range
{
   uint64_t va;
   uint32_t size;
};

struct tu_descriptor_set
{
   const struct tu_descriptor_set_layout *layout;
   uint32_t *mapped_ptr;
   struct tu_descriptor_range *dynamic_descriptors;
};

struct tu_push_descriptor_set
{
   struct tu_descriptor_set set;
   uint32_t capacity;
};

struct tu_descriptor_pool_entry
{
   uint32_t offset;
   uint32_t size;
   struct tu_descriptor_set *set;
};

struct tu_descriptor_pool
{
   uint64_t current_offset;

   uint8_t *host_memory_base;
   uint8_t *host_memory_ptr;
   uint8_t *host_memory_end;

   uint32_t entry_count;
   uint32_t max_entry_count;
   struct tu_descriptor_pool_entry entries[0];
};

struct tu_descriptor_update_template_entry
{
   VkDescriptorType descriptor_type;

   /* The number of descriptors to update */
   uint32_t descriptor_count;

   /* Into mapped_ptr or dynamic_descriptors, in units of the respective array */
   uint32_t dst_offset;

   /* In dwords. Not valid/used for dynamic descriptors */
   uint32_t dst_stride;

   uint32_t buffer_offset;

   /* Only valid for combined image samplers and samplers */
   uint16_t has_sampler;

   /* For push descriptors */
   const uint32_t *immutable_samplers;
};

struct tu_descriptor_update_template
{
   uint32_t entry_count;
   VkPipelineBindPoint bind_point;
   struct tu_descriptor_update_template_entry entry[0];
};

struct tu_buffer
{
   VkBufferUsageFlags usage;
   VkBufferCreateFlags flags;

   struct tu_bo *bo;
   VkDeviceSize bo_offset;
};

enum tu_dynamic_state_bits
{
   TU_DYNAMIC_VIEWPORT = 1 << 0,
   TU_DYNAMIC_SCISSOR = 1 << 1,
   TU_DYNAMIC_LINE_WIDTH = 1 << 2,
   TU_DYNAMIC_DEPTH_BIAS = 1 << 3,
   TU_DYNAMIC_BLEND_CONSTANTS = 1 << 4,
   TU_DYNAMIC_DEPTH_BOUNDS = 1 << 5,
   TU_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6,
   TU_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7,
   TU_DYNAMIC_STENCIL_REFERENCE = 1 << 8,
   TU_DYNAMIC_DISCARD_RECTANGLE = 1 << 9,
   TU_DYNAMIC_ALL = (1 << 10) - 1,
};

struct tu_vertex_binding
{
   struct tu_buffer *buffer;
   VkDeviceSize offset;
};

struct tu_viewport_state
{
   uint32_t count;
   VkViewport viewports[MAX_VIEWPORTS];
};

struct tu_scissor_state
{
   uint32_t count;
   VkRect2D scissors[MAX_SCISSORS];
};

struct tu_discard_rectangle_state
{
   uint32_t count;
   VkRect2D rectangles[MAX_DISCARD_RECTANGLES];
};

struct tu_dynamic_state
{
   /**
    * Bitmask of (1 << VK_DYNAMIC_STATE_*).
    * Defines the set of saved dynamic state.
    */
   uint32_t mask;

   struct tu_viewport_state viewport;
   struct tu_scissor_state scissor;

   float blend_constants[4];

   struct
   {
      uint32_t front;
      uint32_t back;
   } stencil_compare_mask;

   struct
   {
      uint32_t front;
      uint32_t back;
   } stencil_write_mask;

   struct tu_discard_rectangle_state discard_rectangle;
};

extern const struct tu_dynamic_state default_dynamic_state;

const char *
tu_get_debug_option_name(int id);

const char *
tu_get_perftest_option_name(int id);

/**
 * Attachment state when recording a renderpass instance.
 *
 * The clear value is valid only if there exists a pending clear.
 */
struct tu_attachment_state
{
   VkImageAspectFlags pending_clear_aspects;
   uint32_t cleared_views;
   VkClearValue clear_value;
   VkImageLayout current_layout;
};

struct tu_descriptor_state
{
   struct tu_descriptor_set *sets[MAX_SETS];
   struct tu_push_descriptor_set push_set;
   uint32_t dynamic_buffers[4 * MAX_DYNAMIC_BUFFERS];
};

struct tu_tiling_config
{
   VkRect2D render_area;
   uint32_t buffer_cpp[MAX_RTS + 2];
   uint32_t buffer_count;

   /* position and size of the first tile */
   VkRect2D tile0;
   /* number of tiles */
   VkExtent2D tile_count;

   uint32_t gmem_offsets[MAX_RTS + 2];

   /* size of the first VSC pipe */
   VkExtent2D pipe0;
   /* number of VSC pipes */
   VkExtent2D pipe_count;

   /* pipe register values */
   uint32_t pipe_config[MAX_VSC_PIPES];
   uint32_t pipe_sizes[MAX_VSC_PIPES];
};

struct tu_cmd_state
{
   /* Vertex descriptors */
   uint64_t vb_va;
   unsigned vb_size;

   struct tu_dynamic_state dynamic;

   /* Index buffer */
   struct tu_buffer *index_buffer;
   uint64_t index_offset;
   uint32_t index_type;
   uint32_t max_index_count;
   uint64_t index_va;

   const struct tu_render_pass *pass;
   const struct tu_subpass *subpass;
   const struct tu_framebuffer *framebuffer;
   struct tu_attachment_state *attachments;

   struct tu_tiling_config tiling_config;

   struct tu_cs_entry tile_load_ib;
   struct tu_cs_entry tile_store_ib;
};

struct tu_cmd_pool
{
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
   struct list_head free_cmd_buffers;
   uint32_t queue_family_index;
};

struct tu_cmd_buffer_upload
{
   uint8_t *map;
   unsigned offset;
   uint64_t size;
   struct list_head list;
};

enum tu_cmd_buffer_status
{
   TU_CMD_BUFFER_STATUS_INVALID,
   TU_CMD_BUFFER_STATUS_INITIAL,
   TU_CMD_BUFFER_STATUS_RECORDING,
   TU_CMD_BUFFER_STATUS_EXECUTABLE,
   TU_CMD_BUFFER_STATUS_PENDING,
};

struct tu_bo_list
{
   uint32_t count;
   uint32_t capacity;
   struct drm_msm_gem_submit_bo *bo_infos;
};

#define TU_BO_LIST_FAILED (~0)

void
tu_bo_list_init(struct tu_bo_list *list);
void
tu_bo_list_destroy(struct tu_bo_list *list);
void
tu_bo_list_reset(struct tu_bo_list *list);
uint32_t
tu_bo_list_add(struct tu_bo_list *list,
               const struct tu_bo *bo,
               uint32_t flags);
VkResult
tu_bo_list_merge(struct tu_bo_list *list, const struct tu_bo_list *other);

struct tu_cmd_buffer
{
   VK_LOADER_DATA _loader_data;

   struct tu_device *device;

   struct tu_cmd_pool *pool;
   struct list_head pool_link;

   VkCommandBufferUsageFlags usage_flags;
   VkCommandBufferLevel level;
   enum tu_cmd_buffer_status status;

   struct tu_cmd_state state;
   struct tu_vertex_binding vertex_bindings[MAX_VBS];
   uint32_t queue_family_index;

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
   VkShaderStageFlags push_constant_stages;
   struct tu_descriptor_set meta_push_descriptors;

   struct tu_descriptor_state descriptors[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

   struct tu_cmd_buffer_upload upload;

   VkResult record_result;

   struct tu_bo_list bo_list;
   struct tu_cs cs;
   struct tu_cs tile_cs;

   uint32_t marker_seqno;

   struct tu_bo scratch_bo;
   uint32_t scratch_seqno;
};

void
tu6_emit_event_write(struct tu_cmd_buffer *cmd,
                     struct tu_cs *cs,
                     enum vgt_event_type event,
                     bool need_seqno);

VkResult
tu_get_memory_fd(struct tu_device *device,
                 struct tu_device_memory *memory,
                 int *pFD);

/*
 * Takes x,y,z as exact numbers of invocations, instead of blocks.
 *
 * Limitations: Can't call normal dispatch functions without binding or
 * rebinding the compute pipeline.
 */
void
tu_unaligned_dispatch(struct tu_cmd_buffer *cmd_buffer,
                      uint32_t x,
                      uint32_t y,
                      uint32_t z);

struct tu_shader_module;

#define TU_HASH_SHADER_IS_GEOM_COPY_SHADER (1 << 0)
#define TU_HASH_SHADER_SISCHED (1 << 1)
#define TU_HASH_SHADER_UNSAFE_MATH (1 << 2)

void
tu_hash_shaders(unsigned char *hash,
                const VkPipelineShaderStageCreateInfo **stages,
                const struct tu_pipeline_layout *layout,
                const struct tu_pipeline_key *key,
                uint32_t flags);

static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}

#define TU_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define tu_foreach_stage(stage, stage_bits)                                  \
   for (gl_shader_stage stage,                                               \
        __tmp = (gl_shader_stage)((stage_bits) & TU_STAGE_MASK);             \
        stage = __builtin_ffs(__tmp) - 1, __tmp; __tmp &= ~(1 << (stage)))

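/* Usage sketch (illustrative only; hash_stage is hypothetical): the macro
 * declares `stage` itself and visits each stage bit set in the mask,
 * lowest stage first.
 *
 *    tu_foreach_stage(stage, active_stages)
 *       hash_stage(stage);
 */
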
struct tu_shader_module
{
   struct nir_shader *nir;
   unsigned char sha1[20];
};

struct tu_pipeline
{
   struct tu_dynamic_state dynamic_state;

   struct tu_pipeline_layout *layout;

   bool need_indirect_descriptor_sets;
   VkShaderStageFlags active_stages;

   struct
   {
      enum pc_di_primtype primtype;
      bool primitive_restart;
   } ia;

   struct
   {
      struct tu_cs_entry state_ib;
   } vp;

   struct
   {
      uint32_t gras_su_cntl;
      struct tu_cs_entry state_ib;
   } rast;

   struct
   {
      struct tu_cs_entry state_ib;
   } ds;
};

void
tu6_emit_viewport(struct tu_cs *cs, const VkViewport *viewport);

void
tu6_emit_scissor(struct tu_cs *cs, const VkRect2D *scissor);

void
tu6_emit_gras_su_cntl(struct tu_cs *cs,
                      uint32_t gras_su_cntl,
                      float line_width);

void
tu6_emit_depth_bias(struct tu_cs *cs,
                    float constant_factor,
                    float clamp,
                    float slope_factor);

void
tu6_emit_stencil_compare_mask(struct tu_cs *cs,
                              uint32_t front,
                              uint32_t back);

void
tu6_emit_stencil_write_mask(struct tu_cs *cs, uint32_t front, uint32_t back);

void
tu6_emit_stencil_reference(struct tu_cs *cs, uint32_t front, uint32_t back);

struct tu_userdata_info *
tu_lookup_user_sgpr(struct tu_pipeline *pipeline,
                    gl_shader_stage stage,
                    int idx);

struct tu_shader_variant *
tu_get_shader(struct tu_pipeline *pipeline, gl_shader_stage stage);

struct tu_graphics_pipeline_create_info
{
   bool use_rectlist;
   bool db_depth_clear;
   bool db_stencil_clear;
   bool db_depth_disable_expclear;
   bool db_stencil_disable_expclear;
   bool db_flush_depth_inplace;
   bool db_flush_stencil_inplace;
   bool db_resummarize;
   uint32_t custom_blend_mode;
};

struct tu_native_format
{
   int vtx;      /* VFMTn_xxx or -1 */
   int tex;      /* TFMTn_xxx or -1 */
   int rb;       /* RBn_xxx or -1 */
   int swap;     /* enum a3xx_color_swap */
   bool present; /* internal only; always true for external users */
};

const struct tu_native_format *
tu6_get_native_format(VkFormat format);

void
tu_pack_clear_value(const VkClearValue *val,
                    VkFormat format,
                    uint32_t buf[4]);

enum a6xx_2d_ifmt
tu6_rb_fmt_to_ifmt(enum a6xx_color_fmt fmt);

struct tu_image_level
{
   VkDeviceSize offset;
   VkDeviceSize size;
   uint32_t pitch;
};

struct tu_image
{
   VkImageType type;
   /* The original VkFormat provided by the client. This may not match any
    * of the actual surface formats.
    */
   VkFormat vk_format;
   VkImageAspectFlags aspects;
   VkImageUsageFlags usage;  /**< Superset of VkImageCreateInfo::usage. */
   VkImageTiling tiling;     /** VkImageCreateInfo::tiling */
   VkImageCreateFlags flags; /** VkImageCreateInfo::flags */

   uint32_t level_count;
   uint32_t layer_count;

   VkDeviceSize layer_size;
   struct tu_image_level levels[15];

   unsigned queue_family_mask;

   /* For VK_ANDROID_native_buffer, the WSI image owns the memory. */
   VkDeviceMemory owned_memory;

   /* Set when bound */
   const struct tu_bo *bo;
   VkDeviceSize bo_offset;
};

uint32_t
tu_image_queue_family_mask(const struct tu_image *image,
                           uint32_t family,
                           uint32_t queue_family);

static inline uint32_t
tu_get_layerCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->layerCount == VK_REMAINING_ARRAY_LAYERS
             ? image->layer_count - range->baseArrayLayer
             : range->layerCount;
}

static inline uint32_t
tu_get_levelCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->levelCount == VK_REMAINING_MIP_LEVELS
             ? image->level_count - range->baseMipLevel
             : range->levelCount;
}

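/* Usage sketch (illustrative only): these helpers resolve the
 * VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS sentinels an
 * application may pass, so loops can rely on concrete counts.
 *
 *    uint32_t layers = tu_get_layerCount(image, range);
 *    uint32_t levels = tu_get_levelCount(image, range);
 *    ... iterate range->baseMipLevel + [0, levels) safely ...
 */
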
struct tu_image_view
{
   struct tu_image *image; /**< VkImageViewCreateInfo::image */

   VkImageViewType type;
   VkImageAspectFlags aspect_mask;
   VkFormat vk_format;
   uint32_t base_layer;
   uint32_t layer_count;
   uint32_t base_mip;
   uint32_t level_count;
   VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */

   uint32_t descriptor[16];

   /* Descriptor for use as a storage image as opposed to a sampled image.
    * This has a few differences for cube maps (e.g. type).
    */
   uint32_t storage_descriptor[16];
};

struct tu_image_create_info
{
   const VkImageCreateInfo *vk_info;
   bool no_metadata_planes;
};

VkResult
tu_image_create(VkDevice _device,
                const struct tu_image_create_info *info,
                const VkAllocationCallbacks *alloc,
                VkImage *pImage);

VkResult
tu_image_from_gralloc(VkDevice device_h,
                      const VkImageCreateInfo *base_info,
                      const VkNativeBufferANDROID *gralloc_info,
                      const VkAllocationCallbacks *alloc,
                      VkImage *out_image_h);

void
tu_image_view_init(struct tu_image_view *view,
                   struct tu_device *device,
                   const VkImageViewCreateInfo *pCreateInfo);

struct tu_buffer_view
{
   uint64_t range; /**< VkBufferViewCreateInfo::range */
};

void
tu_buffer_view_init(struct tu_buffer_view *view,
                    struct tu_device *device,
                    const VkBufferViewCreateInfo *pCreateInfo);

static inline struct VkExtent3D
tu_sanitize_image_extent(const VkImageType imageType,
                         const struct VkExtent3D imageExtent)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkExtent3D) { imageExtent.width, 1, 1 };
   case VK_IMAGE_TYPE_2D:
      return (VkExtent3D) { imageExtent.width, imageExtent.height, 1 };
   case VK_IMAGE_TYPE_3D:
      return imageExtent;
   default:
      unreachable("invalid image type");
   }
}

static inline struct VkOffset3D
tu_sanitize_image_offset(const VkImageType imageType,
                         const struct VkOffset3D imageOffset)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkOffset3D) { imageOffset.x, 0, 0 };
   case VK_IMAGE_TYPE_2D:
      return (VkOffset3D) { imageOffset.x, imageOffset.y, 0 };
   case VK_IMAGE_TYPE_3D:
      return imageOffset;
   default:
      unreachable("invalid image type");
   }
}

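/* Usage sketch (illustrative only; pRegion stands in for a copy-region
 * parameter): normalizing client-provided data so later math can assume
 * all three components are meaningful.
 *
 *    VkExtent3D extent = tu_sanitize_image_extent(image->type,
 *                                                 pRegion->extent);
 *    VkOffset3D offset = tu_sanitize_image_offset(image->type,
 *                                                 pRegion->imageOffset);
 */
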
struct tu_attachment_info
{
   struct tu_image_view *attachment;
};

struct tu_framebuffer
{
   uint32_t width;
   uint32_t height;
   uint32_t layers;

   uint32_t attachment_count;
   struct tu_attachment_info attachments[0];
};

struct tu_subpass_barrier
{
   VkPipelineStageFlags src_stage_mask;
   VkAccessFlags src_access_mask;
   VkAccessFlags dst_access_mask;
};

void
tu_subpass_barrier(struct tu_cmd_buffer *cmd_buffer,
                   const struct tu_subpass_barrier *barrier);

struct tu_subpass_attachment
{
   uint32_t attachment;
   VkImageLayout layout;
};

struct tu_subpass
{
   uint32_t input_count;
   uint32_t color_count;
   struct tu_subpass_attachment *input_attachments;
   struct tu_subpass_attachment *color_attachments;
   struct tu_subpass_attachment *resolve_attachments;
   struct tu_subpass_attachment depth_stencil_attachment;

   /** Subpass has at least one resolve attachment */
   bool has_resolve;

   struct tu_subpass_barrier start_barrier;

   uint32_t view_mask;
   VkSampleCountFlagBits max_sample_count;
};

struct tu_render_pass_attachment
{
   VkFormat format;
   uint32_t samples;
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
   VkImageLayout initial_layout;
   VkImageLayout final_layout;
};

struct tu_render_pass
{
   uint32_t attachment_count;
   uint32_t subpass_count;
   struct tu_subpass_attachment *subpass_attachments;
   struct tu_render_pass_attachment *attachments;
   struct tu_subpass_barrier end_barrier;
   struct tu_subpass subpasses[0];
};

VkResult
tu_device_init_meta(struct tu_device *device);
void
tu_device_finish_meta(struct tu_device *device);

struct tu_query_pool
{
   uint32_t availability_offset;
   uint32_t pipeline_stats_mask;
};

struct tu_semaphore
{
   uint32_t syncobj;
   uint32_t temp_syncobj;
};

void
tu_set_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                      VkPipelineBindPoint bind_point,
                      struct tu_descriptor_set *set,
                      unsigned idx);

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet overrideSet,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies);

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
   const void *pData);

void
tu_meta_push_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                            VkPipelineBindPoint pipelineBindPoint,
                            VkPipelineLayout _layout,
                            uint32_t set,
                            uint32_t descriptorWriteCount,
                            const VkWriteDescriptorSet *pDescriptorWrites);

int
tu_drm_get_gpu_id(const struct tu_physical_device *dev, uint32_t *id);

int
tu_drm_get_gmem_size(const struct tu_physical_device *dev, uint32_t *size);

int
tu_drm_submitqueue_new(const struct tu_device *dev,
                       int priority,
                       uint32_t *queue_id);

void
tu_drm_submitqueue_close(const struct tu_device *dev, uint32_t queue_id);

uint32_t
tu_gem_new(const struct tu_device *dev, uint64_t size, uint32_t flags);
uint32_t
tu_gem_import_dmabuf(const struct tu_device *dev,
                     int prime_fd,
                     uint64_t size);
int
tu_gem_export_dmabuf(const struct tu_device *dev, uint32_t gem_handle);
void
tu_gem_close(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_offset(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_iova(const struct tu_device *dev, uint32_t gem_handle);

#define TU_DEFINE_HANDLE_CASTS(__tu_type, __VkType)                          \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) _handle;                                   \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType) _obj;                                                \
   }

#define TU_DEFINE_NONDISP_HANDLE_CASTS(__tu_type, __VkType)                  \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) (uintptr_t) _handle;                       \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType)(uintptr_t) _obj;                                     \
   }

#define TU_FROM_HANDLE(__tu_type, __name, __handle)                          \
   struct __tu_type *__name = __tu_type##_from_handle(__handle)

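/* Usage sketch (illustrative only): a typical entrypoint prologue converts
 * loader handles back into driver structs via the cast helpers generated
 * by the macros above.
 *
 *    void tu_DestroyBuffer(VkDevice _device, VkBuffer _buffer,
 *                          const VkAllocationCallbacks *pAllocator)
 *    {
 *       TU_FROM_HANDLE(tu_device, device, _device);
 *       TU_FROM_HANDLE(tu_buffer, buffer, _buffer);
 *       ...
 *    }
 */
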
TU_DEFINE_HANDLE_CASTS(tu_cmd_buffer, VkCommandBuffer)
TU_DEFINE_HANDLE_CASTS(tu_device, VkDevice)
TU_DEFINE_HANDLE_CASTS(tu_instance, VkInstance)
TU_DEFINE_HANDLE_CASTS(tu_physical_device, VkPhysicalDevice)
TU_DEFINE_HANDLE_CASTS(tu_queue, VkQueue)

TU_DEFINE_NONDISP_HANDLE_CASTS(tu_cmd_pool, VkCommandPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer, VkBuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer_view, VkBufferView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_pool, VkDescriptorPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set, VkDescriptorSet)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set_layout,
                               VkDescriptorSetLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_update_template,
                               VkDescriptorUpdateTemplateKHR)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_device_memory, VkDeviceMemory)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_fence, VkFence)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_event, VkEvent)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_framebuffer, VkFramebuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image, VkImage)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image_view, VkImageView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_cache, VkPipelineCache)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline, VkPipeline)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_layout, VkPipelineLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_query_pool, VkQueryPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_render_pass, VkRenderPass)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_sampler, VkSampler)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_shader_module, VkShaderModule)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_semaphore, VkSemaphore)

#endif /* TU_PRIVATE_H */