 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#define VG(x) ((void)0)

#include "c11/threads.h"
#include "main/macros.h"
#include "util/list.h"
#include "util/macros.h"

#include "vk_debug_report.h"
#include "wsi_common.h"

#include "drm-uapi/msm_drm.h"
#include "ir3/ir3_compiler.h"
#include "ir3/ir3_shader.h"

#include "adreno_common.xml.h"
#include "adreno_pm4.xml.h"

#include "fdl/freedreno_layout.h"

#include "tu_descriptor_set.h"
#include "tu_extensions.h"

/* Pre-declarations needed for WSI entrypoints */
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <vulkan/vulkan.h>
#include <vulkan/vulkan_intel.h>

#include "tu_entrypoints.h"

#define MAX_VERTEX_ATTRIBS 32
#define MAX_VSC_PIPES 32
#define MAX_VIEWPORTS 1
#define MAX_SCISSORS 16
#define MAX_DISCARD_RECTANGLES 4
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_PUSH_DESCRIPTORS 32
#define MAX_DYNAMIC_UNIFORM_BUFFERS 16
#define MAX_DYNAMIC_STORAGE_BUFFERS 8
#define MAX_DYNAMIC_BUFFERS                                                  \
   (MAX_DYNAMIC_UNIFORM_BUFFERS + MAX_DYNAMIC_STORAGE_BUFFERS)
#define MAX_SAMPLES_LOG2 4
#define NUM_META_FS_KEYS 13
#define TU_MAX_DRM_DEVICES 8

/* The Qualcomm driver exposes 0x20000058 */
#define MAX_STORAGE_BUFFER_RANGE 0x20000000

#define NUM_DEPTH_CLEAR_PIPELINES 3

/*
 * This is the point we switch from using CP to compute shader
 * for certain buffer operations.
 */
#define TU_BUFFER_OPS_CS_THRESHOLD 4096

#define A6XX_TEX_CONST_DWORDS 16
#define A6XX_TEX_SAMP_DWORDS 4

   TU_MEM_HEAP_VRAM_CPU_ACCESS,

   TU_MEM_TYPE_GTT_WRITE_COMBINE,
   TU_MEM_TYPE_VRAM_CPU_ACCESS,
   TU_MEM_TYPE_GTT_CACHED,

#define tu_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline uint32_t
align_u32_npot(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a * a;
}

static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline int32_t
align_i32(int32_t v, int32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

/** Alignment must be a power of 2. */
static inline bool
tu_is_aligned(uintmax_t n, uintmax_t a)
{
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}

static inline uint32_t
round_up_u32(uint32_t v, uint32_t a)
{
   return (v + a - 1) / a;
}

static inline uint64_t
round_up_u64(uint64_t v, uint64_t a)
{
   return (v + a - 1) / a;
}
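
/* Illustrative: align a size up to a power-of-two boundary and verify it.
 * The 4096 and 256 values below are only example parameters, not something
 * this header mandates.
 *
 *    uint64_t aligned = align_u64(size, 4096);
 *    assert(tu_is_aligned(aligned, 4096));
 *    uint32_t blocks = round_up_u32(size, 256);   // number of 256-byte blocks
 */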
static inline uint32_t
tu_minify(uint32_t n, uint32_t levels)
{
   if (unlikely(n == 0))
      return 0;
   else
      return MAX2(n >> levels, 1);
}

static inline float
tu_clamp_f(float f, float min, float max)
{
   assert(min < max);

   if (f > max)
      return max;
   else if (f < min)
      return min;
   else
      return f;
}

static inline bool
tu_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   }
   return false;
}
#define for_each_bit(b, dword)                                               \
   for (uint32_t __dword = (dword);                                          \
        (b) = __builtin_ffs(__dword) - 1, __dword; __dword &= ~(1 << (b)))
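
/* Illustrative: visit each set bit of a mask; `b` receives the bit index on
 * every iteration. `handle_bit()` is a placeholder, not part of this header.
 *
 *    uint32_t b;
 *    for_each_bit(b, 0x5)   // visits b = 0, then b = 2
 *       handle_bit(b);
 */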
#define typed_memcpy(dest, src, count)                                       \
   ({                                                                        \
      STATIC_ASSERT(sizeof(*src) == sizeof(*dest));                          \
      memcpy((dest), (src), (count) * sizeof(*(src)));                       \
   })

#define COND(bool, val) ((bool) ? (val) : 0)
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */
VkResult
__vk_errorf(struct tu_instance *instance,
            VkResult error,
            const char *file,
            int line,
            const char *format,
            ...);

#define vk_error(instance, error)                                            \
   __vk_errorf(instance, error, __FILE__, __LINE__, NULL);
#define vk_errorf(instance, error, format, ...)                              \
   __vk_errorf(instance, error, __FILE__, __LINE__, format, ##__VA_ARGS__);
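
/* Illustrative: report and propagate an allocation failure through the error
 * hook so a debugger breakpoint on __vk_errorf catches it at the error site:
 *
 *    if (!mem)
 *       return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
 */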
void
__tu_finishme(const char *file, int line, const char *format, ...)
   tu_printflike(3, 4);
void
tu_loge(const char *format, ...) tu_printflike(1, 2);
void
tu_loge_v(const char *format, va_list va);
void
tu_logi(const char *format, ...) tu_printflike(1, 2);
void
tu_logi_v(const char *format, va_list va);

/**
 * Print a FINISHME message, including its source location.
 */
#define tu_finishme(format, ...)                                             \
   do {                                                                      \
      static bool reported = false;                                          \
      if (!reported) {                                                       \
         __tu_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);           \
         reported = true;                                                    \
      }                                                                      \
   } while (0)
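
/* Illustrative: warn once per call site about a path that is not implemented
 * yet. The feature name below is only an example.
 *
 *    tu_finishme("secondary command buffer inheritance");
 */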
/* A non-fatal assert. Useful for debugging. */
#define tu_assert(x)                                                         \
   ({                                                                        \
      if (unlikely(!(x)))                                                    \
         fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x);      \
   })

/* Suppress -Wunused in stub functions */
#define tu_use_args(...) __tu_use_args(0, ##__VA_ARGS__)
static inline void
__tu_use_args(int ignore, ...)
{
}

#define tu_stub()                                                            \
   do {                                                                      \
      tu_finishme("stub %s", __func__);                                      \
   } while (0)

void *
tu_lookup_entrypoint_unchecked(const char *name);
void *
tu_lookup_entrypoint_checked(
   const char *name,
   uint32_t core_version,
   const struct tu_instance_extension_table *instance,
   const struct tu_device_extension_table *device);
struct tu_physical_device
{
   VK_LOADER_DATA _loader_data;

   struct tu_instance *instance;

   char name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   uint8_t driver_uuid[VK_UUID_SIZE];
   uint8_t device_uuid[VK_UUID_SIZE];
   uint8_t cache_uuid[VK_UUID_SIZE];

   struct wsi_device wsi_device;

   uint32_t tile_align_w;
   uint32_t tile_align_h;

   /* This is the driver's on-disk cache used as a fallback as opposed to
    * the pipeline cache defined by apps.
    */
   struct disk_cache *disk_cache;

   struct tu_device_extension_table supported_extensions;
};
   TU_DEBUG_STARTUP = 1 << 0,
   TU_DEBUG_NIR = 1 << 1,
   TU_DEBUG_IR3 = 1 << 2,
   TU_DEBUG_NOBIN = 1 << 3,

   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   uint32_t api_version;
   int physical_device_count;
   struct tu_physical_device physical_devices[TU_MAX_DRM_DEVICES];

   enum tu_debug_flags debug_flags;

   struct vk_debug_report_instance debug_report_callbacks;

   struct tu_instance_extension_table enabled_extensions;
VkResult
tu_wsi_init(struct tu_physical_device *physical_device);
void
tu_wsi_finish(struct tu_physical_device *physical_device);

bool
tu_instance_extension_supported(const char *name);
uint32_t
tu_physical_device_api_version(struct tu_physical_device *dev);
bool
tu_physical_device_extension_supported(struct tu_physical_device *dev,
                                       const char *name);
struct tu_pipeline_cache
{
   struct tu_device *device;
   pthread_mutex_t mutex;

   uint32_t kernel_count;
   struct cache_entry **hash_table;

   VkAllocationCallbacks alloc;
};

struct tu_pipeline_key
{
};
void
tu_pipeline_cache_init(struct tu_pipeline_cache *cache,
                       struct tu_device *device);
void
tu_pipeline_cache_finish(struct tu_pipeline_cache *cache);
void
tu_pipeline_cache_load(struct tu_pipeline_cache *cache,
                       const void *data,
                       size_t size);

struct tu_shader_variant;

bool
tu_create_shader_variants_from_pipeline_cache(
   struct tu_device *device,
   struct tu_pipeline_cache *cache,
   const unsigned char *sha1,
   struct tu_shader_variant **variants);

void
tu_pipeline_cache_insert_shaders(struct tu_device *device,
                                 struct tu_pipeline_cache *cache,
                                 const unsigned char *sha1,
                                 struct tu_shader_variant **variants,
                                 const void *const *codes,
                                 const unsigned *code_sizes);
   VkAllocationCallbacks alloc;

   struct tu_pipeline_cache cache;

#define TU_QUEUE_GENERAL 0

#define TU_MAX_QUEUE_FAMILIES 1
   struct wsi_fence *fence_wsi;

void
tu_fence_init(struct tu_fence *fence, bool signaled);
void
tu_fence_finish(struct tu_fence *fence);
void
tu_fence_update_fd(struct tu_fence *fence, int fd);
void
tu_fence_copy(struct tu_fence *fence, const struct tu_fence *src);
void
tu_fence_signal(struct tu_fence *fence);
void
tu_fence_wait_idle(struct tu_fence *fence);
   VK_LOADER_DATA _loader_data;
   struct tu_device *device;
   uint32_t queue_family_index;
   VkDeviceQueueCreateFlags flags;

   uint32_t msm_queue_id;
   struct tu_fence submit_fence;

   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   struct tu_instance *instance;

   struct tu_meta_state meta_state;

   struct tu_queue *queues[TU_MAX_QUEUE_FAMILIES];
   int queue_count[TU_MAX_QUEUE_FAMILIES];

   struct tu_physical_device *physical_device;

   struct ir3_compiler *compiler;

   /* Backup in-memory cache to be used if the app doesn't provide one */
   struct tu_pipeline_cache *mem_cache;

   struct tu_bo vsc_data;
   struct tu_bo vsc_data2;
   uint32_t vsc_data_pitch;
   uint32_t vsc_data2_pitch;

   struct list_head shader_slabs;
   mtx_t shader_slab_mutex;

   struct tu_device_extension_table enabled_extensions;
VkResult
tu_bo_init_new(struct tu_device *dev, struct tu_bo *bo, uint64_t size);
VkResult
tu_bo_init_dmabuf(struct tu_device *dev,
                  struct tu_bo *bo,
                  uint64_t size,
                  int fd);
int
tu_bo_export_dmabuf(struct tu_device *dev, struct tu_bo *bo);
void
tu_bo_finish(struct tu_device *dev, struct tu_bo *bo);
VkResult
tu_bo_map(struct tu_device *dev, struct tu_bo *bo);

   const struct tu_bo *bo;
struct ts_cs_memory {

   /*
    * A command stream in TU_CS_MODE_GROW mode grows automatically whenever it
    * is full. tu_cs_begin must be called before command packet emission and
    * tu_cs_end must be called after.
    *
    * This mode may create multiple entries internally. The entries must be
    * submitted together.
    */
   TU_CS_MODE_GROW,

   /*
    * A command stream in TU_CS_MODE_EXTERNAL mode wraps an external,
    * fixed-size buffer. tu_cs_begin and tu_cs_end are optional and have no
    * effect on it.
    *
    * This mode does not create any entry or any BO.
    */
   TU_CS_MODE_EXTERNAL,

   /*
    * A command stream in TU_CS_MODE_SUB_STREAM mode does not support direct
    * command packet emission. tu_cs_begin_sub_stream must be called to get a
    * sub-stream to emit command packets to. When done with the sub-stream,
    * tu_cs_end_sub_stream must be called.
    *
    * This mode does not create any entry internally.
    */
   TU_CS_MODE_SUB_STREAM,
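
/* Illustrative sketch of the two emission patterns described above. The call
 * shapes are assumptions made for the example, not signatures guaranteed by
 * this header:
 *
 *    // TU_CS_MODE_GROW: emit directly; the stream grows as needed.
 *    tu_cs_begin(&cs);
 *    ...emit command packets into &cs...
 *    tu_cs_end(&cs);
 *
 *    // TU_CS_MODE_SUB_STREAM: obtain a bounded sub-stream first.
 *    struct tu_cs sub;
 *    tu_cs_begin_sub_stream(&cs, reserved_size, &sub);
 *    ...emit command packets into &sub...
 *    tu_cs_end_sub_stream(&cs, &sub);
 */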
   uint32_t *reserved_end;

   enum tu_cs_mode mode;
   uint32_t next_bo_size;

   struct tu_cs_entry *entries;
   uint32_t entry_count;
   uint32_t entry_capacity;

   uint32_t bo_capacity;
struct tu_device_memory
{
   /* for dedicated allocations */
   struct tu_image *image;
   struct tu_buffer *buffer;
};

struct tu_descriptor_range

struct tu_descriptor_set
{
   const struct tu_descriptor_set_layout *layout;
   uint32_t *mapped_ptr;
   struct tu_descriptor_range *dynamic_descriptors;

   struct tu_bo *descriptors[0];
};

struct tu_push_descriptor_set
{
   struct tu_descriptor_set set;
};

struct tu_descriptor_pool_entry
{
   struct tu_descriptor_set *set;
};

struct tu_descriptor_pool
{
   uint64_t current_offset;

   uint8_t *host_memory_base;
   uint8_t *host_memory_ptr;
   uint8_t *host_memory_end;

   uint32_t entry_count;
   uint32_t max_entry_count;
   struct tu_descriptor_pool_entry entries[0];
};
struct tu_descriptor_update_template_entry
{
   VkDescriptorType descriptor_type;

   /* The number of descriptors to update */
   uint32_t descriptor_count;

   /* Into mapped_ptr or dynamic_descriptors, in units of the respective array
    */

   /* In dwords. Not valid/used for dynamic descriptors */

   uint32_t buffer_offset;

   /* Only valid for combined image samplers and samplers */
   uint16_t has_sampler;

   /* For push descriptors */
   const uint32_t *immutable_samplers;
};

struct tu_descriptor_update_template
{
   uint32_t entry_count;
   VkPipelineBindPoint bind_point;
   struct tu_descriptor_update_template_entry entry[0];
};
   VkBufferUsageFlags usage;
   VkBufferCreateFlags flags;

   VkDeviceSize bo_offset;

static inline uint64_t
tu_buffer_iova(struct tu_buffer *buffer)
{
   return buffer->bo->iova + buffer->bo_offset;
}

enum tu_dynamic_state_bits
{
   TU_DYNAMIC_VIEWPORT = 1 << 0,
   TU_DYNAMIC_SCISSOR = 1 << 1,
   TU_DYNAMIC_LINE_WIDTH = 1 << 2,
   TU_DYNAMIC_DEPTH_BIAS = 1 << 3,
   TU_DYNAMIC_BLEND_CONSTANTS = 1 << 4,
   TU_DYNAMIC_DEPTH_BOUNDS = 1 << 5,
   TU_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6,
   TU_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7,
   TU_DYNAMIC_STENCIL_REFERENCE = 1 << 8,
   TU_DYNAMIC_DISCARD_RECTANGLE = 1 << 9,
   TU_DYNAMIC_ALL = (1 << 10) - 1,
};
struct tu_vertex_binding
{
   struct tu_buffer *buffer;
};

struct tu_viewport_state
{
   VkViewport viewports[MAX_VIEWPORTS];
};

struct tu_scissor_state
{
   VkRect2D scissors[MAX_SCISSORS];
};

struct tu_discard_rectangle_state
{
   VkRect2D rectangles[MAX_DISCARD_RECTANGLES];
};

struct tu_dynamic_state
{
   /**
    * Bitmask of (1 << VK_DYNAMIC_STATE_*).
    * Defines the set of saved dynamic state.
    */

   struct tu_viewport_state viewport;

   struct tu_scissor_state scissor;

   float blend_constants[4];

   } stencil_compare_mask;

   } stencil_write_mask;

   struct tu_discard_rectangle_state discard_rectangle;
};

extern const struct tu_dynamic_state default_dynamic_state;
const char *
tu_get_debug_option_name(int id);

const char *
tu_get_perftest_option_name(int id);

struct tu_descriptor_state
{
   struct tu_descriptor_set *sets[MAX_SETS];
   struct tu_push_descriptor_set push_set;
   uint64_t dynamic_buffers[MAX_DYNAMIC_BUFFERS];
};
struct tu_tiling_config
{
   VkRect2D render_area;

   /* position and size of the first tile */
   /* number of tiles */
   VkExtent2D tile_count;

   /* size of the first VSC pipe */
   /* number of VSC pipes */
   VkExtent2D pipe_count;

   /* pipe register values */
   uint32_t pipe_config[MAX_VSC_PIPES];
   uint32_t pipe_sizes[MAX_VSC_PIPES];
};

enum tu_cmd_dirty_bits
{
   TU_CMD_DIRTY_PIPELINE = 1 << 0,
   TU_CMD_DIRTY_COMPUTE_PIPELINE = 1 << 1,
   TU_CMD_DIRTY_VERTEX_BUFFERS = 1 << 2,
   TU_CMD_DIRTY_DESCRIPTOR_SETS = 1 << 3,
   TU_CMD_DIRTY_PUSH_CONSTANTS = 1 << 4,

   TU_CMD_DIRTY_DYNAMIC_LINE_WIDTH = 1 << 16,
   TU_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 17,
   TU_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK = 1 << 18,
   TU_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE = 1 << 19,
};

   struct tu_pipeline *pipeline;
   struct tu_pipeline *compute_pipeline;

   struct tu_buffer *buffers[MAX_VBS];
   VkDeviceSize offsets[MAX_VBS];

   struct tu_dynamic_state dynamic;

   struct tu_buffer *index_buffer;
   uint64_t index_offset;
   uint32_t max_index_count;

   const struct tu_render_pass *pass;
   const struct tu_subpass *subpass;
   const struct tu_framebuffer *framebuffer;

   struct tu_tiling_config tiling_config;

   struct tu_cs_entry tile_load_ib;
   struct tu_cs_entry tile_store_ib;
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
   struct list_head free_cmd_buffers;
   uint32_t queue_family_index;

struct tu_cmd_buffer_upload
{
   struct list_head list;
};

enum tu_cmd_buffer_status
{
   TU_CMD_BUFFER_STATUS_INVALID,
   TU_CMD_BUFFER_STATUS_INITIAL,
   TU_CMD_BUFFER_STATUS_RECORDING,
   TU_CMD_BUFFER_STATUS_EXECUTABLE,
   TU_CMD_BUFFER_STATUS_PENDING,
};

   struct drm_msm_gem_submit_bo *bo_infos;
#define TU_BO_LIST_FAILED (~0)

void
tu_bo_list_init(struct tu_bo_list *list);
void
tu_bo_list_destroy(struct tu_bo_list *list);
void
tu_bo_list_reset(struct tu_bo_list *list);
uint32_t
tu_bo_list_add(struct tu_bo_list *list,
               const struct tu_bo *bo,
               uint32_t flags);
VkResult
tu_bo_list_merge(struct tu_bo_list *list, const struct tu_bo_list *other);
   VK_LOADER_DATA _loader_data;

   struct tu_device *device;

   struct tu_cmd_pool *pool;
   struct list_head pool_link;

   VkCommandBufferUsageFlags usage_flags;
   VkCommandBufferLevel level;
   enum tu_cmd_buffer_status status;

   struct tu_cmd_state state;
   struct tu_vertex_binding vertex_bindings[MAX_VBS];
   uint32_t queue_family_index;

   uint32_t push_constants[MAX_PUSH_CONSTANTS_SIZE / 4];
   VkShaderStageFlags push_constant_stages;
   struct tu_descriptor_set meta_push_descriptors;

   struct tu_descriptor_state descriptors[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

   struct tu_cmd_buffer_upload upload;

   VkResult record_result;

   struct tu_bo_list bo_list;

   struct tu_cs draw_cs;
   struct tu_cs draw_epilogue_cs;

   uint32_t marker_seqno;

   struct tu_bo scratch_bo;
   uint32_t scratch_seqno;
#define VSC_OVERFLOW 0x8
#define VSC_SCRATCH 0x10

   struct tu_bo vsc_data;
   struct tu_bo vsc_data2;
   uint32_t vsc_data_pitch;
   uint32_t vsc_data2_pitch;
/* Temporary struct for tracking a register state to be written, used by
 * a6xx-pack.h and tu_cs_emit_regs()
 */
struct tu_reg_value {

tu6_emit_event_write(struct tu_cmd_buffer *cmd,
                     enum vgt_event_type event,
                     bool need_seqno);

VkResult
tu_get_memory_fd(struct tu_device *device,
                 struct tu_device_memory *memory,
                 int *pFD);
static inline struct tu_descriptor_state *
tu_get_descriptors_state(struct tu_cmd_buffer *cmd_buffer,
                         VkPipelineBindPoint bind_point)
{
   return &cmd_buffer->descriptors[bind_point];
}
/*
 * Takes x,y,z as exact numbers of invocations, instead of blocks.
 *
 * Limitations: Can't call normal dispatch functions without binding or
 * rebinding the compute pipeline.
 */
void
tu_unaligned_dispatch(struct tu_cmd_buffer *cmd_buffer,
                      uint32_t x,
                      uint32_t y,
                      uint32_t z);

struct tu_shader_module;

#define TU_HASH_SHADER_IS_GEOM_COPY_SHADER (1 << 0)
#define TU_HASH_SHADER_SISCHED (1 << 1)
#define TU_HASH_SHADER_UNSAFE_MATH (1 << 2)
void
tu_hash_shaders(unsigned char *hash,
                const VkPipelineShaderStageCreateInfo **stages,
                const struct tu_pipeline_layout *layout,
                const struct tu_pipeline_key *key,
                uint32_t flags);
static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}

#define TU_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define tu_foreach_stage(stage, stage_bits)                                  \
   for (gl_shader_stage stage,                                               \
        __tmp = (gl_shader_stage)((stage_bits) & TU_STAGE_MASK);             \
        stage = __builtin_ffs(__tmp) - 1, __tmp; __tmp &= ~(1 << (stage)))
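
/* Illustrative: iterate only the shader stages present in a stage bitmask.
 * `active_stage_mask` and `compile_stage()` are placeholders, not part of
 * this header.
 *
 *    tu_foreach_stage(stage, active_stage_mask)
 *       compile_stage(pipeline, stage);
 */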
struct tu_shader_module
{
   unsigned char sha1[20];

   const uint32_t *code[0];
};

struct tu_shader_compile_options
{
   struct ir3_shader_key key;

   bool include_binning_pass;
};

struct tu_descriptor_map
{
   /* TODO: avoid fixed size array/justify the size */
   unsigned num;      /* number of array entries */
   unsigned num_desc; /* Number of descriptors (sum of array_size[]) */
};

   struct ir3_shader ir3_shader;

   struct tu_descriptor_map texture_map;
   struct tu_descriptor_map sampler_map;
   struct tu_descriptor_map ubo_map;
   struct tu_descriptor_map ssbo_map;
   struct tu_descriptor_map image_map;

   /* This may be true for vertex shaders. When true, variants[1] is the
    * binning variant and binning_binary is non-NULL.
    */
   bool has_binning_pass;

   void *binning_binary;

   struct ir3_shader_variant variants[0];
struct tu_shader *
tu_shader_create(struct tu_device *dev,
                 gl_shader_stage stage,
                 const VkPipelineShaderStageCreateInfo *stage_info,
                 struct tu_pipeline_layout *layout,
                 const VkAllocationCallbacks *alloc);

void
tu_shader_destroy(struct tu_device *dev,
                  struct tu_shader *shader,
                  const VkAllocationCallbacks *alloc);

void
tu_shader_compile_options_init(
   struct tu_shader_compile_options *options,
   const VkGraphicsPipelineCreateInfo *pipeline_info);

VkResult
tu_shader_compile(struct tu_device *dev,
                  struct tu_shader *shader,
                  const struct tu_shader *next_stage,
                  const struct tu_shader_compile_options *options,
                  const VkAllocationCallbacks *alloc);
struct tu_program_descriptor_linkage
{
   struct ir3_ubo_analysis_state ubo_state;
   struct ir3_const_state const_state;

   struct tu_descriptor_map texture_map;
   struct tu_descriptor_map sampler_map;
   struct tu_descriptor_map ubo_map;
   struct tu_descriptor_map ssbo_map;
   struct tu_descriptor_map image_map;
};

   struct tu_dynamic_state dynamic_state;

   struct tu_pipeline_layout *layout;

   bool need_indirect_descriptor_sets;
   VkShaderStageFlags active_stages;

   struct tu_bo binary_bo;
   struct tu_cs_entry state_ib;
   struct tu_cs_entry binning_state_ib;

   struct tu_program_descriptor_linkage link[MESA_SHADER_STAGES];

   uint8_t bindings[MAX_VERTEX_ATTRIBS];
   uint16_t strides[MAX_VERTEX_ATTRIBS];
   uint16_t offsets[MAX_VERTEX_ATTRIBS];

   uint8_t binning_bindings[MAX_VERTEX_ATTRIBS];
   uint16_t binning_strides[MAX_VERTEX_ATTRIBS];
   uint16_t binning_offsets[MAX_VERTEX_ATTRIBS];
   uint32_t binning_count;

   struct tu_cs_entry state_ib;
   struct tu_cs_entry binning_state_ib;

   enum pc_di_primtype primtype;
   bool primitive_restart;

   struct tu_cs_entry state_ib;

   uint32_t gras_su_cntl;
   struct tu_cs_entry state_ib;

   struct tu_cs_entry state_ib;

   struct tu_cs_entry state_ib;

   uint32_t local_size[3];
void
tu6_emit_viewport(struct tu_cs *cs, const VkViewport *viewport);

void
tu6_emit_scissor(struct tu_cs *cs, const VkRect2D *scissor);

void
tu6_emit_gras_su_cntl(struct tu_cs *cs,
                      uint32_t gras_su_cntl,
                      float line_width);

void
tu6_emit_depth_bias(struct tu_cs *cs,
                    float constant_factor,
                    float slope_factor);

void
tu6_emit_stencil_compare_mask(struct tu_cs *cs, uint32_t front, uint32_t back);

void
tu6_emit_stencil_write_mask(struct tu_cs *cs, uint32_t front, uint32_t back);

void
tu6_emit_stencil_reference(struct tu_cs *cs, uint32_t front, uint32_t back);

void
tu6_emit_blend_constants(struct tu_cs *cs, const float constants[4]);

struct tu_userdata_info *
tu_lookup_user_sgpr(struct tu_pipeline *pipeline,
                    gl_shader_stage stage,
                    int idx);

struct tu_shader_variant *
tu_get_shader(struct tu_pipeline *pipeline, gl_shader_stage stage);
struct tu_graphics_pipeline_create_info
{
   bool db_depth_clear;
   bool db_stencil_clear;
   bool db_depth_disable_expclear;
   bool db_stencil_disable_expclear;
   bool db_flush_depth_inplace;
   bool db_flush_stencil_inplace;
   bool db_resummarize;
   uint32_t custom_blend_mode;
};

struct tu_native_format
{
   int vtx;      /* VFMTn_xxx or -1 */
   int tex;      /* TFMTn_xxx or -1 */
   int rb;       /* RBn_xxx or -1 */
   int swap;     /* enum a3xx_color_swap */
   bool present; /* internal only; always true to external users */
};

const struct tu_native_format *
tu6_get_native_format(VkFormat format);
void
tu_pack_clear_value(const VkClearValue *val,
                    VkFormat format,
                    uint32_t buf[4]);

void
tu_2d_clear_color(const VkClearColorValue *val, VkFormat format, uint32_t buf[4]);

void
tu_2d_clear_zs(const VkClearDepthStencilValue *val, VkFormat format, uint32_t buf[4]);

enum a6xx_2d_ifmt tu6_fmt_to_ifmt(enum a6xx_format fmt);
enum a6xx_depth_format tu6_pipe2depth(VkFormat format);
struct tu_image_level
{
   VkDeviceSize offset;
};

   /* The original VkFormat provided by the client. This may not match any
    * of the actual surface formats.
    */
   VkImageAspectFlags aspects;
   VkImageUsageFlags usage;  /**< Superset of VkImageCreateInfo::usage. */
   VkImageTiling tiling;     /** VkImageCreateInfo::tiling */
   VkImageCreateFlags flags; /** VkImageCreateInfo::flags */

   uint32_t level_count;
   uint32_t layer_count;
   VkSampleCountFlagBits samples;

   struct fdl_layout layout;

   unsigned queue_family_mask;

   /* For VK_ANDROID_native_buffer, the WSI image owns the memory, */
   VkDeviceMemory owned_memory;

   /* Set when bound */
   VkDeviceSize bo_offset;

tu_image_queue_family_mask(const struct tu_image *image,
                           uint32_t queue_family);
static inline uint32_t
tu_get_layerCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->layerCount == VK_REMAINING_ARRAY_LAYERS
             ? image->layer_count - range->baseArrayLayer
             : range->layerCount;
}

static inline uint32_t
tu_get_levelCount(const struct tu_image *image,
                  const VkImageSubresourceRange *range)
{
   return range->levelCount == VK_REMAINING_MIP_LEVELS
             ? image->level_count - range->baseMipLevel
             : range->levelCount;
}
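
/* Illustrative: a subresource range using VK_REMAINING_MIP_LEVELS /
 * VK_REMAINING_ARRAY_LAYERS resolves to whatever the image has left past the
 * base level/layer. The range values below are only an example.
 *
 *    VkImageSubresourceRange range = {
 *       .baseMipLevel = 1,
 *       .levelCount = VK_REMAINING_MIP_LEVELS,
 *       .baseArrayLayer = 0,
 *       .layerCount = VK_REMAINING_ARRAY_LAYERS,
 *    };
 *    uint32_t levels = tu_get_levelCount(image, &range); // level_count - 1
 *    uint32_t layers = tu_get_layerCount(image, &range); // layer_count
 */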
static inline VkDeviceSize
tu_layer_size(struct tu_image *image, int level)
{
   return fdl_layer_stride(&image->layout, level);
}

static inline uint32_t
tu_image_stride(struct tu_image *image, int level)
{
   return image->layout.slices[level].pitch * image->layout.cpp;
}
static inline uint64_t
tu_image_base(struct tu_image *image, int level, int layer)
{
   return image->bo->iova + image->bo_offset +
          fdl_surface_offset(&image->layout, level, layer);
}

#define tu_image_base_ref(image, level, layer)                               \
   .bo = image->bo,                                                          \
   .bo_offset = (image->bo_offset + fdl_surface_offset(&image->layout,       \
                                                       level, layer))

#define tu_image_view_base_ref(iview)                                        \
   tu_image_base_ref(iview->image, iview->base_mip, iview->base_layer)
static inline VkDeviceSize
tu_image_ubwc_size(struct tu_image *image, int level)
{
   return image->layout.ubwc_layer_size;
}

static inline uint32_t
tu_image_ubwc_pitch(struct tu_image *image, int level)
{
   return image->layout.ubwc_slices[level].pitch;
}
static inline uint64_t
tu_image_ubwc_surface_offset(struct tu_image *image, int level, int layer)
{
   return image->layout.ubwc_slices[level].offset +
          layer * tu_image_ubwc_size(image, level);
}

static inline uint64_t
tu_image_ubwc_base(struct tu_image *image, int level, int layer)
{
   return image->bo->iova + image->bo_offset +
          tu_image_ubwc_surface_offset(image, level, layer);
}

#define tu_image_ubwc_base_ref(image, level, layer)                          \
   .bo = image->bo,                                                          \
   .bo_offset = (image->bo_offset + tu_image_ubwc_surface_offset(image,      \
                                                                 level, layer))

#define tu_image_view_ubwc_base_ref(iview)                                   \
   tu_image_ubwc_base_ref(iview->image, iview->base_mip, iview->base_layer)

enum a6xx_tile_mode
tu6_get_image_tile_mode(struct tu_image *image, int level);
enum a3xx_msaa_samples
tu_msaa_samples(uint32_t samples);
struct tu_image_view
{
   struct tu_image *image; /**< VkImageViewCreateInfo::image */

   VkImageViewType type;
   VkImageAspectFlags aspect_mask;

   uint32_t base_layer;
   uint32_t layer_count;
   uint32_t level_count;
   VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */

   uint32_t descriptor[A6XX_TEX_CONST_DWORDS];

   /* Descriptor for use as a storage image as opposed to a sampled image.
    * This has a few differences for cube maps (e.g. type).
    */
   uint32_t storage_descriptor[A6XX_TEX_CONST_DWORDS];
};

   uint32_t state[A6XX_TEX_SAMP_DWORDS];

   VkBorderColor border;
VkResult
tu_image_create(VkDevice _device,
                const VkImageCreateInfo *pCreateInfo,
                const VkAllocationCallbacks *alloc,
                VkImage *pImage);

VkResult
tu_image_from_gralloc(VkDevice device_h,
                      const VkImageCreateInfo *base_info,
                      const VkNativeBufferANDROID *gralloc_info,
                      const VkAllocationCallbacks *alloc,
                      VkImage *out_image_h);

void
tu_image_view_init(struct tu_image_view *view,
                   struct tu_device *device,
                   const VkImageViewCreateInfo *pCreateInfo);

struct tu_buffer_view
{
   uint32_t descriptor[A6XX_TEX_CONST_DWORDS];

   struct tu_buffer *buffer;
};
void
tu_buffer_view_init(struct tu_buffer_view *view,
                    struct tu_device *device,
                    const VkBufferViewCreateInfo *pCreateInfo);
static inline struct VkExtent3D
tu_sanitize_image_extent(const VkImageType imageType,
                         const struct VkExtent3D imageExtent)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkExtent3D) { imageExtent.width, 1, 1 };
   case VK_IMAGE_TYPE_2D:
      return (VkExtent3D) { imageExtent.width, imageExtent.height, 1 };
   case VK_IMAGE_TYPE_3D:
      return imageExtent;
   default:
      unreachable("invalid image type");
   }
}

static inline struct VkOffset3D
tu_sanitize_image_offset(const VkImageType imageType,
                         const struct VkOffset3D imageOffset)
{
   switch (imageType) {
   case VK_IMAGE_TYPE_1D:
      return (VkOffset3D) { imageOffset.x, 0, 0 };
   case VK_IMAGE_TYPE_2D:
      return (VkOffset3D) { imageOffset.x, imageOffset.y, 0 };
   case VK_IMAGE_TYPE_3D:
      return imageOffset;
   default:
      unreachable("invalid image type");
   }
}
struct tu_attachment_info
{
   struct tu_image_view *attachment;
};

struct tu_framebuffer
{
   uint32_t attachment_count;
   struct tu_attachment_info attachments[0];
};

struct tu_subpass_attachment
{
   uint32_t attachment;
};

   uint32_t input_count;
   uint32_t color_count;
   struct tu_subpass_attachment *input_attachments;
   struct tu_subpass_attachment *color_attachments;
   struct tu_subpass_attachment *resolve_attachments;
   struct tu_subpass_attachment depth_stencil_attachment;

   VkSampleCountFlagBits samples;

struct tu_render_pass_attachment
{
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
   VkAttachmentStoreOp store_op;
   VkAttachmentStoreOp stencil_store_op;
   int32_t gmem_offset;
};

struct tu_render_pass
{
   uint32_t attachment_count;
   uint32_t subpass_count;
   uint32_t gmem_pixels;
   struct tu_subpass_attachment *subpass_attachments;
   struct tu_render_pass_attachment *attachments;
   struct tu_subpass subpasses[0];
};
VkResult
tu_device_init_meta(struct tu_device *device);
void
tu_device_finish_meta(struct tu_device *device);

struct tu_query_pool
{
   uint32_t pipeline_statistics;
};

   uint32_t temp_syncobj;

void
tu_set_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                      VkPipelineBindPoint bind_point,
                      struct tu_descriptor_set *set,
                      unsigned idx);

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet overrideSet,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies);

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData);

void
tu_meta_push_descriptor_set(struct tu_cmd_buffer *cmd_buffer,
                            VkPipelineBindPoint pipelineBindPoint,
                            VkPipelineLayout _layout,
                            uint32_t set,
                            uint32_t descriptorWriteCount,
                            const VkWriteDescriptorSet *pDescriptorWrites);
int
tu_drm_get_gpu_id(const struct tu_physical_device *dev, uint32_t *id);

int
tu_drm_get_gmem_size(const struct tu_physical_device *dev, uint32_t *size);

int
tu_drm_submitqueue_new(const struct tu_device *dev,
                       uint32_t *queue_id);

void
tu_drm_submitqueue_close(const struct tu_device *dev, uint32_t queue_id);

uint32_t
tu_gem_new(const struct tu_device *dev, uint64_t size, uint32_t flags);
uint32_t
tu_gem_import_dmabuf(const struct tu_device *dev,
                     int prime_fd,
                     uint64_t size);
int
tu_gem_export_dmabuf(const struct tu_device *dev, uint32_t gem_handle);
void
tu_gem_close(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_offset(const struct tu_device *dev, uint32_t gem_handle);
uint64_t
tu_gem_info_iova(const struct tu_device *dev, uint32_t gem_handle);
#define TU_DEFINE_HANDLE_CASTS(__tu_type, __VkType)                          \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) _handle;                                   \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType) _obj;                                                \
   }

#define TU_DEFINE_NONDISP_HANDLE_CASTS(__tu_type, __VkType)                  \
                                                                             \
   static inline struct __tu_type *__tu_type##_from_handle(__VkType _handle) \
   {                                                                         \
      return (struct __tu_type *) (uintptr_t) _handle;                       \
   }                                                                         \
                                                                             \
   static inline __VkType __tu_type##_to_handle(struct __tu_type *_obj)      \
   {                                                                         \
      return (__VkType)(uintptr_t) _obj;                                     \
   }

#define TU_FROM_HANDLE(__tu_type, __name, __handle)                          \
   struct __tu_type *__name = __tu_type##_from_handle(__handle)
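
/* Illustrative: at a Vulkan entrypoint, convert the dispatchable handle back
 * into the driver's struct. The entrypoint shown is only an example of the
 * pattern, not a declaration made by this header.
 *
 *    void
 *    tu_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
 *    {
 *       TU_FROM_HANDLE(tu_cmd_buffer, cmd, commandBuffer);
 *       ...use cmd->state, cmd->device, etc...
 *    }
 */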
TU_DEFINE_HANDLE_CASTS(tu_cmd_buffer, VkCommandBuffer)
TU_DEFINE_HANDLE_CASTS(tu_device, VkDevice)
TU_DEFINE_HANDLE_CASTS(tu_instance, VkInstance)
TU_DEFINE_HANDLE_CASTS(tu_physical_device, VkPhysicalDevice)
TU_DEFINE_HANDLE_CASTS(tu_queue, VkQueue)

TU_DEFINE_NONDISP_HANDLE_CASTS(tu_cmd_pool, VkCommandPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer, VkBuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_buffer_view, VkBufferView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_pool, VkDescriptorPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set, VkDescriptorSet)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_set_layout,
                               VkDescriptorSetLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_descriptor_update_template,
                               VkDescriptorUpdateTemplate)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_device_memory, VkDeviceMemory)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_fence, VkFence)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_event, VkEvent)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_framebuffer, VkFramebuffer)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image, VkImage)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_image_view, VkImageView)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_cache, VkPipelineCache)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline, VkPipeline)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_pipeline_layout, VkPipelineLayout)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_query_pool, VkQueryPool)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_render_pass, VkRenderPass)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_sampler, VkSampler)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_shader_module, VkShaderModule)
TU_DEFINE_NONDISP_HANDLE_CASTS(tu_semaphore, VkSemaphore)

#endif /* TU_PRIVATE_H */