/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#pragma once

#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <pthread.h>
#include <assert.h>
#include <i915_drm.h>

#ifdef HAVE_VALGRIND
#include <valgrind.h>
#include <memcheck.h>
#define VG(x) x
#define __gen_validate_value(x) VALGRIND_CHECK_MEM_IS_DEFINED(&(x), sizeof(x))
#else
#define VG(x)
#endif

#include "brw_device_info.h"
#include "util/macros.h"
#include "util/list.h"
/* Pre-declarations needed for WSI entrypoints */
struct wl_surface;
struct wl_display;
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#define VK_USE_PLATFORM_XCB_KHR
#define VK_USE_PLATFORM_WAYLAND_KHR

#include <vulkan/vulkan.h>
#include <vulkan/vulkan_intel.h>

#include "anv_entrypoints.h"
#include "anv_gen_macros.h"
#include "brw_context.h"
#define ICD_LOADER_MAGIC 0x01CDC0DE

typedef union _VK_LOADER_DATA {
   uintptr_t loaderMagic;
   void *loaderData;
} VK_LOADER_DATA;

#define anv_noreturn __attribute__((__noreturn__))
#define anv_printflike(a, b) __attribute__((__format__(__printf__, a, b)))

#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   return (v + a - 1) & ~(a - 1);
}

static inline int32_t
align_i32(int32_t v, int32_t a)
{
   return (v + a - 1) & ~(a - 1);
}
/** Alignment must be a power of 2. */
static inline bool
anv_is_aligned(uintmax_t n, uintmax_t a)
{
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}
static inline uint32_t
anv_minify(uint32_t n, uint32_t levels)
{
   if (unlikely(n == 0))
      return 0;
   else
      return MAX(n >> levels, 1);
}
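/* Worked examples (illustrative values, not from the driver):
 *
 *    align_u32(13, 8);        // => 16
 *    anv_is_aligned(16, 8);   // => true
 *    anv_minify(256, 2);      // => 64, the level-2 mip dimension
 *    anv_minify(1, 5);        // => 1, dimensions clamp at 1
 */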
static inline float
anv_clamp_f(float f, float min, float max)
{
   if (f < min)
      return min;
   else if (f > max)
      return max;
   else
      return f;
}
static inline bool
anv_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   } else {
      return false;
   }
}
#define for_each_bit(b, dword)                          \
   for (uint32_t __dword = (dword);                     \
        (b) = __builtin_ffs(__dword) - 1, __dword;      \
        __dword &= ~(1 << (b)))
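/* Usage sketch (hypothetical mask value): visits each set bit, lowest
 * first:
 *
 *    uint32_t b;
 *    for_each_bit(b, 0x15)
 *       printf("%u ", b);   // prints "0 2 4"
 */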
#define typed_memcpy(dest, src, count) ({               \
   static_assert(sizeof(*src) == sizeof(*dest), "");    \
   memcpy((dest), (src), (count) * sizeof(*(src)));     \
})
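/* Usage sketch (hypothetical arrays): behaves like memcpy but refuses to
 * compile if the element sizes differ:
 *
 *    VkViewport dst[4], src[4];
 *    typed_memcpy(dst, src, 4);
 */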
/* Define no kernel as 1, since that's an illegal offset for a kernel */
#define NO_KERNEL 1

struct anv_common {
   VkStructureType sType;
   const void *pNext;
};
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */
VkResult __vk_errorf(VkResult error, const char *file, int line,
                     const char *format, ...);

#ifdef DEBUG
#define vk_error(error) __vk_errorf(error, __FILE__, __LINE__, NULL);
#define vk_errorf(error, format, ...) __vk_errorf(error, __FILE__, __LINE__, format, ## __VA_ARGS__);
#else
#define vk_error(error) error
#define vk_errorf(error, format, ...) error
#endif
void __anv_finishme(const char *file, int line, const char *format, ...)
   anv_printflike(3, 4);
void anv_loge(const char *format, ...) anv_printflike(1, 2);
void anv_loge_v(const char *format, va_list va);

/**
 * Print a FINISHME message, including its source location.
 */
#define anv_finishme(format, ...) \
   __anv_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);
/* A non-fatal assert. Useful for debugging. */
#ifdef DEBUG
#define anv_assert(x) ({ \
   if (unlikely(!(x))) \
      fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x); \
})
#else
#define anv_assert(x)
#endif

/**
 * If a block of code is annotated with anv_validate, then the block runs only
 * in debug builds.
 */
#ifdef DEBUG
#define anv_validate if (1)
#else
#define anv_validate if (0)
#endif
void anv_abortf(const char *format, ...) anv_noreturn anv_printflike(1, 2);
void anv_abortfv(const char *format, va_list va) anv_noreturn;

#define stub_return(v) \
   do { \
      anv_finishme("stub %s", __func__); \
      return (v); \
   } while (0)

#define stub() \
   do { \
      anv_finishme("stub %s", __func__); \
      return; \
   } while (0)
/**
 * A dynamically growable, circular buffer. Elements are added at head and
 * removed from tail. head and tail are free-running uint32_t indices and we
 * only compute the modulo with size when accessing the array. This way,
 * number of bytes in the queue is always head - tail, even in case of
 * wraparound.
 */
struct anv_vector {
   uint32_t head;
   uint32_t tail;
   uint32_t element_size;
   uint32_t size;
   void *data;
};
int anv_vector_init(struct anv_vector *queue, uint32_t element_size,
                    uint32_t size);
void *anv_vector_add(struct anv_vector *queue);
void *anv_vector_remove(struct anv_vector *queue);
static inline int
anv_vector_length(struct anv_vector *queue)
{
   return (queue->head - queue->tail) / queue->element_size;
}
static inline void *
anv_vector_head(struct anv_vector *vector)
{
   assert(vector->tail < vector->head);
   return (void *)((char *)vector->data +
                   ((vector->head - vector->element_size) &
                    (vector->size - 1)));
}
static inline void *
anv_vector_tail(struct anv_vector *vector)
{
   return (void *)((char *)vector->data + (vector->tail & (vector->size - 1)));
}
static inline void
anv_vector_finish(struct anv_vector *queue)
{
   free(queue->data);
}
#define anv_vector_foreach(elem, queue)                                  \
   static_assert(__builtin_types_compatible_p(__typeof__(queue), struct anv_vector *), ""); \
   for (uint32_t __anv_vector_offset = (queue)->tail;                                \
        elem = (queue)->data + (__anv_vector_offset & ((queue)->size - 1)), __anv_vector_offset < (queue)->head; \
        __anv_vector_offset += (queue)->element_size)
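/* Usage sketch (hypothetical element type and sizes): a queue of int32_t
 * offsets, added at head and walked from tail:
 *
 *    struct anv_vector v;
 *    anv_vector_init(&v, sizeof(int32_t), 4096);
 *    *(int32_t *)anv_vector_add(&v) = block_offset;
 *
 *    int32_t *elem;
 *    anv_vector_foreach(elem, &v)
 *       use_block(*elem);   // hypothetical helper
 *
 *    anv_vector_finish(&v);
 */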
struct anv_bo {
   int gem_handle;

   /* Index into the current validation list. This is used by the
    * validation list building algorithm to track which buffers are already
    * in the validation list so that we can ensure uniqueness.
    */
   uint32_t index;

   /* Last known offset. This value is provided by the kernel when we
    * execbuf and is used as the presumed offset for the next bunch of
    * relocations.
    */
   uint64_t offset;

   uint64_t size;
   void *map;
};
/* Represents a lock-free linked list of "free" things. This is used by
 * both the block pool and the state pools. Unfortunately, in order to
 * solve the ABA problem, we can't use a single uint32_t head.
 */
union anv_free_list {
   struct {
      int32_t offset;

      /* A simple count that is incremented every time the head changes. */
      uint32_t count;
   };
   uint64_t u64;
};

#define ANV_FREE_LIST_EMPTY ((union anv_free_list) { { 1, 0 } })
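/* Sketch of the pop path this layout enables (an assumed shape, not a copy
 * of the implementation): the offset and counter halves are swapped together
 * as one 64-bit value, so a concurrent pop-then-push of the same offset
 * still changes the counter and the CAS fails as it should:
 *
 *    union anv_free_list current = { .u64 = list->u64 };
 *    union anv_free_list new;
 *    new.offset = first_free_element(current);   // hypothetical helper
 *    new.count = current.count + 1;              // defeats ABA
 *    __sync_bool_compare_and_swap(&list->u64, current.u64, new.u64);
 */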
struct anv_block_state {
   union {
      struct {
         uint32_t next;
         uint32_t end;
      };
      uint64_t u64;
   };
};
struct anv_block_pool {
   struct anv_device *device;

   struct anv_bo bo;

   /* The offset from the start of the bo to the "center" of the block
    * pool. Pointers to allocated blocks are given by
    * bo.map + center_bo_offset + offsets.
    */
   uint32_t center_bo_offset;

   /* Current memory map of the block pool. This pointer may or may not
    * point to the actual beginning of the block pool memory. If
    * anv_block_pool_alloc_back has ever been called, then this pointer
    * will point to the "center" position of the buffer and all offsets
    * (negative or positive) given out by the block pool alloc functions
    * will be valid relative to this pointer.
    *
    * In particular, map == bo.map + center_offset
    */
   void *map;
   int fd;

   /**
    * Array of mmaps and gem handles owned by the block pool, reclaimed when
    * the block pool is destroyed.
    */
   struct anv_vector mmap_cleanups;

   uint32_t block_size;

   union anv_free_list free_list;
   struct anv_block_state state;

   union anv_free_list back_free_list;
   struct anv_block_state back_state;
};
/* Block pools are backed by a fixed-size 4GB memfd */
#define BLOCK_POOL_MEMFD_SIZE (1ull << 32)

/* The center of the block pool is also the middle of the memfd. This may
 * change in the future if we decide differently for some reason.
 */
#define BLOCK_POOL_MEMFD_CENTER (BLOCK_POOL_MEMFD_SIZE / 2)
static inline uint32_t
anv_block_pool_size(struct anv_block_pool *pool)
{
   return pool->state.end + pool->back_state.end;
}
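/* Worked example (illustrative numbers): with 4KB blocks, three front
 * allocations return offsets 0, 4096 and 8192 (state.end == 12288) and one
 * back allocation returns -4096 (back_state.end == 4096), so
 * anv_block_pool_size() reports 16384. All offsets are relative to map,
 * which points at the pool's center.
 */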
struct anv_state {
   int32_t offset;
   uint32_t alloc_size;
   void *map;
};

struct anv_fixed_size_state_pool {
   size_t state_size;
   union anv_free_list free_list;
   struct anv_block_state block;
};
#define ANV_MIN_STATE_SIZE_LOG2 6
#define ANV_MAX_STATE_SIZE_LOG2 10

#define ANV_STATE_BUCKETS (ANV_MAX_STATE_SIZE_LOG2 - ANV_MIN_STATE_SIZE_LOG2)
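/* That is, state sizes range from 2^6 = 64 bytes up to 2^10 = 1024 bytes,
 * bucketed by power of two.
 */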
struct anv_state_pool {
   struct anv_block_pool *block_pool;
   struct anv_fixed_size_state_pool buckets[ANV_STATE_BUCKETS];
};
struct anv_state_stream {
   struct anv_block_pool *block_pool;
   uint32_t next;
   uint32_t current_block;
   uint32_t end;
};
void anv_block_pool_init(struct anv_block_pool *pool,
                         struct anv_device *device, uint32_t block_size);
void anv_block_pool_finish(struct anv_block_pool *pool);
int32_t anv_block_pool_alloc(struct anv_block_pool *pool);
int32_t anv_block_pool_alloc_back(struct anv_block_pool *pool);
void anv_block_pool_free(struct anv_block_pool *pool, int32_t offset);
void anv_state_pool_init(struct anv_state_pool *pool,
                         struct anv_block_pool *block_pool);
void anv_state_pool_finish(struct anv_state_pool *pool);
struct anv_state anv_state_pool_alloc(struct anv_state_pool *pool,
                                      size_t state_size, size_t alignment);
void anv_state_pool_free(struct anv_state_pool *pool, struct anv_state state);
void anv_state_stream_init(struct anv_state_stream *stream,
                           struct anv_block_pool *block_pool);
void anv_state_stream_finish(struct anv_state_stream *stream);
struct anv_state anv_state_stream_alloc(struct anv_state_stream *stream,
                                        uint32_t size, uint32_t alignment);
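/* Usage sketch (hypothetical device pointer): the allocators layer onto one
 * another; a state pool always sits on top of a block pool:
 *
 *    struct anv_block_pool block_pool;
 *    struct anv_state_pool state_pool;
 *    anv_block_pool_init(&block_pool, device, 4096);
 *    anv_state_pool_init(&state_pool, &block_pool);
 *
 *    struct anv_state s = anv_state_pool_alloc(&state_pool, 64, 32);
 *    anv_state_pool_free(&state_pool, s);
 */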
/**
 * Implements a pool of re-usable BOs. The interface is identical to that
 * of block_pool except that each block is its own BO.
 */
struct anv_bo_pool {
   struct anv_device *device;

   uint32_t bo_size;

   void *free_list;
};
void anv_bo_pool_init(struct anv_bo_pool *pool,
                      struct anv_device *device, uint32_t block_size);
void anv_bo_pool_finish(struct anv_bo_pool *pool);
VkResult anv_bo_pool_alloc(struct anv_bo_pool *pool, struct anv_bo *bo);
void anv_bo_pool_free(struct anv_bo_pool *pool, const struct anv_bo *bo);
void *anv_resolve_entrypoint(uint32_t index);

extern struct anv_dispatch_table dtable;
#define ANV_CALL(func) ({ \
   if (dtable.func == NULL) { \
      size_t idx = offsetof(struct anv_dispatch_table, func) / sizeof(void *); \
      dtable.entrypoints[idx] = anv_resolve_entrypoint(idx); \
   } \
   dtable.func; \
})
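/* Usage sketch: the entrypoint is resolved lazily on first use, so a call
 * site reads like an ordinary dispatch (illustrative arguments):
 *
 *    ANV_CALL(CmdDraw)(commandBuffer, 3, 1, 0, 0);
 */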
static inline void *
anv_alloc(const VkAllocationCallbacks *alloc,
          size_t size, size_t align,
          VkSystemAllocationScope scope)
{
   return alloc->pfnAllocation(alloc->pUserData, size, align, scope);
}
static inline void *
anv_realloc(const VkAllocationCallbacks *alloc,
            void *ptr, size_t size, size_t align,
            VkSystemAllocationScope scope)
{
   return alloc->pfnReallocation(alloc->pUserData, ptr, size, align, scope);
}
static inline void
anv_free(const VkAllocationCallbacks *alloc, void *data)
{
   alloc->pfnFree(alloc->pUserData, data);
}
static inline void *
anv_alloc2(const VkAllocationCallbacks *parent_alloc,
           const VkAllocationCallbacks *alloc,
           size_t size, size_t align,
           VkSystemAllocationScope scope)
{
   if (alloc)
      return anv_alloc(alloc, size, align, scope);
   else
      return anv_alloc(parent_alloc, size, align, scope);
}
static inline void
anv_free2(const VkAllocationCallbacks *parent_alloc,
          const VkAllocationCallbacks *alloc,
          void *data)
{
   if (alloc)
      anv_free(alloc, data);
   else
      anv_free(parent_alloc, data);
}
struct anv_physical_device {
   VK_LOADER_DATA _loader_data;

   struct anv_instance *instance;
   uint32_t chipset_id;
   const char *path;
   const char *name;
   const struct brw_device_info *info;
   uint64_t aperture_size;
   struct brw_compiler *compiler;
   struct isl_device isl_dev;
};
struct anv_instance {
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   uint32_t apiVersion;
   int physicalDeviceCount;
   struct anv_physical_device physicalDevice;
};
VkResult anv_init_wsi(struct anv_instance *instance);
void anv_finish_wsi(struct anv_instance *instance);
struct anv_meta_state {
   struct {
      struct anv_pipeline *color_pipeline;
      struct anv_pipeline *depth_only_pipeline;
      struct anv_pipeline *stencil_only_pipeline;
      struct anv_pipeline *depthstencil_pipeline;
   } clear;

   struct {
      VkRenderPass render_pass;

      /** Pipeline that blits from a 2D image. */
      VkPipeline pipeline_2d_src;

      /** Pipeline that blits from a 3D image. */
      VkPipeline pipeline_3d_src;

      VkPipelineLayout pipeline_layout;
      VkDescriptorSetLayout ds_layout;
   } blit;
};
struct anv_queue {
   VK_LOADER_DATA _loader_data;

   struct anv_device *device;

   struct anv_state_pool *pool;
};
struct anv_device {
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   struct anv_instance *instance;
   uint32_t chipset_id;
   struct brw_device_info info;
   struct isl_device isl_dev;
   int context_id;
   int fd;

   struct anv_bo_pool batch_bo_pool;

   struct anv_block_pool dynamic_state_block_pool;
   struct anv_state_pool dynamic_state_pool;

   struct anv_block_pool instruction_block_pool;
   struct anv_block_pool surface_state_block_pool;
   struct anv_state_pool surface_state_pool;

   struct anv_bo workaround_bo;

   struct anv_meta_state meta_state;

   struct anv_state border_colors;

   struct anv_queue queue;

   struct anv_block_pool scratch_block_pool;

   pthread_mutex_t mutex;
};
void *anv_gem_mmap(struct anv_device *device,
                   uint32_t gem_handle, uint64_t offset, uint64_t size);
void anv_gem_munmap(void *p, uint64_t size);
uint32_t anv_gem_create(struct anv_device *device, size_t size);
void anv_gem_close(struct anv_device *device, int gem_handle);
int anv_gem_userptr(struct anv_device *device, void *mem, size_t size);
int anv_gem_wait(struct anv_device *device, int gem_handle, int64_t *timeout_ns);
int anv_gem_execbuffer(struct anv_device *device,
                       struct drm_i915_gem_execbuffer2 *execbuf);
int anv_gem_set_tiling(struct anv_device *device, int gem_handle,
                       uint32_t stride, uint32_t tiling);
int anv_gem_create_context(struct anv_device *device);
int anv_gem_destroy_context(struct anv_device *device, int context);
int anv_gem_get_param(int fd, uint32_t param);
int anv_gem_get_aperture(int fd, uint64_t *size);
int anv_gem_handle_to_fd(struct anv_device *device, int gem_handle);
int anv_gem_fd_to_handle(struct anv_device *device, int fd);
VkResult anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size);
struct anv_reloc_list {
   size_t num_relocs;
   size_t array_length;
   struct drm_i915_gem_relocation_entry *relocs;
   struct anv_bo **reloc_bos;
};
VkResult anv_reloc_list_init(struct anv_reloc_list *list,
                             const VkAllocationCallbacks *alloc);
void anv_reloc_list_finish(struct anv_reloc_list *list,
                           const VkAllocationCallbacks *alloc);

uint64_t anv_reloc_list_add(struct anv_reloc_list *list,
                            const VkAllocationCallbacks *alloc,
                            uint32_t offset, struct anv_bo *target_bo,
                            uint32_t delta);
struct anv_batch_bo {
   /* Link in the anv_cmd_buffer.owned_batch_bos list */
   struct list_head link;

   struct anv_bo bo;

   /* Bytes actually consumed in this batch BO */
   size_t length;

   /* Last seen surface state block pool bo offset */
   uint32_t last_ss_pool_bo_offset;

   struct anv_reloc_list relocs;
};
struct anv_batch {
   const VkAllocationCallbacks *alloc;

   void *start;
   void *end;
   void *next;

   struct anv_reloc_list *relocs;

   /* This callback is called (with the associated user data) in the event
    * that the batch runs out of space.
    */
   VkResult (*extend_cb)(struct anv_batch *, void *);
   void *user_data;
};
void *anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords);
void anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other);
uint64_t anv_batch_emit_reloc(struct anv_batch *batch,
                              void *location, struct anv_bo *bo, uint32_t offset);
struct anv_address {
   struct anv_bo *bo;
   uint32_t offset;
};

#define __gen_address_type struct anv_address
#define __gen_user_data struct anv_batch
static inline uint64_t
__gen_combine_address(struct anv_batch *batch, void *location,
                      const struct anv_address address, uint32_t delta)
{
   if (address.bo == NULL) {
      return address.offset + delta;
   } else {
      assert(batch->start <= location && location < batch->end);

      return anv_batch_emit_reloc(batch, location, address.bo, address.offset + delta);
   }
}
/* Wrapper macros needed to work around preprocessor argument issues. In
 * particular, arguments don't get pre-evaluated if they are concatenated.
 * This means that, if you pass GENX(3DSTATE_PS) into the emit macro, the
 * GENX macro won't get evaluated if the emit macro contains "cmd ## foo".
 * We can work around this easily enough with these helpers.
 */
#define __anv_cmd_length(cmd) cmd ## _length
#define __anv_cmd_length_bias(cmd) cmd ## _length_bias
#define __anv_cmd_header(cmd) cmd ## _header
#define __anv_cmd_pack(cmd) cmd ## _pack
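/* For example, inside anv_batch_emit(batch, GENX(3DSTATE_PS), ...) the
 * parameter is passed to __anv_cmd_length() as a normal argument, so
 * GENX(3DSTATE_PS) is expanded to, say, GEN8_3DSTATE_PS before the helper
 * performs the "## _length" concatenation, yielding GEN8_3DSTATE_PS_length.
 * Writing "cmd ## _length" directly in the emit macro would concatenate the
 * unexpanded GENX token instead.
 */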
#define anv_batch_emit(batch, cmd, ...) do {                               \
      void *__dst = anv_batch_emit_dwords(batch, __anv_cmd_length(cmd));   \
      struct cmd __template = {                                            \
         __anv_cmd_header(cmd),                                            \
         __VA_ARGS__                                                       \
      };                                                                   \
      __anv_cmd_pack(cmd)(batch, __dst, &__template);                      \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(__dst, __anv_cmd_length(cmd) * 4)); \
   } while (0)
#define anv_batch_emitn(batch, n, cmd, ...) ({             \
      void *__dst = anv_batch_emit_dwords(batch, n);       \
      struct cmd __template = {                            \
         __anv_cmd_header(cmd),                            \
         .DwordLength = n - __anv_cmd_length_bias(cmd),    \
         __VA_ARGS__                                       \
      };                                                   \
      __anv_cmd_pack(cmd)(batch, __dst, &__template);      \
      __dst;                                               \
   })
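/* Usage sketch (GEN8 packet chosen for illustration): designated
 * initializers fill in the template after the header:
 *
 *    anv_batch_emit(&batch, GEN8_PIPE_CONTROL,
 *                   .CommandStreamerStallEnable = true);
 */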
#define anv_batch_emit_merge(batch, dwords0, dwords1)                   \
   do {                                                                 \
      uint32_t *dw;                                                     \
                                                                        \
      static_assert(ARRAY_SIZE(dwords0) == ARRAY_SIZE(dwords1), "mismatch merge"); \
      dw = anv_batch_emit_dwords((batch), ARRAY_SIZE(dwords0));         \
      for (uint32_t i = 0; i < ARRAY_SIZE(dwords0); i++)                \
         dw[i] = (dwords0)[i] | (dwords1)[i];                           \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(dw, ARRAY_SIZE(dwords0) * 4));   \
   } while (0)
#define GEN7_MOCS (struct GEN7_MEMORY_OBJECT_CONTROL_STATE) {  \
      .GraphicsDataTypeGFDT = 0,                               \
      .LLCCacheabilityControlLLCCC = 0,                        \
      .L3CacheabilityControlL3CC = 1,                          \
   }

#define GEN75_MOCS (struct GEN75_MEMORY_OBJECT_CONTROL_STATE) {  \
      .LLCeLLCCacheabilityControlLLCCC = 0,                      \
      .L3CacheabilityControlL3CC = 1,                            \
   }
#define GEN8_MOCS {                                     \
      .MemoryTypeLLCeLLCCacheabilityControl = WB,       \
      .TargetCache = L3DefertoPATforLLCeLLCselection,   \
      .AgeforQUADLRU = 0                                \
   }
/* Skylake: MOCS is now an index into an array of 62 different caching
 * configurations programmed by the kernel.
 */
#define GEN9_MOCS {                                     \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */       \
      .IndextoMOCSTables = 2                            \
   }

#define GEN9_MOCS_PTE {                                 \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */       \
      .IndextoMOCSTables = 1                            \
   }
struct anv_device_memory {
   struct anv_bo bo;
   VkDeviceSize map_size;
   void *map;
};
/**
 * Header for Vertex URB Entry (VUE)
 */
struct anv_vue_header {
   uint32_t Reserved;
   uint32_t RTAIndex; /* RenderTargetArrayIndex */
   uint32_t ViewportIndex;
   float PointWidth;
};
struct anv_descriptor_set_binding_layout {
   /* Number of array elements in this binding */
   uint16_t array_size;

   /* Index into the flattened descriptor set */
   uint16_t descriptor_index;

   /* Index into the dynamic state array for a dynamic buffer */
   int16_t dynamic_offset_index;

   struct {
      /* Index into the binding table for the associated surface */
      int16_t surface_index;

      /* Index into the sampler table for the associated sampler */
      int16_t sampler_index;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct anv_sampler **immutable_samplers;
};
struct anv_descriptor_set_layout {
   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set */
   struct anv_descriptor_set_binding_layout binding[0];
};
struct anv_descriptor {
   VkDescriptorType type;

   union {
      struct {
         struct anv_image_view *image_view;
         struct anv_sampler *sampler;
      };

      struct {
         struct anv_buffer *buffer;
         uint64_t offset;
         uint64_t range;
      };
   };
};

struct anv_descriptor_set {
   const struct anv_descriptor_set_layout *layout;
   struct anv_descriptor descriptors[0];
};
VkResult
anv_descriptor_set_create(struct anv_device *device,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set);

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_set *set);
#define MAX_VBS 32
#define MAX_SETS 8
#define MAX_RTS 8
#define MAX_VIEWPORTS 16
#define MAX_SCISSORS 16
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_DYNAMIC_BUFFERS 16
#define MAX_IMAGES 8
struct anv_pipeline_binding {
   /* The descriptor set this surface corresponds to */
   uint16_t set;

   /* Offset into the descriptor set */
   uint16_t offset;
};
struct anv_pipeline_layout {
   struct {
      struct anv_descriptor_set_layout *layout;
      uint32_t dynamic_offset_start;

      struct {
         uint32_t surface_start;
         uint32_t sampler_start;
      } stage[MESA_SHADER_STAGES];
   } set[MAX_SETS];

   uint32_t num_sets;

   struct {
      bool has_dynamic_offsets;
      uint32_t surface_count;
      struct anv_pipeline_binding *surface_to_descriptor;
      uint32_t sampler_count;
      struct anv_pipeline_binding *sampler_to_descriptor;
   } stage[MESA_SHADER_STAGES];

   struct anv_pipeline_binding entries[0];
};
struct anv_buffer {
   struct anv_device *device;
   VkDeviceSize size;

   /* Set when bound */
   struct anv_bo *bo;
   VkDeviceSize offset;
};
enum anv_cmd_dirty_bits {
   ANV_CMD_DIRTY_DYNAMIC_VIEWPORT             = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
   ANV_CMD_DIRTY_DYNAMIC_SCISSOR              = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
   ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH           = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS           = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
   ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS      = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS         = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
   ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
   ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK   = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
   ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE    = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
   ANV_CMD_DIRTY_DYNAMIC_ALL                  = (1 << 9) - 1,
   ANV_CMD_DIRTY_PIPELINE                     = 1 << 9,
   ANV_CMD_DIRTY_INDEX_BUFFER                 = 1 << 10,
   ANV_CMD_DIRTY_RENDER_TARGETS               = 1 << 11,
};
typedef uint32_t anv_cmd_dirty_mask_t;
struct anv_vertex_binding {
   struct anv_buffer *buffer;
   VkDeviceSize offset;
};
struct anv_push_constants {
   /* Current allocated size of this push constants data structure.
    * Because a decent chunk of it may not be used (images on SKL, for
    * instance), we won't actually allocate the entire structure up-front.
    */
   uint32_t size;

   /* Push constant data provided by the client through vkPushConstants */
   uint8_t client_data[MAX_PUSH_CONSTANTS_SIZE];

   /* Our hardware only provides zero-based vertex and instance id so, in
    * order to satisfy the vulkan requirements, we may have to push one or
    * both of these into the shader.
    */
   uint32_t base_vertex;
   uint32_t base_instance;

   /* Offsets and ranges for dynamically bound buffers */
   struct {
      uint32_t offset;
      uint32_t range;
   } dynamic[MAX_DYNAMIC_BUFFERS];

   /* Image data for image_load_store on pre-SKL */
   struct brw_image_param images[MAX_IMAGES];
};
struct anv_dynamic_state {
   struct {
      uint32_t count;
      VkViewport viewports[MAX_VIEWPORTS];
   } viewport;

   struct {
      uint32_t count;
      VkRect2D scissors[MAX_SCISSORS];
   } scissor;

   float line_width;

   struct {
      float bias;
      float clamp;
      float slope;
   } depth_bias;

   float blend_constants[4];

   struct {
      float min;
      float max;
   } depth_bounds;

   struct {
      uint32_t front;
      uint32_t back;
   } stencil_compare_mask;

   struct {
      uint32_t front;
      uint32_t back;
   } stencil_write_mask;

   struct {
      uint32_t front;
      uint32_t back;
   } stencil_reference;
};

extern const struct anv_dynamic_state default_dynamic_state;
void anv_dynamic_state_copy(struct anv_dynamic_state *dest,
                            const struct anv_dynamic_state *src,
                            uint32_t copy_mask);
/** State required while building cmd buffer */
struct anv_cmd_state {
   uint32_t current_pipeline;
   uint32_t vb_dirty;
   anv_cmd_dirty_mask_t dirty;
   anv_cmd_dirty_mask_t compute_dirty;
   VkShaderStageFlags descriptors_dirty;
   VkShaderStageFlags push_constants_dirty;
   uint32_t scratch_size;
   struct anv_pipeline *pipeline;
   struct anv_pipeline *compute_pipeline;
   struct anv_framebuffer *framebuffer;
   struct anv_render_pass *pass;
   struct anv_subpass *subpass;
   uint32_t restart_index;
   struct anv_vertex_binding vertex_bindings[MAX_VBS];
   struct anv_descriptor_set *descriptors[MAX_SETS];
   struct anv_push_constants *push_constants[MESA_SHADER_STAGES];
   struct anv_dynamic_state dynamic;

   struct {
      struct anv_buffer *index_buffer;
      uint32_t index_type; /**< 3DSTATE_INDEX_BUFFER.IndexFormat */
      uint32_t index_offset;
   } gen7;
};
struct anv_cmd_pool {
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
};
#define ANV_CMD_BUFFER_BATCH_SIZE 8192

enum anv_cmd_buffer_exec_mode {
   ANV_CMD_BUFFER_EXEC_MODE_PRIMARY,
   ANV_CMD_BUFFER_EXEC_MODE_EMIT,
   ANV_CMD_BUFFER_EXEC_MODE_CHAIN,
   ANV_CMD_BUFFER_EXEC_MODE_COPY_AND_CHAIN,
};
struct anv_cmd_buffer {
   VK_LOADER_DATA _loader_data;

   struct anv_device *device;

   struct anv_cmd_pool *pool;
   struct list_head pool_link;

   struct anv_batch batch;

   /* Fields required for the actual chain of anv_batch_bo's.
    *
    * These fields are initialized by anv_cmd_buffer_init_batch_bo_chain().
    */
   struct list_head batch_bos;
   enum anv_cmd_buffer_exec_mode exec_mode;

   /* A vector of anv_batch_bo pointers for every batch or surface buffer
    * referenced by this command buffer
    *
    * initialized by anv_cmd_buffer_init_batch_bo_chain()
    */
   struct anv_vector seen_bbos;

   /* A vector of int32_t's for every block of binding tables.
    *
    * initialized by anv_cmd_buffer_init_batch_bo_chain()
    */
   struct anv_vector bt_blocks;
   uint32_t bt_next;
   struct anv_reloc_list surface_relocs;

   /* Information needed for execbuf
    *
    * These fields are generated by anv_cmd_buffer_prepare_execbuf().
    */
   struct {
      struct drm_i915_gem_execbuffer2 execbuf;

      struct drm_i915_gem_exec_object2 *objects;
      uint32_t bo_count;
      struct anv_bo **bos;

      /* Allocated length of the 'objects' and 'bos' arrays */
      uint32_t array_length;

      bool need_reloc;
   } execbuf2;

   /* Serial for tracking buffer completion */
   uint32_t serial;

   /* Stream objects for storing temporary data */
   struct anv_state_stream surface_state_stream;
   struct anv_state_stream dynamic_state_stream;

   VkCommandBufferUsageFlags usage_flags;
   VkCommandBufferLevel level;

   struct anv_cmd_state state;
};
VkResult anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_fini_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_reset_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_end_batch_buffer(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_add_secondary(struct anv_cmd_buffer *primary,
                                  struct anv_cmd_buffer *secondary);
void anv_cmd_buffer_prepare_execbuf(struct anv_cmd_buffer *cmd_buffer);
VkResult anv_cmd_buffer_emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
                                           unsigned stage, struct anv_state *bt_state);
VkResult anv_cmd_buffer_emit_samplers(struct anv_cmd_buffer *cmd_buffer,
                                      unsigned stage, struct anv_state *state);
void gen7_cmd_buffer_flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer);
struct anv_state anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
                                             uint32_t *a, uint32_t dwords,
                                             uint32_t alignment);
struct anv_state anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
                                              uint32_t *a, uint32_t *b,
                                              uint32_t dwords, uint32_t alignment);
void anv_cmd_buffer_begin_subpass(struct anv_cmd_buffer *cmd_buffer,
                                  struct anv_subpass *subpass);
struct anv_address
anv_cmd_buffer_surface_base_address(struct anv_cmd_buffer *cmd_buffer);
struct anv_state
anv_cmd_buffer_alloc_binding_table(struct anv_cmd_buffer *cmd_buffer,
                                   uint32_t entries, uint32_t *state_offset);
struct anv_state
anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer);
struct anv_state
anv_cmd_buffer_alloc_dynamic_state(struct anv_cmd_buffer *cmd_buffer,
                                   uint32_t size, uint32_t alignment);

VkResult
anv_cmd_buffer_new_binding_table_block(struct anv_cmd_buffer *cmd_buffer);
void gen8_cmd_buffer_emit_viewport(struct anv_cmd_buffer *cmd_buffer);
void gen7_cmd_buffer_emit_scissor(struct anv_cmd_buffer *cmd_buffer);

void gen7_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);
void gen75_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);
void gen8_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);
void gen9_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);

void anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);

void gen7_cmd_buffer_begin_subpass(struct anv_cmd_buffer *cmd_buffer,
                                   struct anv_subpass *subpass);
void gen8_cmd_buffer_begin_subpass(struct anv_cmd_buffer *cmd_buffer,
                                   struct anv_subpass *subpass);
void gen9_cmd_buffer_begin_subpass(struct anv_cmd_buffer *cmd_buffer,
                                   struct anv_subpass *subpass);
struct anv_state
anv_cmd_buffer_push_constants(struct anv_cmd_buffer *cmd_buffer,
                              gl_shader_stage stage);

void anv_cmd_buffer_clear_attachments(struct anv_cmd_buffer *cmd_buffer,
                                      struct anv_render_pass *pass,
                                      const VkClearValue *clear_values);
const struct anv_image_view *
anv_cmd_buffer_get_depth_stencil_view(const struct anv_cmd_buffer *cmd_buffer);

void anv_cmd_buffer_dump(struct anv_cmd_buffer *cmd_buffer);
struct anv_fence {
   struct anv_bo bo;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   bool ready;
};
struct anv_shader_module {
   struct nir_shader *nir;

   uint32_t size;
   char data[0];
};
static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}

#define anv_foreach_stage(stage, stage_bits)                         \
   for (gl_shader_stage stage, __tmp = (gl_shader_stage)(stage_bits);\
        stage = __builtin_ffs(__tmp) - 1, __tmp;                     \
        __tmp &= ~(1 << (stage)))
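/* Usage sketch (hypothetical helper): iterate only the stages present in a
 * stage mask:
 *
 *    anv_foreach_stage(s, VK_SHADER_STAGE_VERTEX_BIT |
 *                         VK_SHADER_STAGE_FRAGMENT_BIT)
 *       flush_descriptors_for_stage(s);
 */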
struct anv_pipeline {
   struct anv_device *device;
   struct anv_batch batch;
   uint32_t batch_data[512];
   struct anv_reloc_list batch_relocs;
   uint32_t dynamic_state_mask;
   struct anv_dynamic_state dynamic_state;

   struct anv_pipeline_layout *layout;

   struct brw_vs_prog_data vs_prog_data;
   struct brw_wm_prog_data wm_prog_data;
   struct brw_gs_prog_data gs_prog_data;
   struct brw_cs_prog_data cs_prog_data;
   bool writes_point_size;
   struct brw_stage_prog_data *prog_data[MESA_SHADER_STAGES];
   uint32_t scratch_start[MESA_SHADER_STAGES];
   uint32_t total_scratch;

   struct {
      uint32_t vs_start;
      uint32_t vs_size;
      uint32_t nr_vs_entries;
      uint32_t gs_start;
      uint32_t gs_size;
      uint32_t nr_gs_entries;
   } urb;

   VkShaderStageFlags active_stages;
   struct anv_state_stream program_stream;
   struct anv_state blend_state;
   uint32_t ps_grf_start0;
   uint32_t ps_grf_start2;
   uint32_t gs_vertex_count;

   uint32_t binding_stride[MAX_VBS];
   bool instancing_enable[MAX_VBS];
   bool primitive_restart;

   uint32_t cs_thread_width_max;
   uint32_t cs_right_mask;

   struct {
      uint32_t depth_stencil_state[3];
   } gen7;

   struct {
      uint32_t wm_depth_stencil[3];
   } gen8;

   struct {
      uint32_t wm_depth_stencil[4];
   } gen9;
};
struct anv_graphics_pipeline_create_info {
   bool use_repclear;
   bool disable_viewport;
   bool disable_scissor;
   bool disable_vs;
   bool use_rectlist;
};
VkResult
anv_pipeline_init(struct anv_pipeline *pipeline, struct anv_device *device,
                  const VkGraphicsPipelineCreateInfo *pCreateInfo,
                  const struct anv_graphics_pipeline_create_info *extra,
                  const VkAllocationCallbacks *alloc);
VkResult
anv_pipeline_compile_cs(struct anv_pipeline *pipeline,
                        const VkComputePipelineCreateInfo *info,
                        struct anv_shader_module *module,
                        const char *entrypoint_name);
VkResult
anv_graphics_pipeline_create(VkDevice device,
                             const VkGraphicsPipelineCreateInfo *pCreateInfo,
                             const struct anv_graphics_pipeline_create_info *extra,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);

VkResult
gen7_graphics_pipeline_create(VkDevice _device,
                              const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              const struct anv_graphics_pipeline_create_info *extra,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen75_graphics_pipeline_create(VkDevice _device,
                               const VkGraphicsPipelineCreateInfo *pCreateInfo,
                               const struct anv_graphics_pipeline_create_info *extra,
                               const VkAllocationCallbacks *alloc,
                               VkPipeline *pPipeline);

VkResult
gen8_graphics_pipeline_create(VkDevice _device,
                              const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              const struct anv_graphics_pipeline_create_info *extra,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen9_graphics_pipeline_create(VkDevice _device,
                              const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              const struct anv_graphics_pipeline_create_info *extra,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen7_compute_pipeline_create(VkDevice _device,
                             const VkComputePipelineCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);

VkResult
gen75_compute_pipeline_create(VkDevice _device,
                              const VkComputePipelineCreateInfo *pCreateInfo,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen8_compute_pipeline_create(VkDevice _device,
                             const VkComputePipelineCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);

VkResult
gen9_compute_pipeline_create(VkDevice _device,
                             const VkComputePipelineCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);
struct anv_format {
   const VkFormat vk_format;
   const char *name;
   enum isl_format surface_format; /**< RENDER_SURFACE_STATE.SurfaceFormat */
   const struct isl_format_layout *isl_layout;
   uint8_t num_channels;
   uint16_t depth_format; /**< 3DSTATE_DEPTH_BUFFER.SurfaceFormat */
   bool has_stencil;
};
/**
 * Stencil formats are often a special case. To reduce the number of lookups
 * into the VkFormat-to-anv_format translation table when working with
 * stencil, here is the handle to the table's entry for VK_FORMAT_S8_UINT.
 */
extern const struct anv_format *const anv_format_s8_uint;

const struct anv_format *
anv_format_for_vk_format(VkFormat format);
static inline bool
anv_format_is_color(const struct anv_format *format)
{
   return !format->depth_format && !format->has_stencil;
}

static inline bool
anv_format_is_depth_or_stencil(const struct anv_format *format)
{
   return format->depth_format || format->has_stencil;
}
struct anv_image_view_info {
   uint8_t surface_type; /**< RENDER_SURFACE_STATE.SurfaceType */
   bool is_array:1; /**< RENDER_SURFACE_STATE.SurfaceArray */
   bool is_cube:1; /**< RENDER_SURFACE_STATE.CubeFaceEnable* */
};

struct anv_image_view_info
anv_image_view_info_for_vk_image_view_type(VkImageViewType type);
/**
 * Subsurface of an anv_image.
 */
struct anv_surface {
   struct isl_surf isl;

   /**
    * Offset from VkImage's base address, as bound by vkBindImageMemory().
    */
   uint32_t offset;
};
struct anv_image {
   VkImageType type;
   const struct anv_format *format;
   VkExtent3D extent;
   uint32_t levels;
   uint32_t array_size;
   VkImageUsageFlags usage; /**< Superset of VkImageCreateInfo::usage. */

   VkDeviceSize size;
   uint32_t alignment;

   /* Set when bound */
   struct anv_bo *bo;
   VkDeviceSize offset;

   uint8_t surface_type; /**< RENDER_SURFACE_STATE.SurfaceType */

   bool needs_nonrt_surface_state:1;
   bool needs_color_rt_surface_state:1;

   /**
    * Image subsurfaces
    *
    * For each foo, anv_image::foo_surface is valid if and only if
    * anv_image::format has a foo aspect.
    *
    * The hardware requires that the depth buffer and stencil buffer be
    * separate surfaces. From Vulkan's perspective, though, depth and stencil
    * reside in the same VkImage. To satisfy both the hardware and Vulkan, we
    * allocate the depth and stencil buffers as separate surfaces in the same
    * bo.
    */
   union {
      struct anv_surface color_surface;

      struct {
         struct anv_surface depth_surface;
         struct anv_surface stencil_surface;
      };
   };
};
struct anv_image_view {
   const struct anv_image *image; /**< VkImageViewCreateInfo::image */
   const struct anv_format *format; /**< VkImageViewCreateInfo::format */
   struct anv_bo *bo;
   uint32_t offset; /**< Offset into bo. */
   VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */

   /** RENDER_SURFACE_STATE when using image as a color render target. */
   struct anv_state color_rt_surface_state;

   /** RENDER_SURFACE_STATE when using image as a non render target. */
   struct anv_state nonrt_surface_state;
};
struct anv_image_create_info {
   const VkImageCreateInfo *vk_info;
   bool force_tiling;
   enum isl_tiling tiling;
   uint32_t stride;
};
VkResult anv_image_create(VkDevice _device,
                          const struct anv_image_create_info *info,
                          const VkAllocationCallbacks *alloc,
                          VkImage *pImage);

struct anv_surface *
anv_image_get_surface_for_aspect_mask(struct anv_image *image,
                                      VkImageAspectFlags aspect_mask);
void anv_image_view_init(struct anv_image_view *view,
                         struct anv_device *device,
                         const VkImageViewCreateInfo *pCreateInfo,
                         struct anv_cmd_buffer *cmd_buffer);

void
gen7_image_view_init(struct anv_image_view *iview,
                     struct anv_device *device,
                     const VkImageViewCreateInfo *pCreateInfo,
                     struct anv_cmd_buffer *cmd_buffer);

void
gen75_image_view_init(struct anv_image_view *iview,
                      struct anv_device *device,
                      const VkImageViewCreateInfo *pCreateInfo,
                      struct anv_cmd_buffer *cmd_buffer);

void
gen8_image_view_init(struct anv_image_view *iview,
                     struct anv_device *device,
                     const VkImageViewCreateInfo *pCreateInfo,
                     struct anv_cmd_buffer *cmd_buffer);

void
gen9_image_view_init(struct anv_image_view *iview,
                     struct anv_device *device,
                     const VkImageViewCreateInfo *pCreateInfo,
                     struct anv_cmd_buffer *cmd_buffer);
void anv_fill_buffer_surface_state(struct anv_device *device, void *state,
                                   const struct anv_format *format,
                                   uint32_t offset, uint32_t range,
                                   uint32_t stride);

void gen7_fill_buffer_surface_state(void *state, const struct anv_format *format,
                                    uint32_t offset, uint32_t range,
                                    uint32_t stride);
void gen75_fill_buffer_surface_state(void *state, const struct anv_format *format,
                                     uint32_t offset, uint32_t range,
                                     uint32_t stride);
void gen8_fill_buffer_surface_state(void *state, const struct anv_format *format,
                                    uint32_t offset, uint32_t range,
                                    uint32_t stride);
void gen9_fill_buffer_surface_state(void *state, const struct anv_format *format,
                                    uint32_t offset, uint32_t range,
                                    uint32_t stride);
struct anv_sampler {
   uint32_t state[4];
};
struct anv_framebuffer {
   uint32_t width;
   uint32_t height;
   uint32_t layers;

   uint32_t attachment_count;
   const struct anv_image_view *attachments[0];
};
struct anv_subpass {
   uint32_t input_count;
   uint32_t *input_attachments;
   uint32_t color_count;
   uint32_t *color_attachments;
   uint32_t *resolve_attachments;
   uint32_t depth_stencil_attachment;
};
struct anv_render_pass_attachment {
   const struct anv_format *format;
   uint32_t samples;
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
};
struct anv_render_pass {
   uint32_t attachment_count;
   uint32_t subpass_count;
   struct anv_render_pass_attachment *attachments;
   struct anv_subpass subpasses[0];
};

extern struct anv_render_pass anv_meta_dummy_renderpass;
struct anv_query_pool_slot {
   uint64_t begin;
   uint64_t end;
   uint64_t available;
};

struct anv_query_pool {
   VkQueryType type;
   uint32_t slots;
   struct anv_bo bo;
};
void anv_device_init_meta(struct anv_device *device);
void anv_device_finish_meta(struct anv_device *device);

void *anv_lookup_entrypoint(const char *name);
void anv_dump_image_to_ppm(struct anv_device *device,
                           struct anv_image *image, unsigned miplevel,
                           unsigned array_layer, const char *filename);
#define ANV_DEFINE_HANDLE_CASTS(__anv_type, __VkType)   \
                                                        \
   static inline struct __anv_type *                    \
   __anv_type ## _from_handle(__VkType _handle)         \
   {                                                    \
      return (struct __anv_type *) _handle;             \
   }                                                    \
                                                        \
   static inline __VkType                               \
   __anv_type ## _to_handle(struct __anv_type *_obj)    \
   {                                                    \
      return (__VkType) _obj;                           \
   }
#define ANV_DEFINE_NONDISP_HANDLE_CASTS(__anv_type, __VkType)   \
                                                                \
   static inline struct __anv_type *                            \
   __anv_type ## _from_handle(__VkType _handle)                 \
   {                                                            \
      return (struct __anv_type *)(uintptr_t) _handle;          \
   }                                                            \
                                                                \
   static inline __VkType                                       \
   __anv_type ## _to_handle(struct __anv_type *_obj)            \
   {                                                            \
      return (__VkType)(uintptr_t) _obj;                        \
   }
#define ANV_FROM_HANDLE(__anv_type, __name, __handle) \
   struct __anv_type *__name = __anv_type ## _from_handle(__handle)
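/* Usage sketch: entrypoints typically open by unwrapping their handles
 * (illustrative body):
 *
 *    void anv_DestroySampler(VkDevice _device, VkSampler _sampler,
 *                            const VkAllocationCallbacks *pAllocator)
 *    {
 *       ANV_FROM_HANDLE(anv_device, device, _device);
 *       ANV_FROM_HANDLE(anv_sampler, sampler, _sampler);
 *       anv_free2(&device->alloc, pAllocator, sampler);
 *    }
 */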
ANV_DEFINE_HANDLE_CASTS(anv_cmd_buffer, VkCommandBuffer)
ANV_DEFINE_HANDLE_CASTS(anv_device, VkDevice)
ANV_DEFINE_HANDLE_CASTS(anv_instance, VkInstance)
ANV_DEFINE_HANDLE_CASTS(anv_physical_device, VkPhysicalDevice)
ANV_DEFINE_HANDLE_CASTS(anv_queue, VkQueue)

ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_cmd_pool, VkCommandPool)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer, VkBuffer)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set, VkDescriptorSet)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set_layout, VkDescriptorSetLayout)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_device_memory, VkDeviceMemory)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_fence, VkFence)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_framebuffer, VkFramebuffer)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_image, VkImage)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_image_view, VkImageView)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline, VkPipeline)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline_layout, VkPipelineLayout)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_query_pool, VkQueryPool)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_render_pass, VkRenderPass)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_sampler, VkSampler)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_shader_module, VkShaderModule)
#define ANV_DEFINE_STRUCT_CASTS(__anv_type, __VkType)                  \
                                                                       \
   static inline const __VkType *                                      \
   __anv_type ## _to_ ## __VkType(const struct __anv_type *__anv_obj)  \
   {                                                                   \
      return (const __VkType *) __anv_obj;                             \
   }

#define ANV_COMMON_TO_STRUCT(__VkType, __vk_name, __common_name) \
   const __VkType *__vk_name = anv_common_to_ ## __VkType(__common_name)

ANV_DEFINE_STRUCT_CASTS(anv_common, VkMemoryBarrier)
ANV_DEFINE_STRUCT_CASTS(anv_common, VkBufferMemoryBarrier)
ANV_DEFINE_STRUCT_CASTS(anv_common, VkImageMemoryBarrier)