/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#define __gen_validate_value(x) VALGRIND_CHECK_MEM_IS_DEFINED(&(x), sizeof(x))

#include "brw_device_info.h"
#include "util/macros.h"
#include "util/list.h"
/* Pre-declarations needed for WSI entrypoints */
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

#define VK_USE_PLATFORM_XCB_KHR
#define VK_USE_PLATFORM_WAYLAND_KHR

#include <vulkan/vulkan.h>
#include <vulkan/vulkan_intel.h>
#include <vulkan/vk_icd.h>

#include "anv_entrypoints.h"
#include "anv_gen_macros.h"
#include "brw_context.h"
#define MAX_VBS         32
#define MAX_SETS         8
#define MAX_RTS          8
#define MAX_VIEWPORTS   16
#define MAX_SCISSORS    16
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_DYNAMIC_BUFFERS 16
#define MAX_IMAGES       8
#define MAX_SAMPLES_LOG2 4 /* SKL supports 16 samples */

#define anv_noreturn __attribute__((__noreturn__))
#define anv_printflike(a, b) __attribute__((__format__(__printf__, a, b)))

#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline int32_t
align_i32(int32_t v, int32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

/** Alignment must be a power of 2. */
static inline bool
anv_is_aligned(uintmax_t n, uintmax_t a)
{
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}
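
/* A minimal usage sketch of the alignment helpers above (illustrative
 * values, not taken from real driver code):
 *
 *    align_u32(13, 8);        // => 16: rounds up to the next multiple of 8
 *    align_u32(16, 8);        // => 16: already aligned, returned unchanged
 *    anv_is_aligned(48, 16);  // => true
 *
 * The (v + a - 1) & ~(a - 1) trick relies on the assert above: a must be a
 * non-zero power of two so that a - 1 is a contiguous mask of low bits.
 */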
static inline uint32_t
anv_minify(uint32_t n, uint32_t levels)
{
   if (unlikely(n == 0))
      return 0;
   else
      return MAX(n >> levels, 1);
}

static inline float
anv_clamp_f(float f, float min, float max)
{
   assert(min < max);

   if (f < min)
      return min;
   else if (f > max)
      return max;
   else
      return f;
}

static inline bool
anv_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   } else {
      return false;
   }
}
#define for_each_bit(b, dword)                          \
   for (uint32_t __dword = (dword);                     \
        (b) = __builtin_ffs(__dword) - 1, __dword;      \
        __dword &= ~(1 << (b)))
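
/* Example (illustrative only): walk the set bits of a mask, lowest bit
 * first. The macro consumes a shadow copy (__dword), so the caller's value
 * is left untouched:
 *
 *    uint32_t b;
 *    for_each_bit(b, 0x0au) {
 *       // First iteration: b == 1; second iteration: b == 3.
 *    }
 */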
#define typed_memcpy(dest, src, count) ({                  \
   static_assert(sizeof(*src) == sizeof(*dest), "");       \
   memcpy((dest), (src), (count) * sizeof(*(src)));        \
})

#define zero(x) (memset(&(x), 0, sizeof(x)))
/* Define no kernel as 1, since that's an illegal offset for a kernel */
#define NO_KERNEL 1

struct anv_common {
   VkStructureType sType;
   const void *pNext;
};

/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */

VkResult __vk_errorf(VkResult error, const char *file, int line,
                     const char *format, ...);

#ifdef DEBUG
#define vk_error(error) __vk_errorf(error, __FILE__, __LINE__, NULL);
#define vk_errorf(error, format, ...) __vk_errorf(error, __FILE__, __LINE__, format, ## __VA_ARGS__);
#else
#define vk_error(error) error
#define vk_errorf(error, format, ...) error
#endif
void __anv_finishme(const char *file, int line, const char *format, ...)
   anv_printflike(3, 4);
void anv_loge(const char *format, ...) anv_printflike(1, 2);
void anv_loge_v(const char *format, va_list va);

/**
 * Print a FINISHME message, including its source location.
 */
#define anv_finishme(format, ...) \
   __anv_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);

/* A non-fatal assert. Useful for debugging. */
#ifdef DEBUG
#define anv_assert(x) ({                                               \
   if (unlikely(!(x)))                                                 \
      fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x);   \
})
#else
#define anv_assert(x)
#endif

/**
 * If a block of code is annotated with anv_validate, then the block runs only
 * in debug builds.
 */
#ifdef DEBUG
#define anv_validate if (1)
#else
#define anv_validate if (0)
#endif

void anv_abortf(const char *format, ...) anv_noreturn anv_printflike(1, 2);
void anv_abortfv(const char *format, va_list va) anv_noreturn;

#define stub_return(v)                       \
   do {                                      \
      anv_finishme("stub %s", __func__);     \
      return (v);                            \
   } while (0)

#define stub()                               \
   do {                                      \
      anv_finishme("stub %s", __func__);     \
      return;                                \
   } while (0)
/**
 * A dynamically growable, circular buffer. Elements are added at head and
 * removed from tail. head and tail are free-running uint32_t indices and we
 * only compute the modulo with size when accessing the array. This way,
 * number of bytes in the queue is always head - tail, even in case of
 * wraparound.
 */

struct anv_vector {
   uint32_t head;
   uint32_t tail;
   uint32_t element_size;
   uint32_t size;
   void *data;
};

int anv_vector_init(struct anv_vector *queue, uint32_t element_size,
                    uint32_t size);
void *anv_vector_add(struct anv_vector *queue);
void *anv_vector_remove(struct anv_vector *queue);

static inline int
anv_vector_length(struct anv_vector *queue)
{
   return (queue->head - queue->tail) / queue->element_size;
}

static inline void *
anv_vector_head(struct anv_vector *vector)
{
   assert(vector->tail < vector->head);
   return (void *)((char *)vector->data +
                   ((vector->head - vector->element_size) &
                    (vector->size - 1)));
}

static inline void *
anv_vector_tail(struct anv_vector *vector)
{
   return (void *)((char *)vector->data + (vector->tail & (vector->size - 1)));
}

static inline void
anv_vector_finish(struct anv_vector *queue)
{
   free(queue->data);
}

#define anv_vector_foreach(elem, queue)                                  \
   static_assert(__builtin_types_compatible_p(__typeof__(queue), struct anv_vector *), ""); \
   for (uint32_t __anv_vector_offset = (queue)->tail;                    \
        elem = (queue)->data + (__anv_vector_offset & ((queue)->size - 1)), __anv_vector_offset < (queue)->head; \
        __anv_vector_offset += (queue)->element_size)
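
/* Sketch of the free-running index scheme (hypothetical numbers, assuming
 * size is a power of two as the masking in anv_vector_head/tail requires):
 * with size == 64 and element_size == 16, five adds and one remove leave
 * head == 80 and tail == 16, so anv_vector_length() == (80 - 16) / 16 == 4.
 * Because head and tail are never wrapped, head - tail stays correct even
 * across uint32_t overflow; only the data access applies "& (size - 1)".
 */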
struct anv_bo {
   uint32_t gem_handle;

   /* Index into the current validation list. This is used by the
    * validation list building algorithm to track which buffers are already
    * in the validation list so that we can ensure uniqueness.
    */
   uint32_t index;

   /* Last known offset. This value is provided by the kernel when we
    * execbuf and is used as the presumed offset for the next bunch of
    * relocations.
    */
   uint64_t offset;

   uint64_t size;
   void *map;
};

/* Represents a lock-free linked list of "free" things. This is used by
 * both the block pool and the state pools. Unfortunately, in order to
 * solve the ABA problem, we can't use a single uint32_t head.
 */
union anv_free_list {
   struct {
      int32_t offset;

      /* A simple count that is incremented every time the head changes. */
      uint32_t count;
   };
   uint64_t u64;
};

#define ANV_FREE_LIST_EMPTY ((union anv_free_list) { { 1, 0 } })
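
/* Sketch of why count exists (this is not the actual pop implementation,
 * which lives in the allocator's .c file, and next_offset_of() is a
 * hypothetical helper): a pop must CAS the whole 64-bit {offset, count}
 * pair so that a concurrent pop-then-push that recycles the same offset
 * still bumps count and fails the CAS, avoiding the ABA problem:
 *
 *    union anv_free_list old, new, current = *list;
 *    do {
 *       old = current;
 *       new.offset = next_offset_of(old.offset);
 *       new.count = old.count + 1;
 *       current.u64 = __sync_val_compare_and_swap(&list->u64,
 *                                                 old.u64, new.u64);
 *    } while (current.u64 != old.u64);
 */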
struct anv_block_state {
   union {
      struct {
         uint32_t next;
         uint32_t end;
      };
      uint64_t u64;
   };
};

struct anv_block_pool {
   struct anv_device *device;

   struct anv_bo bo;

   /* The offset from the start of the bo to the "center" of the block
    * pool. Pointers to allocated blocks are given by
    * bo.map + center_bo_offset + offsets.
    */
   uint32_t center_bo_offset;

   /* Current memory map of the block pool. This pointer may or may not
    * point to the actual beginning of the block pool memory. If
    * anv_block_pool_alloc_back has ever been called, then this pointer
    * will point to the "center" position of the buffer and all offsets
    * (negative or positive) given out by the block pool alloc functions
    * will be valid relative to this pointer.
    *
    * In particular, map == bo.map + center_offset
    */
   void *map;
   int fd;

   /**
    * Array of mmaps and gem handles owned by the block pool, reclaimed when
    * the block pool is destroyed.
    */
   struct anv_vector mmap_cleanups;

   uint32_t block_size;

   union anv_free_list free_list;
   struct anv_block_state state;

   union anv_free_list back_free_list;
   struct anv_block_state back_state;
};
/* Block pools are backed by a fixed-size 4GB memfd */
#define BLOCK_POOL_MEMFD_SIZE (1ull << 32)

/* The center of the block pool is also the middle of the memfd. This may
 * change in the future if we decide differently for some reason.
 */
#define BLOCK_POOL_MEMFD_CENTER (BLOCK_POOL_MEMFD_SIZE / 2)
static inline uint32_t
anv_block_pool_size(struct anv_block_pool *pool)
{
   return pool->state.end + pool->back_state.end;
}
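
/* Illustrative address arithmetic for the two-sided pool (hypothetical
 * numbers): with center_bo_offset == 1 MiB, a front allocation returned as
 * offset +4096 lives at map + 4096 and a back allocation returned as offset
 * -4096 lives at map - 4096; both stay inside the bo precisely because
 * map == bo.map + center_bo_offset. anv_block_pool_size() is then the total
 * of bytes handed out on both sides.
 */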
struct anv_state {
   uint32_t offset;
   uint32_t alloc_size;
   void *map;
};

struct anv_fixed_size_state_pool {
   size_t state_size;
   union anv_free_list free_list;
   struct anv_block_state block;
};

#define ANV_MIN_STATE_SIZE_LOG2 6
#define ANV_MAX_STATE_SIZE_LOG2 10

#define ANV_STATE_BUCKETS (ANV_MAX_STATE_SIZE_LOG2 - ANV_MIN_STATE_SIZE_LOG2)
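
/* Illustrative bucket math (assuming the allocator rounds a requested state
 * size up to the next power of two, as the LOG2 bounds above suggest): a
 * 100-byte state rounds up to 128 == 1 << 7 and lands in bucket
 * 7 - ANV_MIN_STATE_SIZE_LOG2 == 1, while anything of 64 bytes or less
 * shares bucket 0.
 */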
struct anv_state_pool {
   struct anv_block_pool *block_pool;
   struct anv_fixed_size_state_pool buckets[ANV_STATE_BUCKETS];
};

struct anv_state_stream_block;

struct anv_state_stream {
   struct anv_block_pool *block_pool;

   /* The current working block */
   struct anv_state_stream_block *block;

   /* Offset at which the current block starts */
   uint32_t start;

   /* Offset at which to allocate the next state */
   uint32_t next;

   /* Offset at which the current block ends */
   uint32_t end;
};
#define CACHELINE_SIZE 64
#define CACHELINE_MASK 63

static inline void
anv_clflush_range(void *start, size_t size)
{
   /* Round the start pointer down to its cacheline so that a partially
    * covered first line is flushed along with the rest.
    */
   void *p = (void *) (((uintptr_t) start) & ~CACHELINE_MASK);
   void *end = start + size;

   __builtin_ia32_mfence();
   while (p < end) {
      __builtin_ia32_clflush(p);
      p += CACHELINE_SIZE;
   }
}

static inline void
anv_state_clflush(struct anv_state state)
{
   anv_clflush_range(state.map, state.alloc_size);
}
void anv_block_pool_init(struct anv_block_pool *pool,
                         struct anv_device *device, uint32_t block_size);
void anv_block_pool_finish(struct anv_block_pool *pool);
int32_t anv_block_pool_alloc(struct anv_block_pool *pool);
int32_t anv_block_pool_alloc_back(struct anv_block_pool *pool);
void anv_block_pool_free(struct anv_block_pool *pool, int32_t offset);
void anv_state_pool_init(struct anv_state_pool *pool,
                         struct anv_block_pool *block_pool);
void anv_state_pool_finish(struct anv_state_pool *pool);
struct anv_state anv_state_pool_alloc(struct anv_state_pool *pool,
                                      size_t state_size, size_t alignment);
void anv_state_pool_free(struct anv_state_pool *pool, struct anv_state state);
void anv_state_stream_init(struct anv_state_stream *stream,
                           struct anv_block_pool *block_pool);
void anv_state_stream_finish(struct anv_state_stream *stream);
struct anv_state anv_state_stream_alloc(struct anv_state_stream *stream,
                                        uint32_t size, uint32_t alignment);
/**
 * Implements a pool of re-usable BOs. The interface is identical to that
 * of block_pool except that each block is its own BO.
 */
struct anv_bo_pool {
   struct anv_device *device;

   uint32_t bo_size;

   void *free_list;
};

void anv_bo_pool_init(struct anv_bo_pool *pool,
                      struct anv_device *device, uint32_t block_size);
void anv_bo_pool_finish(struct anv_bo_pool *pool);
VkResult anv_bo_pool_alloc(struct anv_bo_pool *pool, struct anv_bo *bo);
void anv_bo_pool_free(struct anv_bo_pool *pool, const struct anv_bo *bo);
void *anv_resolve_entrypoint(uint32_t index);

extern struct anv_dispatch_table dtable;

#define ANV_CALL(func) ({                                                    \
   if (dtable.func == NULL) {                                                \
      size_t idx = offsetof(struct anv_dispatch_table, func) / sizeof(void *); \
      dtable.entrypoints[idx] = anv_resolve_entrypoint(idx);                 \
   }                                                                         \
   dtable.func;                                                              \
})
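
/* Example (illustrative): ANV_CALL resolves the entrypoint on first use and
 * then calls through the dispatch table; CmdPipelineBarrier is just a
 * representative member name here:
 *
 *    ANV_CALL(CmdPipelineBarrier)(cmd_buffer_handle, ...);
 */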
static inline void *
anv_alloc(const VkAllocationCallbacks *alloc,
          size_t size, size_t align,
          VkSystemAllocationScope scope)
{
   return alloc->pfnAllocation(alloc->pUserData, size, align, scope);
}

static inline void *
anv_realloc(const VkAllocationCallbacks *alloc,
            void *ptr, size_t size, size_t align,
            VkSystemAllocationScope scope)
{
   return alloc->pfnReallocation(alloc->pUserData, ptr, size, align, scope);
}

static inline void
anv_free(const VkAllocationCallbacks *alloc, void *data)
{
   alloc->pfnFree(alloc->pUserData, data);
}

static inline void *
anv_alloc2(const VkAllocationCallbacks *parent_alloc,
           const VkAllocationCallbacks *alloc,
           size_t size, size_t align,
           VkSystemAllocationScope scope)
{
   if (alloc)
      return anv_alloc(alloc, size, align, scope);
   else
      return anv_alloc(parent_alloc, size, align, scope);
}

static inline void
anv_free2(const VkAllocationCallbacks *parent_alloc,
          const VkAllocationCallbacks *alloc,
          void *data)
{
   if (alloc)
      anv_free(alloc, data);
   else
      anv_free(parent_alloc, data);
}
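
/* Usage sketch (illustrative): object creation typically receives both the
 * device's allocator and an optional per-call pAllocator; anv_alloc2 and
 * anv_free2 prefer the per-call one and fall back to the parent:
 *
 *    pool = anv_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
 *                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
 *    ...
 *    anv_free2(&device->alloc, pAllocator, pool);
 */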
struct anv_physical_device {
   VK_LOADER_DATA _loader_data;

   struct anv_instance *instance;
   const struct brw_device_info *info;
   uint64_t aperture_size;
   struct brw_compiler *compiler;
   struct isl_device isl_dev;
};

struct anv_wsi_interface;

#define VK_ICD_WSI_PLATFORM_MAX 5

struct anv_instance {
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   int physicalDeviceCount;
   struct anv_physical_device physicalDevice;

   struct anv_wsi_interface *wsi[VK_ICD_WSI_PLATFORM_MAX];
};

VkResult anv_init_wsi(struct anv_instance *instance);
void anv_finish_wsi(struct anv_instance *instance);
struct anv_meta_state {
   VkAllocationCallbacks alloc;

   /**
    * Use array element `i` for images with `2^i` samples.
    */
   struct {
      /**
       * Pipeline N is used to clear color attachment N of the current
       * subpass.
       *
       * HACK: We use one pipeline per color attachment to work around the
       * compiler's inability to dynamically set the render target index of
       * the render target write message.
       */
      struct anv_pipeline *color_pipelines[MAX_RTS];

      struct anv_pipeline *depth_only_pipeline;
      struct anv_pipeline *stencil_only_pipeline;
      struct anv_pipeline *depthstencil_pipeline;
   } clear[1 + MAX_SAMPLES_LOG2];

   struct {
      VkRenderPass render_pass;

      /** Pipeline that blits from a 1D image. */
      VkPipeline pipeline_1d_src;

      /** Pipeline that blits from a 2D image. */
      VkPipeline pipeline_2d_src;

      /** Pipeline that blits from a 3D image. */
      VkPipeline pipeline_3d_src;

      VkPipelineLayout pipeline_layout;
      VkDescriptorSetLayout ds_layout;
   } blit;

   struct {
      /** Pipeline [i] resolves an image with 2^(i+1) samples. */
      VkPipeline pipelines[MAX_SAMPLES_LOG2];

      VkPipelineLayout pipeline_layout;
      VkDescriptorSetLayout ds_layout;
   } resolve;
};

struct anv_queue {
   VK_LOADER_DATA _loader_data;

   struct anv_device *device;

   struct anv_state_pool *pool;
};
struct anv_pipeline_cache {
   struct anv_device *device;
   struct anv_state_stream program_stream;
   pthread_mutex_t mutex;

   uint32_t kernel_count;
};

void anv_pipeline_cache_init(struct anv_pipeline_cache *cache,
                             struct anv_device *device);
void anv_pipeline_cache_finish(struct anv_pipeline_cache *cache);
uint32_t anv_pipeline_cache_search(struct anv_pipeline_cache *cache,
                                   const unsigned char *sha1,
                                   void *prog_data);
uint32_t anv_pipeline_cache_upload_kernel(struct anv_pipeline_cache *cache,
                                          const unsigned char *sha1,
                                          const void *kernel,
                                          size_t kernel_size,
                                          const void *prog_data,
                                          size_t prog_data_size);
struct anv_device {
   VK_LOADER_DATA _loader_data;

   VkAllocationCallbacks alloc;

   struct anv_instance *instance;
   struct brw_device_info info;
   struct isl_device isl_dev;

   struct anv_bo_pool batch_bo_pool;

   struct anv_block_pool dynamic_state_block_pool;
   struct anv_state_pool dynamic_state_pool;

   struct anv_block_pool instruction_block_pool;
   struct anv_pipeline_cache default_pipeline_cache;

   struct anv_block_pool surface_state_block_pool;
   struct anv_state_pool surface_state_pool;

   struct anv_bo workaround_bo;

   struct anv_meta_state meta_state;

   struct anv_state border_colors;

   struct anv_queue queue;

   struct anv_block_pool scratch_block_pool;

   pthread_mutex_t mutex;
};
VkResult gen7_init_device_state(struct anv_device *device);
VkResult gen75_init_device_state(struct anv_device *device);
VkResult gen8_init_device_state(struct anv_device *device);
VkResult gen9_init_device_state(struct anv_device *device);

void anv_device_get_cache_uuid(void *uuid);

void *anv_gem_mmap(struct anv_device *device,
                   uint32_t gem_handle, uint64_t offset, uint64_t size,
                   uint32_t flags);
void anv_gem_munmap(void *p, uint64_t size);
uint32_t anv_gem_create(struct anv_device *device, size_t size);
void anv_gem_close(struct anv_device *device, uint32_t gem_handle);
uint32_t anv_gem_userptr(struct anv_device *device, void *mem, size_t size);
int anv_gem_wait(struct anv_device *device, uint32_t gem_handle,
                 int64_t *timeout_ns);
int anv_gem_execbuffer(struct anv_device *device,
                       struct drm_i915_gem_execbuffer2 *execbuf);
int anv_gem_set_tiling(struct anv_device *device, uint32_t gem_handle,
                       uint32_t stride, uint32_t tiling);
int anv_gem_create_context(struct anv_device *device);
int anv_gem_destroy_context(struct anv_device *device, int context);
int anv_gem_get_param(int fd, uint32_t param);
bool anv_gem_get_bit6_swizzle(int fd, uint32_t tiling);
int anv_gem_get_aperture(int fd, uint64_t *size);
int anv_gem_handle_to_fd(struct anv_device *device, uint32_t gem_handle);
uint32_t anv_gem_fd_to_handle(struct anv_device *device, int fd);
int anv_gem_set_caching(struct anv_device *device, uint32_t gem_handle,
                        uint32_t caching);
int anv_gem_set_domain(struct anv_device *device, uint32_t gem_handle,
                       uint32_t read_domains, uint32_t write_domain);

VkResult anv_bo_init_new(struct anv_bo *bo, struct anv_device *device,
                         uint64_t size);
struct anv_reloc_list {
   struct drm_i915_gem_relocation_entry *relocs;
   struct anv_bo **reloc_bos;
};

VkResult anv_reloc_list_init(struct anv_reloc_list *list,
                             const VkAllocationCallbacks *alloc);
void anv_reloc_list_finish(struct anv_reloc_list *list,
                           const VkAllocationCallbacks *alloc);

uint64_t anv_reloc_list_add(struct anv_reloc_list *list,
                            const VkAllocationCallbacks *alloc,
                            uint32_t offset, struct anv_bo *target_bo,
                            uint32_t delta);

struct anv_batch_bo {
   /* Link in the anv_cmd_buffer.owned_batch_bos list */
   struct list_head link;

   struct anv_bo bo;

   /* Bytes actually consumed in this batch BO */
   size_t length;

   /* Last seen surface state block pool bo offset */
   uint32_t last_ss_pool_bo_offset;

   struct anv_reloc_list relocs;
};

struct anv_batch {
   const VkAllocationCallbacks *alloc;

   void *start;
   void *end;
   void *next;

   struct anv_reloc_list *relocs;

   /* This callback is called (with the associated user data) in the event
    * that the batch runs out of space.
    */
   VkResult (*extend_cb)(struct anv_batch *, void *);
   void *user_data;
};
void *anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords);
void anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other);
uint64_t anv_batch_emit_reloc(struct anv_batch *batch,
                              void *location, struct anv_bo *bo,
                              uint32_t offset);
VkResult anv_device_submit_simple_batch(struct anv_device *device,
                                        struct anv_batch *batch);
struct anv_address {
   struct anv_bo *bo;
   uint32_t offset;
};

#define __gen_address_type struct anv_address
#define __gen_user_data struct anv_batch

static inline uint64_t
__gen_combine_address(struct anv_batch *batch, void *location,
                      const struct anv_address address, uint32_t delta)
{
   if (address.bo == NULL) {
      return address.offset + delta;
   } else {
      assert(batch->start <= location && location < batch->end);

      return anv_batch_emit_reloc(batch, location, address.bo,
                                  address.offset + delta);
   }
}
/* Wrapper macros needed to work around preprocessor argument issues. In
 * particular, arguments don't get pre-evaluated if they are concatenated.
 * This means that, if you pass GENX(3DSTATE_PS) into the emit macro, the
 * GENX macro won't get evaluated if the emit macro contains "cmd ## foo".
 * We can work around this easily enough with these helpers.
 */
#define __anv_cmd_length(cmd) cmd ## _length
#define __anv_cmd_length_bias(cmd) cmd ## _length_bias
#define __anv_cmd_header(cmd) cmd ## _header
#define __anv_cmd_pack(cmd) cmd ## _pack
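
/* Example of the problem (illustrative): with a direct "cmd ## _length",
 * passing GENX(3DSTATE_PS) would paste into the literal token
 * "GENX(3DSTATE_PS)_length". Routing through __anv_cmd_length(cmd) adds an
 * extra expansion pass, so GENX(3DSTATE_PS) first expands to a gen-specific
 * name (e.g. GEN8_3DSTATE_PS) and only then gets _length appended.
 */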
#define anv_batch_emit(batch, cmd, ...) do {                               \
      void *__dst = anv_batch_emit_dwords(batch, __anv_cmd_length(cmd));   \
      struct cmd __template = {                                            \
         __anv_cmd_header(cmd),                                            \
         __VA_ARGS__                                                       \
      };                                                                   \
      __anv_cmd_pack(cmd)(batch, __dst, &__template);                      \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(__dst, __anv_cmd_length(cmd) * 4)); \
   } while (0)
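
/* Usage sketch (illustrative; PIPE_CONTROL and its field name follow the
 * genxml naming convention but are not checked against the real tables):
 *
 *    anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
 *                   .CommandStreamerStallEnable = true);
 */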
#define anv_batch_emitn(batch, n, cmd, ...) ({                  \
      void *__dst = anv_batch_emit_dwords(batch, n);            \
      struct cmd __template = {                                 \
         __anv_cmd_header(cmd),                                 \
         .DWordLength = n - __anv_cmd_length_bias(cmd),         \
         __VA_ARGS__                                            \
      };                                                        \
      __anv_cmd_pack(cmd)(batch, __dst, &__template);           \
      __dst;                                                    \
   })

#define anv_batch_emit_merge(batch, dwords0, dwords1)                   \
   do {                                                                 \
      uint32_t *dw;                                                     \
                                                                        \
      static_assert(ARRAY_SIZE(dwords0) == ARRAY_SIZE(dwords1), "mismatch merge"); \
      dw = anv_batch_emit_dwords((batch), ARRAY_SIZE(dwords0));         \
      for (uint32_t i = 0; i < ARRAY_SIZE(dwords0); i++)                \
         dw[i] = (dwords0)[i] | (dwords1)[i];                           \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(dw, ARRAY_SIZE(dwords0) * 4));   \
   } while (0)

#define anv_state_pool_emit(pool, cmd, align, ...) ({                   \
      const uint32_t __size = __anv_cmd_length(cmd) * 4;                \
      struct anv_state __state =                                        \
         anv_state_pool_alloc((pool), __size, align);                   \
      struct cmd __template = {                                         \
         __VA_ARGS__                                                    \
      };                                                                \
      __anv_cmd_pack(cmd)(NULL, __state.map, &__template);              \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(__state.map, __anv_cmd_length(cmd) * 4)); \
      if (!(pool)->block_pool->device->info.has_llc)                    \
         anv_state_clflush(__state);                                    \
      __state;                                                          \
   })
#define GEN7_MOCS (struct GEN7_MEMORY_OBJECT_CONTROL_STATE) {  \
   .GraphicsDataTypeGFDT = 0,                                  \
   .LLCCacheabilityControlLLCCC = 0,                           \
   .L3CacheabilityControlL3CC = 1,                             \
}

#define GEN75_MOCS (struct GEN75_MEMORY_OBJECT_CONTROL_STATE) { \
   .LLCeLLCCacheabilityControlLLCCC = 0,                        \
   .L3CacheabilityControlL3CC = 1,                              \
}

#define GEN8_MOCS {                                   \
   .MemoryTypeLLCeLLCCacheabilityControl = WB,        \
   .TargetCache = L3DefertoPATforLLCeLLCselection,    \
   .AgeforQUADLRU = 0                                 \
}

/* Skylake: MOCS is now an index into an array of 62 different caching
 * configurations programmed by the kernel.
 */
#define GEN9_MOCS {                              \
   /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */   \
   .IndextoMOCSTables = 2                        \
}

#define GEN9_MOCS_PTE {                          \
   /* TC=LLC/eLLC, LeCC=PTE, LRUM=3, L3CC=WB */  \
   .IndextoMOCSTables = 1                        \
}
struct anv_device_memory {
   struct anv_bo bo;
   VkDeviceSize map_size;
   void *map;
};

/**
 * Header for Vertex URB Entry (VUE)
 */
struct anv_vue_header {
   uint32_t Reserved;
   uint32_t RTAIndex; /* RenderTargetArrayIndex */
   uint32_t ViewportIndex;
   float PointWidth;
};
struct anv_descriptor_set_binding_layout {
   /* Number of array elements in this binding */
   uint16_t array_size;

   /* Index into the flattened descriptor set */
   uint16_t descriptor_index;

   /* Index into the dynamic state array for a dynamic buffer */
   int16_t dynamic_offset_index;

   /* Index into the descriptor set buffer views */
   int16_t buffer_index;

   struct {
      /* Index into the binding table for the associated surface */
      int16_t surface_index;

      /* Index into the sampler table for the associated sampler */
      int16_t sampler_index;

      /* Index into the image table for the associated image */
      int16_t image_index;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct anv_sampler **immutable_samplers;
};

struct anv_descriptor_set_layout {
   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   /* Number of buffers in this descriptor set */
   uint16_t buffer_count;

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set */
   struct anv_descriptor_set_binding_layout binding[0];
};

struct anv_descriptor {
   VkDescriptorType type;

   union {
      struct {
         struct anv_image_view *image_view;
         struct anv_sampler *sampler;
      };

      struct anv_buffer_view *buffer_view;
   };
};
struct anv_descriptor_set {
   const struct anv_descriptor_set_layout *layout;
   uint32_t buffer_count;
   struct anv_buffer_view *buffer_views;
   struct anv_descriptor descriptors[0];
};

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set);

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_set *set);

struct anv_pipeline_binding {
   /* The descriptor set this surface corresponds to */
   uint16_t set;

   /* Offset into the descriptor set */
   uint16_t offset;
};

struct anv_pipeline_layout {
   struct {
      struct anv_descriptor_set_layout *layout;
      uint32_t dynamic_offset_start;

      struct {
         uint32_t surface_start;
         uint32_t sampler_start;
         uint32_t image_start;
      } stage[MESA_SHADER_STAGES];
   } set[MAX_SETS];

   struct {
      bool has_dynamic_offsets;
      uint32_t surface_count;
      struct anv_pipeline_binding *surface_to_descriptor;
      uint32_t sampler_count;
      struct anv_pipeline_binding *sampler_to_descriptor;
      uint32_t image_count;
   } stage[MESA_SHADER_STAGES];

   struct anv_pipeline_binding entries[0];
};
struct anv_buffer {
   struct anv_device *device;
   VkDeviceSize size;

   VkBufferUsageFlags usage;

   /* Set when bound */
   struct anv_bo *bo;
   VkDeviceSize offset;
};
enum anv_cmd_dirty_bits {
   ANV_CMD_DIRTY_DYNAMIC_VIEWPORT             = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
   ANV_CMD_DIRTY_DYNAMIC_SCISSOR              = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
   ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH           = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS           = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
   ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS      = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS         = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
   ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
   ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK   = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
   ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE    = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
   ANV_CMD_DIRTY_DYNAMIC_ALL                  = (1 << 9) - 1,
   ANV_CMD_DIRTY_PIPELINE                     = 1 << 9,
   ANV_CMD_DIRTY_INDEX_BUFFER                 = 1 << 10,
   ANV_CMD_DIRTY_RENDER_TARGETS               = 1 << 11,
};
typedef uint32_t anv_cmd_dirty_mask_t;

struct anv_vertex_binding {
   struct anv_buffer *buffer;
   VkDeviceSize offset;
};
struct anv_push_constants {
   /* Current allocated size of this push constants data structure.
    * Because a decent chunk of it may not be used (images on SKL, for
    * instance), we won't actually allocate the entire structure up-front.
    */
   uint32_t size;

   /* Push constant data provided by the client through vkPushConstants */
   uint8_t client_data[MAX_PUSH_CONSTANTS_SIZE];

   /* Our hardware only provides zero-based vertex and instance id so, in
    * order to satisfy the Vulkan requirements, we may have to push one or
    * both of these into the shader.
    */
   uint32_t base_vertex;
   uint32_t base_instance;

   /* Offsets and ranges for dynamically bound buffers */
   struct {
      uint32_t offset;
      uint32_t range;
   } dynamic[MAX_DYNAMIC_BUFFERS];

   /* Image data for image_load_store on pre-SKL */
   struct brw_image_param images[MAX_IMAGES];
};

struct anv_dynamic_state {
   struct {
      uint32_t count;
      VkViewport viewports[MAX_VIEWPORTS];
   } viewport;

   struct {
      uint32_t count;
      VkRect2D scissors[MAX_SCISSORS];
   } scissor;

   float line_width;

   struct {
      float bias;
      float clamp;
      float slope;
   } depth_bias;

   float blend_constants[4];

   struct {
      float min;
      float max;
   } depth_bounds;

   struct {
      uint32_t front;
      uint32_t back;
   } stencil_compare_mask;

   struct {
      uint32_t front;
      uint32_t back;
   } stencil_write_mask;

   struct {
      uint32_t front;
      uint32_t back;
   } stencil_reference;
};

extern const struct anv_dynamic_state default_dynamic_state;
void anv_dynamic_state_copy(struct anv_dynamic_state *dest,
                            const struct anv_dynamic_state *src,
                            uint32_t copy_mask);

/**
 * Attachment state when recording a renderpass instance.
 *
 * The clear value is valid only if there exists a pending clear.
 */
struct anv_attachment_state {
   VkImageAspectFlags pending_clear_aspects;
   VkClearValue clear_value;
};
/** State required while building cmd buffer */
struct anv_cmd_state {
   /* PIPELINE_SELECT.PipelineSelection */
   uint32_t current_pipeline;
   uint32_t current_l3_config;
   uint32_t vb_dirty;
   anv_cmd_dirty_mask_t dirty;
   anv_cmd_dirty_mask_t compute_dirty;
   uint32_t num_workgroups_offset;
   struct anv_bo *num_workgroups_bo;
   VkShaderStageFlags descriptors_dirty;
   VkShaderStageFlags push_constants_dirty;
   uint32_t scratch_size;
   struct anv_pipeline *pipeline;
   struct anv_pipeline *compute_pipeline;
   struct anv_framebuffer *framebuffer;
   struct anv_render_pass *pass;
   struct anv_subpass *subpass;
   uint32_t restart_index;
   struct anv_vertex_binding vertex_bindings[MAX_VBS];
   struct anv_descriptor_set *descriptors[MAX_SETS];
   struct anv_push_constants *push_constants[MESA_SHADER_STAGES];
   struct anv_state binding_tables[MESA_SHADER_STAGES];
   struct anv_state samplers[MESA_SHADER_STAGES];
   struct anv_dynamic_state dynamic;

   /**
    * Array length is anv_cmd_state::pass::attachment_count. Array content is
    * valid only when recording a render pass instance.
    */
   struct anv_attachment_state *attachments;

   struct {
      struct anv_buffer *index_buffer;
      uint32_t index_type; /**< 3DSTATE_INDEX_BUFFER.IndexFormat */
      uint32_t index_offset;
   } gen7;
};
struct anv_cmd_pool {
   VkAllocationCallbacks alloc;
   struct list_head cmd_buffers;
};

#define ANV_CMD_BUFFER_BATCH_SIZE 8192

enum anv_cmd_buffer_exec_mode {
   ANV_CMD_BUFFER_EXEC_MODE_PRIMARY,
   ANV_CMD_BUFFER_EXEC_MODE_EMIT,
   ANV_CMD_BUFFER_EXEC_MODE_CHAIN,
   ANV_CMD_BUFFER_EXEC_MODE_COPY_AND_CHAIN,
};
struct anv_cmd_buffer {
   VK_LOADER_DATA _loader_data;

   struct anv_device *device;

   struct anv_cmd_pool *pool;
   struct list_head pool_link;

   struct anv_batch batch;

   /* Fields required for the actual chain of anv_batch_bo's.
    *
    * These fields are initialized by anv_cmd_buffer_init_batch_bo_chain().
    */
   struct list_head batch_bos;
   enum anv_cmd_buffer_exec_mode exec_mode;

   /* A vector of anv_batch_bo pointers for every batch or surface buffer
    * referenced by this command buffer
    *
    * initialized by anv_cmd_buffer_init_batch_bo_chain()
    */
   struct anv_vector seen_bbos;

   /* A vector of int32_t's for every block of binding tables.
    *
    * initialized by anv_cmd_buffer_init_batch_bo_chain()
    */
   struct anv_vector bt_blocks;
   uint32_t bt_next;
   struct anv_reloc_list surface_relocs;

   /* Information needed for execbuf
    *
    * These fields are generated by anv_cmd_buffer_prepare_execbuf().
    */
   struct {
      struct drm_i915_gem_execbuffer2 execbuf;

      struct drm_i915_gem_exec_object2 *objects;
      uint32_t bo_count;
      struct anv_bo **bos;

      /* Allocated length of the 'objects' and 'bos' arrays */
      uint32_t array_length;
   } execbuf2;

   /* Serial for tracking buffer completion */
   uint32_t serial;

   /* Stream objects for storing temporary data */
   struct anv_state_stream surface_state_stream;
   struct anv_state_stream dynamic_state_stream;

   VkCommandBufferUsageFlags usage_flags;
   VkCommandBufferLevel level;

   struct anv_cmd_state state;
};
VkResult anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_fini_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_reset_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_end_batch_buffer(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_add_secondary(struct anv_cmd_buffer *primary,
                                  struct anv_cmd_buffer *secondary);
void anv_cmd_buffer_prepare_execbuf(struct anv_cmd_buffer *cmd_buffer);

VkResult anv_cmd_buffer_emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
                                           unsigned stage,
                                           struct anv_state *bt_state);
VkResult anv_cmd_buffer_emit_samplers(struct anv_cmd_buffer *cmd_buffer,
                                      unsigned stage,
                                      struct anv_state *state);
uint32_t gen7_cmd_buffer_flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer);
void gen7_cmd_buffer_emit_descriptor_pointers(struct anv_cmd_buffer *cmd_buffer,
                                              uint32_t stages);

struct anv_state anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
                                             const void *data, uint32_t size,
                                             uint32_t alignment);
struct anv_state anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
                                              uint32_t *a, uint32_t *b,
                                              uint32_t dwords,
                                              uint32_t alignment);

struct anv_address
anv_cmd_buffer_surface_base_address(struct anv_cmd_buffer *cmd_buffer);
struct anv_state
anv_cmd_buffer_alloc_binding_table(struct anv_cmd_buffer *cmd_buffer,
                                   uint32_t entries, uint32_t *state_offset);
struct anv_state
anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer);
struct anv_state
anv_cmd_buffer_alloc_dynamic_state(struct anv_cmd_buffer *cmd_buffer,
                                   uint32_t size, uint32_t alignment);
VkResult
anv_cmd_buffer_new_binding_table_block(struct anv_cmd_buffer *cmd_buffer);
void gen8_cmd_buffer_emit_viewport(struct anv_cmd_buffer *cmd_buffer);
void gen7_cmd_buffer_emit_scissor(struct anv_cmd_buffer *cmd_buffer);

void gen7_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);
void gen75_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);
void gen8_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);
void gen9_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);

void anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer);

void anv_cmd_state_setup_attachments(struct anv_cmd_buffer *cmd_buffer,
                                     const VkRenderPassBeginInfo *info);

void gen7_cmd_buffer_set_subpass(struct anv_cmd_buffer *cmd_buffer,
                                 struct anv_subpass *subpass);
void gen75_cmd_buffer_set_subpass(struct anv_cmd_buffer *cmd_buffer,
                                  struct anv_subpass *subpass);
void gen8_cmd_buffer_set_subpass(struct anv_cmd_buffer *cmd_buffer,
                                 struct anv_subpass *subpass);
void gen9_cmd_buffer_set_subpass(struct anv_cmd_buffer *cmd_buffer,
                                 struct anv_subpass *subpass);
void anv_cmd_buffer_set_subpass(struct anv_cmd_buffer *cmd_buffer,
                                struct anv_subpass *subpass);

void gen7_flush_pipeline_select_3d(struct anv_cmd_buffer *cmd_buffer);
void gen75_flush_pipeline_select_3d(struct anv_cmd_buffer *cmd_buffer);
void gen8_flush_pipeline_select_3d(struct anv_cmd_buffer *cmd_buffer);
void gen9_flush_pipeline_select_3d(struct anv_cmd_buffer *cmd_buffer);

void gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer);
void gen75_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer);
void gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer);
void gen9_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer);

void gen7_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer);
void gen75_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer);
void gen8_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer);
void gen9_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer);
struct anv_state
anv_cmd_buffer_push_constants(struct anv_cmd_buffer *cmd_buffer,
                              gl_shader_stage stage);
struct anv_state
anv_cmd_buffer_cs_push_constants(struct anv_cmd_buffer *cmd_buffer);

void anv_cmd_buffer_clear_subpass(struct anv_cmd_buffer *cmd_buffer);
void anv_cmd_buffer_resolve_subpass(struct anv_cmd_buffer *cmd_buffer);

const struct anv_image_view *
anv_cmd_buffer_get_depth_stencil_view(const struct anv_cmd_buffer *cmd_buffer);

void anv_cmd_buffer_dump(struct anv_cmd_buffer *cmd_buffer);
struct anv_fence {
   struct anv_bo bo;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   bool ready;
};

struct anv_event {
   uint64_t semaphore;
   struct anv_state state;
};

struct anv_shader_module {
   struct nir_shader *nir;

   unsigned char sha1[20];
   uint32_t size;
   char data[0];
};

void anv_hash_shader(unsigned char *hash, const void *key, size_t key_size,
                     struct anv_shader_module *module,
                     const char *entrypoint,
                     const VkSpecializationInfo *spec_info);
static inline gl_shader_stage
vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage)
{
   assert(__builtin_popcount(vk_stage) == 1);
   return ffs(vk_stage) - 1;
}

static inline VkShaderStageFlagBits
mesa_to_vk_shader_stage(gl_shader_stage mesa_stage)
{
   return (1 << mesa_stage);
}

#define ANV_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

#define anv_foreach_stage(stage, stage_bits)                        \
   for (gl_shader_stage stage,                                      \
        __tmp = (gl_shader_stage)((stage_bits) & ANV_STAGE_MASK);   \
        stage = __builtin_ffs(__tmp) - 1, __tmp;                    \
        __tmp &= ~(1 << (stage)))
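
/* Example (illustrative): visit each stage bit in a VkShaderStageFlags
 * value in increasing gl_shader_stage order; this relies on the same
 * bit-position correspondence that vk_to_mesa_shader_stage assumes:
 *
 *    anv_foreach_stage(s, VK_SHADER_STAGE_VERTEX_BIT |
 *                         VK_SHADER_STAGE_FRAGMENT_BIT) {
 *       // s == MESA_SHADER_VERTEX on the first pass, then
 *       // MESA_SHADER_FRAGMENT; __tmp clears each bit as it is consumed.
 *    }
 */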
struct anv_pipeline {
   struct anv_device *device;
   struct anv_batch batch;
   uint32_t batch_data[512];
   struct anv_reloc_list batch_relocs;
   uint32_t dynamic_state_mask;
   struct anv_dynamic_state dynamic_state;

   struct anv_pipeline_layout *layout;

   struct brw_vs_prog_data vs_prog_data;
   struct brw_wm_prog_data wm_prog_data;
   struct brw_gs_prog_data gs_prog_data;
   struct brw_cs_prog_data cs_prog_data;
   bool writes_point_size;
   struct brw_stage_prog_data *prog_data[MESA_SHADER_STAGES];
   uint32_t scratch_start[MESA_SHADER_STAGES];
   uint32_t total_scratch;

   uint32_t nr_vs_entries;
   uint32_t nr_gs_entries;

   VkShaderStageFlags active_stages;
   struct anv_state blend_state;

   uint32_t ps_grf_start0;
   uint32_t ps_grf_start2;

   uint32_t binding_stride[MAX_VBS];
   bool instancing_enable[MAX_VBS];
   bool primitive_restart;

   uint32_t cs_thread_width_max;
   uint32_t cs_right_mask;

   struct {
      uint32_t depth_stencil_state[3];
   } gen7;

   struct {
      uint32_t wm_depth_stencil[3];
   } gen8;

   struct {
      uint32_t wm_depth_stencil[4];
   } gen9;
};
struct anv_graphics_pipeline_create_info {
   /**
    * If non-negative, overrides the color attachment count of the pipeline's
    * subpass.
    */
   int8_t color_attachment_count;

   bool disable_viewport;
   bool disable_scissor;
};
VkResult
anv_pipeline_init(struct anv_pipeline *pipeline, struct anv_device *device,
                  struct anv_pipeline_cache *cache,
                  const VkGraphicsPipelineCreateInfo *pCreateInfo,
                  const struct anv_graphics_pipeline_create_info *extra,
                  const VkAllocationCallbacks *alloc);

VkResult
anv_pipeline_compile_cs(struct anv_pipeline *pipeline,
                        struct anv_pipeline_cache *cache,
                        const VkComputePipelineCreateInfo *info,
                        struct anv_shader_module *module,
                        const char *entrypoint,
                        const VkSpecializationInfo *spec_info);

VkResult
anv_graphics_pipeline_create(VkDevice device,
                             VkPipelineCache cache,
                             const VkGraphicsPipelineCreateInfo *pCreateInfo,
                             const struct anv_graphics_pipeline_create_info *extra,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);

VkResult
gen7_graphics_pipeline_create(VkDevice _device,
                              struct anv_pipeline_cache *cache,
                              const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              const struct anv_graphics_pipeline_create_info *extra,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen75_graphics_pipeline_create(VkDevice _device,
                               struct anv_pipeline_cache *cache,
                               const VkGraphicsPipelineCreateInfo *pCreateInfo,
                               const struct anv_graphics_pipeline_create_info *extra,
                               const VkAllocationCallbacks *alloc,
                               VkPipeline *pPipeline);

VkResult
gen8_graphics_pipeline_create(VkDevice _device,
                              struct anv_pipeline_cache *cache,
                              const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              const struct anv_graphics_pipeline_create_info *extra,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen9_graphics_pipeline_create(VkDevice _device,
                              struct anv_pipeline_cache *cache,
                              const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              const struct anv_graphics_pipeline_create_info *extra,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen7_compute_pipeline_create(VkDevice _device,
                             struct anv_pipeline_cache *cache,
                             const VkComputePipelineCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);

VkResult
gen75_compute_pipeline_create(VkDevice _device,
                              struct anv_pipeline_cache *cache,
                              const VkComputePipelineCreateInfo *pCreateInfo,
                              const VkAllocationCallbacks *alloc,
                              VkPipeline *pPipeline);

VkResult
gen8_compute_pipeline_create(VkDevice _device,
                             struct anv_pipeline_cache *cache,
                             const VkComputePipelineCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);

VkResult
gen9_compute_pipeline_create(VkDevice _device,
                             struct anv_pipeline_cache *cache,
                             const VkComputePipelineCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *alloc,
                             VkPipeline *pPipeline);
struct anv_format_swizzle {
   unsigned r:2;
   unsigned g:2;
   unsigned b:2;
   unsigned a:2;
};

struct anv_format {
   const VkFormat vk_format;
   enum isl_format isl_format; /**< RENDER_SURFACE_STATE.SurfaceFormat */
   const struct isl_format_layout *isl_layout;
   struct anv_format_swizzle swizzle;
   bool has_depth;
   bool has_stencil;
};

const struct anv_format *
anv_format_for_vk_format(VkFormat format);

enum isl_format
anv_get_isl_format(VkFormat format, VkImageAspectFlags aspect,
                   VkImageTiling tiling, struct anv_format_swizzle *swizzle);

static inline bool
anv_format_is_color(const struct anv_format *format)
{
   return !format->has_depth && !format->has_stencil;
}

static inline bool
anv_format_is_depth_or_stencil(const struct anv_format *format)
{
   return format->has_depth || format->has_stencil;
}
/**
 * Subsurface of an anv_image.
 */
struct anv_surface {
   struct isl_surf isl;

   /**
    * Offset from VkImage's base address, as bound by vkBindImageMemory().
    */
   uint32_t offset;
};

struct anv_image {
   /* The original VkFormat provided by the client. This may not match any
    * of the actual surface formats.
    */
   VkFormat vk_format;
   const struct anv_format *format;
   uint32_t levels;
   uint32_t array_size;
   uint32_t samples; /**< VkImageCreateInfo::samples */
   VkImageUsageFlags usage; /**< Superset of VkImageCreateInfo::usage. */
   VkImageTiling tiling; /**< VkImageCreateInfo::tiling */

   /* Set when bound */
   struct anv_bo *bo;
   VkDeviceSize offset;

   /**
    * Image subsurfaces
    *
    * For each foo, anv_image::foo_surface is valid if and only if
    * anv_image::format has a foo aspect.
    *
    * The hardware requires that the depth buffer and stencil buffer be
    * separate surfaces. From Vulkan's perspective, though, depth and stencil
    * reside in the same VkImage. To satisfy both the hardware and Vulkan, we
    * allocate the depth and stencil buffers as separate surfaces in the same
    * bo.
    */
   union {
      struct anv_surface color_surface;

      struct {
         struct anv_surface depth_surface;
         struct anv_surface stencil_surface;
      };
   };
};

struct anv_image_view {
   const struct anv_image *image; /**< VkImageViewCreateInfo::image */
   struct anv_bo *bo;
   uint32_t offset; /**< Offset into bo. */

   VkImageAspectFlags aspect_mask;
   VkFormat vk_format;
   VkComponentMapping swizzle;
   enum isl_format format;
   uint32_t base_layer;
   uint32_t base_mip;
   VkExtent3D level_0_extent; /**< Extent of ::image's level 0 adjusted for ::vk_format. */
   VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */

   /** RENDER_SURFACE_STATE when using image as a color render target. */
   struct anv_state color_rt_surface_state;

   /** RENDER_SURFACE_STATE when using image as a sampler surface. */
   struct anv_state sampler_surface_state;

   /** RENDER_SURFACE_STATE when using image as a storage image. */
   struct anv_state storage_surface_state;
};
struct anv_image_create_info {
   const VkImageCreateInfo *vk_info;
   isl_tiling_flags_t isl_tiling_flags;
   uint32_t stride;
};

VkResult anv_image_create(VkDevice _device,
                          const struct anv_image_create_info *info,
                          const VkAllocationCallbacks *alloc,
                          VkImage *pImage);

struct anv_surface *
anv_image_get_surface_for_aspect_mask(struct anv_image *image,
                                      VkImageAspectFlags aspect_mask);
void anv_image_view_init(struct anv_image_view *view,
                         struct anv_device *device,
                         const VkImageViewCreateInfo *pCreateInfo,
                         struct anv_cmd_buffer *cmd_buffer,
                         uint32_t offset);
void
anv_fill_image_surface_state(struct anv_device *device, struct anv_state state,
                             struct anv_image_view *iview,
                             const VkImageViewCreateInfo *pCreateInfo,
                             VkImageUsageFlagBits usage);
void
gen7_fill_image_surface_state(struct anv_device *device, void *state_map,
                              struct anv_image_view *iview,
                              const VkImageViewCreateInfo *pCreateInfo,
                              VkImageUsageFlagBits usage);
void
gen75_fill_image_surface_state(struct anv_device *device, void *state_map,
                               struct anv_image_view *iview,
                               const VkImageViewCreateInfo *pCreateInfo,
                               VkImageUsageFlagBits usage);
void
gen8_fill_image_surface_state(struct anv_device *device, void *state_map,
                              struct anv_image_view *iview,
                              const VkImageViewCreateInfo *pCreateInfo,
                              VkImageUsageFlagBits usage);
void
gen9_fill_image_surface_state(struct anv_device *device, void *state_map,
                              struct anv_image_view *iview,
                              const VkImageViewCreateInfo *pCreateInfo,
                              VkImageUsageFlagBits usage);
struct anv_buffer_view {
   enum isl_format format; /**< VkBufferViewCreateInfo::format */
   struct anv_bo *bo;
   uint32_t offset; /**< Offset into bo. */
   uint64_t range; /**< VkBufferViewCreateInfo::range */

   struct anv_state surface_state;
   struct anv_state storage_surface_state;
};

const struct anv_format *
anv_format_for_descriptor_type(VkDescriptorType type);

void anv_fill_buffer_surface_state(struct anv_device *device,
                                   struct anv_state state,
                                   enum isl_format format,
                                   uint32_t offset, uint32_t range,
                                   uint32_t stride);
void gen7_fill_buffer_surface_state(void *state, enum isl_format format,
                                    uint32_t offset, uint32_t range,
                                    uint32_t stride);
void gen75_fill_buffer_surface_state(void *state, enum isl_format format,
                                     uint32_t offset, uint32_t range,
                                     uint32_t stride);
void gen8_fill_buffer_surface_state(void *state, enum isl_format format,
                                    uint32_t offset, uint32_t range,
                                    uint32_t stride);
void gen9_fill_buffer_surface_state(void *state, enum isl_format format,
                                    uint32_t offset, uint32_t range,
                                    uint32_t stride);
void anv_image_view_fill_image_param(struct anv_device *device,
                                     struct anv_image_view *view,
                                     struct brw_image_param *param);
void anv_buffer_view_fill_image_param(struct anv_device *device,
                                      struct anv_buffer_view *view,
                                      struct brw_image_param *param);
struct anv_sampler {
   uint32_t state[4];
};

struct anv_framebuffer {
   uint32_t width;
   uint32_t height;
   uint32_t layers;

   uint32_t attachment_count;
   struct anv_image_view *attachments[0];
};

struct anv_subpass {
   uint32_t input_count;
   uint32_t *input_attachments;
   uint32_t color_count;
   uint32_t *color_attachments;
   uint32_t *resolve_attachments;
   uint32_t depth_stencil_attachment;

   /** Subpass has at least one resolve attachment */
   bool has_resolve;
};

struct anv_render_pass_attachment {
   const struct anv_format *format;
   uint32_t samples;
   VkAttachmentLoadOp load_op;
   VkAttachmentLoadOp stencil_load_op;
};
struct anv_render_pass {
   uint32_t attachment_count;
   uint32_t subpass_count;
   uint32_t *subpass_attachments;
   struct anv_render_pass_attachment *attachments;
   struct anv_subpass subpasses[0];
};

extern struct anv_render_pass anv_meta_dummy_renderpass;
struct anv_query_pool_slot {
   uint64_t begin;
   uint64_t end;
   uint64_t available;
};

struct anv_query_pool {
   VkQueryType type;
   uint32_t slots;
   struct anv_bo bo;
};
VkResult anv_device_init_meta(struct anv_device *device);
void anv_device_finish_meta(struct anv_device *device);

void *anv_lookup_entrypoint(const char *name);

void anv_dump_image_to_ppm(struct anv_device *device,
                           struct anv_image *image, unsigned miplevel,
                           unsigned array_layer, const char *filename);
#define ANV_DEFINE_HANDLE_CASTS(__anv_type, __VkType)          \
                                                               \
   static inline struct __anv_type *                           \
   __anv_type ## _from_handle(__VkType _handle)                \
   {                                                           \
      return (struct __anv_type *) _handle;                    \
   }                                                           \
                                                               \
   static inline __VkType                                      \
   __anv_type ## _to_handle(struct __anv_type *_obj)           \
   {                                                           \
      return (__VkType) _obj;                                  \
   }

#define ANV_DEFINE_NONDISP_HANDLE_CASTS(__anv_type, __VkType)  \
                                                               \
   static inline struct __anv_type *                           \
   __anv_type ## _from_handle(__VkType _handle)                \
   {                                                           \
      return (struct __anv_type *)(uintptr_t) _handle;         \
   }                                                           \
                                                               \
   static inline __VkType                                      \
   __anv_type ## _to_handle(struct __anv_type *_obj)           \
   {                                                           \
      return (__VkType)(uintptr_t) _obj;                       \
   }

#define ANV_FROM_HANDLE(__anv_type, __name, __handle) \
   struct __anv_type *__name = __anv_type ## _from_handle(__handle)
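
/* Example (illustrative): a typical entrypoint prologue using the casts
 * these macros instantiate below:
 *
 *    void anv_DestroyBuffer(VkDevice _device, VkBuffer _buffer,
 *                           const VkAllocationCallbacks *pAllocator)
 *    {
 *       ANV_FROM_HANDLE(anv_device, device, _device);
 *       ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
 *       ...
 *    }
 */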
ANV_DEFINE_HANDLE_CASTS(anv_cmd_buffer, VkCommandBuffer)
ANV_DEFINE_HANDLE_CASTS(anv_device, VkDevice)
ANV_DEFINE_HANDLE_CASTS(anv_instance, VkInstance)
ANV_DEFINE_HANDLE_CASTS(anv_physical_device, VkPhysicalDevice)
ANV_DEFINE_HANDLE_CASTS(anv_queue, VkQueue)

ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_cmd_pool, VkCommandPool)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer, VkBuffer)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer_view, VkBufferView)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set, VkDescriptorSet)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set_layout, VkDescriptorSetLayout)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_device_memory, VkDeviceMemory)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_fence, VkFence)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_event, VkEvent)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_framebuffer, VkFramebuffer)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_image, VkImage)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_image_view, VkImageView)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline_cache, VkPipelineCache)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline, VkPipeline)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline_layout, VkPipelineLayout)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_query_pool, VkQueryPool)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_render_pass, VkRenderPass)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_sampler, VkSampler)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_shader_module, VkShaderModule)
#define ANV_DEFINE_STRUCT_CASTS(__anv_type, __VkType)                  \
                                                                       \
   static inline const __VkType *                                      \
   __anv_type ## _to_ ## __VkType(const struct __anv_type *__anv_obj)  \
   {                                                                   \
      return (const __VkType *) __anv_obj;                             \
   }

#define ANV_COMMON_TO_STRUCT(__VkType, __vk_name, __common_name) \
   const __VkType *__vk_name = anv_common_to_ ## __VkType(__common_name)

ANV_DEFINE_STRUCT_CASTS(anv_common, VkMemoryBarrier)
ANV_DEFINE_STRUCT_CASTS(anv_common, VkBufferMemoryBarrier)
ANV_DEFINE_STRUCT_CASTS(anv_common, VkImageMemoryBarrier)