/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
37 #define __gen_validate_value(x) VALGRIND_CHECK_MEM_IS_DEFINED(&(x), sizeof(x))
42 #include "brw_device_info.h"
43 #include "util/macros.h"
46 #include <vulkan/vulkan.h>
47 #include <vulkan/vulkan_intel.h>
48 #include <vulkan/vk_wsi_lunarg.h>
50 #include "anv_entrypoints.h"
52 #include "brw_context.h"
58 #define anv_noreturn __attribute__((__noreturn__))
59 #define anv_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
/* NOTE: classic min/max macros — they evaluate their arguments more than
 * once, so never pass expressions with side effects (e.g. MIN(i++, n)).
 * Kept in this simple form so they remain usable in constant expressions
 * such as array sizes.
 */
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))
/**
 * Round v up to the next multiple of a.
 *
 * a must be a power of 2 (the bit trick relies on a - 1 being an all-ones
 * low mask); v + a - 1 may wrap if v is within a of UINT32_MAX.
 */
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   return (v + a - 1) & ~(a - 1);
}
/**
 * Round v up to the next multiple of a (signed variant).
 *
 * a must be a positive power of 2.  Works for negative v as well:
 * e.g. align_i32(-5, 4) == -4.
 */
static inline int32_t
align_i32(int32_t v, int32_t a)
{
   return (v + a - 1) & ~(a - 1);
}
/** Alignment must be a power of 2. */
static inline bool
anv_is_aligned(uintmax_t n, uintmax_t a)
{
   /* a & -a isolates the lowest set bit; equality with a itself proves a
    * is a power of 2 (exactly one bit set). */
   assert(a == (a & -a));
   return (n & (a - 1)) == 0;
}
/**
 * Size of mip level `levels` below a base dimension of n: n >> levels,
 * clamped to a minimum of 1 for any nonzero n; 0 stays 0.
 *
 * NOTE(review): interior lines of this function were lost in extraction;
 * the n == 0 special case is restored from the upstream implementation —
 * confirm against the original file.
 */
static inline uint32_t
anv_minify(uint32_t n, uint32_t levels)
{
   if (n == 0)
      return 0;

   const uint32_t minified = n >> levels;
   return minified > 0 ? minified : 1;
}
/**
 * Clear the bits of clear_mask from *inout_mask.
 *
 * Returns true if any of those bits were actually set (i.e. the mask
 * changed), false otherwise.
 *
 * NOTE(review): the return statements were lost in extraction and are
 * restored from the upstream implementation — confirm against the original.
 */
static inline bool
anv_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   if (*inout_mask & clear_mask) {
      *inout_mask &= ~clear_mask;
      return true;
   } else {
      return false;
   }
}
/**
 * Iterate b over the indices of the set bits of dword, lowest bit first.
 *
 * The shift uses 1u (not 1): with a signed 1, clearing bit 31 would be
 * 1 << 31, which is signed-overflow undefined behavior.
 */
#define for_each_bit(b, dword)                                  \
   for (uint32_t __dword = (dword);                             \
        (b) = __builtin_ffs(__dword) - 1, __dword;              \
        __dword &= ~(1u << (b)))
/* Define no kernel as 1, since that's an illegal offset for a kernel */
113 VkStructureType sType
;
117 /* Whenever we generate an error, pass it through this function. Useful for
118 * debugging, where we can break on it. Only call at error site, not when
119 * propagating errors. Might be useful to plug in a stack trace here.
122 static inline VkResult
123 vk_error(VkResult error
)
126 fprintf(stderr
, "vk_error: %x\n", error
);
132 void __anv_finishme(const char *file
, int line
, const char *format
, ...)
133 anv_printflike(3, 4);
134 void anv_loge(const char *format
, ...) anv_printflike(1, 2);
135 void anv_loge_v(const char *format
, va_list va
);
/**
 * Print a FINISHME message, including its source location.
 *
 * Note: no trailing semicolon in the expansion — callers supply their own,
 * so the macro composes safely with if/else (the original trailing `;`
 * produced two statements per use).
 */
#define anv_finishme(format, ...) \
   __anv_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__)
143 /* A non-fatal assert. Useful for debugging. */
145 #define anv_assert(x) ({ \
146 if (unlikely(!(x))) \
147 fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x); \
150 #define anv_assert(x)
153 void anv_abortf(const char *format
, ...) anv_noreturn
anv_printflike(1, 2);
154 void anv_abortfv(const char *format
, va_list va
) anv_noreturn
;
156 #define stub_return(v) \
158 anv_finishme("stub %s", __func__); \
164 anv_finishme("stub %s", __func__); \
/**
 * A dynamically growable, circular buffer.  Elements are added at head and
 * removed from tail.  head and tail are free-running uint32_t indices and we
 * only compute the modulo with size when accessing the array.  This way,
 * number of bytes in the queue is always head - tail, even in case of
 * wraparound.
 */
179 uint32_t element_size
;
184 int anv_vector_init(struct anv_vector
*queue
, uint32_t element_size
, uint32_t size
);
185 void *anv_vector_add(struct anv_vector
*queue
);
186 void *anv_vector_remove(struct anv_vector
*queue
);
189 anv_vector_length(struct anv_vector
*queue
)
191 return (queue
->head
- queue
->tail
) / queue
->element_size
;
195 anv_vector_finish(struct anv_vector
*queue
)
200 #define anv_vector_foreach(elem, queue) \
201 static_assert(__builtin_types_compatible_p(__typeof__(queue), struct anv_vector *), ""); \
202 for (uint32_t __anv_vector_offset = (queue)->tail; \
203 elem = (queue)->data + (__anv_vector_offset & ((queue)->size - 1)), __anv_vector_offset < (queue)->head; \
204 __anv_vector_offset += (queue)->element_size)
212 /* This field is here for the benefit of the aub dumper. It can (and for
213 * userptr bos it must) be set to the cpu map of the buffer. Destroying
214 * the bo won't clean up the mmap, it's still the responsibility of the bo
215 * user to do that. */
/* Represents a lock-free linked list of "free" things.  This is used by
 * both the block pool and the state pools.  Unfortunately, in order to
 * solve the ABA problem, we can't use a single uint32_t head.
 */
223 union anv_free_list
{
227 /* A simple count that is incremented every time the head changes. */
233 #define ANV_FREE_LIST_EMPTY ((union anv_free_list) { { 1, 0 } })
235 struct anv_block_pool
{
236 struct anv_device
*device
;
244 * Array of mmaps and gem handles owned by the block pool, reclaimed when
245 * the block pool is destroyed.
247 struct anv_vector mmap_cleanups
;
252 union anv_free_list free_list
;
255 struct anv_block_state
{
271 struct anv_fixed_size_state_pool
{
273 union anv_free_list free_list
;
274 struct anv_block_state block
;
277 #define ANV_MIN_STATE_SIZE_LOG2 6
278 #define ANV_MAX_STATE_SIZE_LOG2 10
280 #define ANV_STATE_BUCKETS (ANV_MAX_STATE_SIZE_LOG2 - ANV_MIN_STATE_SIZE_LOG2)
282 struct anv_state_pool
{
283 struct anv_block_pool
*block_pool
;
284 struct anv_fixed_size_state_pool buckets
[ANV_STATE_BUCKETS
];
287 struct anv_state_stream
{
288 struct anv_block_pool
*block_pool
;
290 uint32_t current_block
;
294 void anv_block_pool_init(struct anv_block_pool
*pool
,
295 struct anv_device
*device
, uint32_t block_size
);
296 void anv_block_pool_finish(struct anv_block_pool
*pool
);
297 uint32_t anv_block_pool_alloc(struct anv_block_pool
*pool
);
298 void anv_block_pool_free(struct anv_block_pool
*pool
, uint32_t offset
);
299 void anv_state_pool_init(struct anv_state_pool
*pool
,
300 struct anv_block_pool
*block_pool
);
301 struct anv_state
anv_state_pool_alloc(struct anv_state_pool
*pool
,
302 size_t state_size
, size_t alignment
);
303 void anv_state_pool_free(struct anv_state_pool
*pool
, struct anv_state state
);
304 void anv_state_stream_init(struct anv_state_stream
*stream
,
305 struct anv_block_pool
*block_pool
);
306 void anv_state_stream_finish(struct anv_state_stream
*stream
);
307 struct anv_state
anv_state_stream_alloc(struct anv_state_stream
*stream
,
308 uint32_t size
, uint32_t alignment
);
311 * Implements a pool of re-usable BOs. The interface is identical to that
312 * of block_pool except that each block is its own BO.
315 struct anv_device
*device
;
322 void anv_bo_pool_init(struct anv_bo_pool
*pool
,
323 struct anv_device
*device
, uint32_t block_size
);
324 void anv_bo_pool_finish(struct anv_bo_pool
*pool
);
325 VkResult
anv_bo_pool_alloc(struct anv_bo_pool
*pool
, struct anv_bo
*bo
);
326 void anv_bo_pool_free(struct anv_bo_pool
*pool
, const struct anv_bo
*bo
);
328 struct anv_physical_device
{
329 struct anv_instance
* instance
;
334 const struct brw_device_info
* info
;
338 struct anv_instance
{
339 void * pAllocUserData
;
340 PFN_vkAllocFunction pfnAlloc
;
341 PFN_vkFreeFunction pfnFree
;
343 uint32_t physicalDeviceCount
;
344 struct anv_physical_device physicalDevice
;
347 struct anv_meta_state
{
354 VkPipelineLayout pipeline_layout
;
355 VkDescriptorSetLayout ds_layout
;
359 VkDynamicRasterState rs_state
;
360 VkDynamicColorBlendState cb_state
;
361 VkDynamicDepthStencilState ds_state
;
366 struct anv_device
* device
;
368 struct anv_state_pool
* pool
;
371 * Serial number of the most recently completed batch executed on the
374 struct anv_state completed_serial
;
377 * The next batch submitted to the engine will be assigned this serial
380 uint32_t next_serial
;
382 uint32_t last_collected_serial
;
386 struct anv_instance
* instance
;
388 struct brw_device_info info
;
394 struct anv_bo_pool batch_bo_pool
;
396 struct anv_block_pool dynamic_state_block_pool
;
397 struct anv_state_pool dynamic_state_pool
;
399 struct anv_block_pool instruction_block_pool
;
400 struct anv_block_pool surface_state_block_pool
;
401 struct anv_state_pool surface_state_pool
;
403 struct anv_meta_state meta_state
;
405 struct anv_state border_colors
;
407 struct anv_queue queue
;
409 struct anv_block_pool scratch_block_pool
;
411 struct anv_compiler
* compiler
;
412 struct anv_aub_writer
* aub_writer
;
413 pthread_mutex_t mutex
;
417 anv_device_alloc(struct anv_device
* device
,
420 VkSystemAllocType allocType
);
423 anv_device_free(struct anv_device
* device
,
426 void* anv_gem_mmap(struct anv_device
*device
,
427 uint32_t gem_handle
, uint64_t offset
, uint64_t size
);
428 void anv_gem_munmap(void *p
, uint64_t size
);
429 uint32_t anv_gem_create(struct anv_device
*device
, size_t size
);
430 void anv_gem_close(struct anv_device
*device
, int gem_handle
);
431 int anv_gem_userptr(struct anv_device
*device
, void *mem
, size_t size
);
432 int anv_gem_wait(struct anv_device
*device
, int gem_handle
, int64_t *timeout_ns
);
433 int anv_gem_execbuffer(struct anv_device
*device
,
434 struct drm_i915_gem_execbuffer2
*execbuf
);
435 int anv_gem_set_tiling(struct anv_device
*device
, int gem_handle
,
436 uint32_t stride
, uint32_t tiling
);
437 int anv_gem_create_context(struct anv_device
*device
);
438 int anv_gem_destroy_context(struct anv_device
*device
, int context
);
439 int anv_gem_get_param(int fd
, uint32_t param
);
440 int anv_gem_get_aperture(struct anv_physical_device
*physical_dev
, uint64_t *size
);
441 int anv_gem_handle_to_fd(struct anv_device
*device
, int gem_handle
);
442 int anv_gem_fd_to_handle(struct anv_device
*device
, int fd
);
443 int anv_gem_userptr(struct anv_device
*device
, void *mem
, size_t size
);
445 VkResult
anv_bo_init_new(struct anv_bo
*bo
, struct anv_device
*device
, uint64_t size
);
447 struct anv_reloc_list
{
450 struct drm_i915_gem_relocation_entry
* relocs
;
451 struct anv_bo
** reloc_bos
;
454 VkResult
anv_reloc_list_init(struct anv_reloc_list
*list
,
455 struct anv_device
*device
);
456 void anv_reloc_list_finish(struct anv_reloc_list
*list
,
457 struct anv_device
*device
);
459 uint64_t anv_reloc_list_add(struct anv_reloc_list
*list
,
460 struct anv_device
*device
,
461 uint32_t offset
, struct anv_bo
*target_bo
,
464 struct anv_batch_bo
{
467 /* Bytes actually consumed in this batch BO */
470 /* These offsets reference the per-batch reloc list */
474 struct anv_batch_bo
* prev_batch_bo
;
478 struct anv_device
* device
;
484 struct anv_reloc_list relocs
;
486 /* This callback is called (with the associated user data) in the event
487 * that the batch runs out of space.
489 VkResult (*extend_cb
)(struct anv_batch
*, void *);
493 void *anv_batch_emit_dwords(struct anv_batch
*batch
, int num_dwords
);
494 void anv_batch_emit_batch(struct anv_batch
*batch
, struct anv_batch
*other
);
495 uint64_t anv_batch_emit_reloc(struct anv_batch
*batch
,
496 void *location
, struct anv_bo
*bo
, uint32_t offset
);
503 #define __gen_address_type struct anv_address
504 #define __gen_user_data struct anv_batch
506 static inline uint64_t
507 __gen_combine_address(struct anv_batch
*batch
, void *location
,
508 const struct anv_address address
, uint32_t delta
)
510 if (address
.bo
== NULL
) {
513 assert(batch
->start
<= location
&& location
< batch
->end
);
515 return anv_batch_emit_reloc(batch
, location
, address
.bo
, address
.offset
+ delta
);
519 #include "gen7_pack.h"
520 #include "gen75_pack.h"
521 #undef GEN8_3DSTATE_MULTISAMPLE
522 #include "gen8_pack.h"
524 #define anv_batch_emit(batch, cmd, ...) do { \
525 struct cmd __template = { \
529 void *__dst = anv_batch_emit_dwords(batch, cmd ## _length); \
530 cmd ## _pack(batch, __dst, &__template); \
531 VG(VALGRIND_CHECK_MEM_IS_DEFINED(__dst, cmd ## _length * 4)); \
534 #define anv_batch_emitn(batch, n, cmd, ...) ({ \
535 struct cmd __template = { \
537 .DwordLength = n - cmd ## _length_bias, \
540 void *__dst = anv_batch_emit_dwords(batch, n); \
541 cmd ## _pack(batch, __dst, &__template); \
545 #define anv_batch_emit_merge(batch, dwords0, dwords1) \
549 assert(ARRAY_SIZE(dwords0) == ARRAY_SIZE(dwords1)); \
550 dw = anv_batch_emit_dwords((batch), ARRAY_SIZE(dwords0)); \
551 for (uint32_t i = 0; i < ARRAY_SIZE(dwords0); i++) \
552 dw[i] = (dwords0)[i] | (dwords1)[i]; \
553 VG(VALGRIND_CHECK_MEM_IS_DEFINED(dw, ARRAY_SIZE(dwords0) * 4));\
556 #define GEN8_MOCS { \
557 .MemoryTypeLLCeLLCCacheabilityControl = WB, \
558 .TargetCache = L3DefertoPATforLLCeLLCselection, \
562 struct anv_device_memory
{
564 VkDeviceSize map_size
;
568 struct anv_dynamic_vp_state
{
569 struct anv_state sf_clip_vp
;
570 struct anv_state cc_vp
;
571 struct anv_state scissor
;
574 struct anv_dynamic_rs_state
{
575 uint32_t state_sf
[GEN8_3DSTATE_SF_length
];
576 uint32_t state_raster
[GEN8_3DSTATE_RASTER_length
];
579 struct anv_dynamic_ds_state
{
580 uint32_t state_wm_depth_stencil
[GEN8_3DSTATE_WM_DEPTH_STENCIL_length
];
581 uint32_t state_color_calc
[GEN8_COLOR_CALC_STATE_length
];
584 struct anv_dynamic_cb_state
{
585 uint32_t state_color_calc
[GEN8_COLOR_CALC_STATE_length
];
589 struct anv_descriptor_slot
{
594 struct anv_descriptor_set_layout
{
596 uint32_t surface_count
;
597 struct anv_descriptor_slot
*surface_start
;
598 uint32_t sampler_count
;
599 struct anv_descriptor_slot
*sampler_start
;
600 } stage
[VK_SHADER_STAGE_NUM
];
603 uint32_t num_dynamic_buffers
;
604 uint32_t shader_stages
;
605 struct anv_descriptor_slot entries
[0];
608 struct anv_descriptor
{
609 struct anv_sampler
*sampler
;
610 struct anv_surface_view
*view
;
613 struct anv_descriptor_set
{
614 struct anv_descriptor descriptors
[0];
618 anv_descriptor_set_create(struct anv_device
*device
,
619 const struct anv_descriptor_set_layout
*layout
,
620 struct anv_descriptor_set
**out_set
);
623 anv_descriptor_set_destroy(struct anv_device
*device
,
624 struct anv_descriptor_set
*set
);
630 struct anv_pipeline_layout
{
632 struct anv_descriptor_set_layout
*layout
;
633 uint32_t surface_start
[VK_SHADER_STAGE_NUM
];
634 uint32_t sampler_start
[VK_SHADER_STAGE_NUM
];
640 uint32_t surface_count
;
641 uint32_t sampler_count
;
642 } stage
[VK_SHADER_STAGE_NUM
];
646 struct anv_device
* device
;
654 #define ANV_CMD_BUFFER_PIPELINE_DIRTY (1 << 0)
655 #define ANV_CMD_BUFFER_RS_DIRTY (1 << 2)
656 #define ANV_CMD_BUFFER_DS_DIRTY (1 << 3)
657 #define ANV_CMD_BUFFER_CB_DIRTY (1 << 4)
658 #define ANV_CMD_BUFFER_VP_DIRTY (1 << 5)
659 #define ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY (1 << 6)
661 struct anv_vertex_binding
{
662 struct anv_buffer
* buffer
;
666 struct anv_descriptor_set_binding
{
667 struct anv_descriptor_set
* set
;
668 uint32_t dynamic_offsets
[128];
671 /** State required while building cmd buffer */
672 struct anv_cmd_state
{
673 uint32_t current_pipeline
;
676 uint32_t compute_dirty
;
677 uint32_t descriptors_dirty
;
678 uint32_t scratch_size
;
679 struct anv_pipeline
* pipeline
;
680 struct anv_pipeline
* compute_pipeline
;
681 struct anv_framebuffer
* framebuffer
;
682 struct anv_render_pass
* pass
;
683 struct anv_subpass
* subpass
;
684 struct anv_dynamic_rs_state
* rs_state
;
685 struct anv_dynamic_ds_state
* ds_state
;
686 struct anv_dynamic_vp_state
* vp_state
;
687 struct anv_dynamic_cb_state
* cb_state
;
688 uint32_t state_vf
[GEN8_3DSTATE_VF_length
];
689 struct anv_vertex_binding vertex_bindings
[MAX_VBS
];
690 struct anv_descriptor_set_binding descriptors
[MAX_SETS
];
693 VkResult
anv_cmd_state_init(struct anv_cmd_state
*state
);
694 void anv_cmd_state_fini(struct anv_cmd_state
*state
);
696 struct anv_cmd_buffer
{
697 struct anv_device
* device
;
699 /* Fields required for the actual chain of anv_batch_bo's.
701 * These fields are initialized by anv_cmd_buffer_init_batch_bo_chain().
703 struct anv_batch batch
;
704 struct anv_batch_bo
* last_batch_bo
;
705 struct anv_batch_bo
* surface_batch_bo
;
706 uint32_t surface_next
;
707 struct anv_reloc_list surface_relocs
;
709 /* Information needed for execbuf that's generated when the command
712 struct drm_i915_gem_execbuffer2 execbuf
;
713 struct drm_i915_gem_exec_object2
* exec2_objects
;
714 uint32_t exec2_bo_count
;
715 struct anv_bo
** exec2_bos
;
716 uint32_t exec2_array_length
;
720 /* Stream objects for storing temporary data */
721 struct anv_state_stream surface_state_stream
;
722 struct anv_state_stream dynamic_state_stream
;
724 struct anv_cmd_state state
;
727 VkResult
anv_cmd_buffer_init_batch_bo_chain(struct anv_cmd_buffer
*cmd_buffer
);
728 void anv_cmd_buffer_fini_batch_bo_chain(struct anv_cmd_buffer
*cmd_buffer
);
729 void anv_cmd_buffer_reset_batch_bo_chain(struct anv_cmd_buffer
*cmd_buffer
);
730 void anv_cmd_buffer_emit_batch_buffer_end(struct anv_cmd_buffer
*cmd_buffer
);
731 void anv_cmd_buffer_compute_validate_list(struct anv_cmd_buffer
*cmd_buffer
);
734 anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer
*cmd_buffer
,
735 uint32_t size
, uint32_t alignment
);
737 anv_cmd_buffer_alloc_dynamic_state(struct anv_cmd_buffer
*cmd_buffer
,
738 uint32_t size
, uint32_t alignment
);
740 VkResult
anv_cmd_buffer_new_surface_state_bo(struct anv_cmd_buffer
*cmd_buffer
);
742 void anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer
*cmd_buffer
);
744 void anv_cmd_buffer_begin_subpass(struct anv_cmd_buffer
*cmd_buffer
,
745 struct anv_subpass
*subpass
);
747 void anv_cmd_buffer_clear_attachments(struct anv_cmd_buffer
*cmd_buffer
,
748 struct anv_render_pass
*pass
,
749 const VkClearValue
*clear_values
);
751 void anv_cmd_buffer_dump(struct anv_cmd_buffer
*cmd_buffer
);
752 void anv_aub_writer_destroy(struct anv_aub_writer
*writer
);
756 struct drm_i915_gem_execbuffer2 execbuf
;
757 struct drm_i915_gem_exec_object2 exec2_objects
[1];
761 struct anv_shader_module
{
767 struct anv_shader_module
* module
;
771 struct anv_pipeline
{
772 struct anv_device
* device
;
773 struct anv_batch batch
;
774 uint32_t batch_data
[256];
775 struct anv_shader
* shaders
[VK_SHADER_STAGE_NUM
];
776 struct anv_pipeline_layout
* layout
;
779 struct brw_vs_prog_data vs_prog_data
;
780 struct brw_wm_prog_data wm_prog_data
;
781 struct brw_gs_prog_data gs_prog_data
;
782 struct brw_cs_prog_data cs_prog_data
;
783 bool writes_point_size
;
784 struct brw_stage_prog_data
* prog_data
[VK_SHADER_STAGE_NUM
];
785 uint32_t scratch_start
[VK_SHADER_STAGE_NUM
];
786 uint32_t total_scratch
;
790 uint32_t nr_vs_entries
;
793 uint32_t nr_gs_entries
;
796 uint32_t active_stages
;
797 struct anv_state_stream program_stream
;
798 struct anv_state blend_state
;
803 uint32_t gs_vertex_count
;
807 uint32_t binding_stride
[MAX_VBS
];
809 uint32_t state_sf
[GEN8_3DSTATE_SF_length
];
810 uint32_t state_vf
[GEN8_3DSTATE_VF_length
];
811 uint32_t state_raster
[GEN8_3DSTATE_RASTER_length
];
812 uint32_t state_wm_depth_stencil
[GEN8_3DSTATE_WM_DEPTH_STENCIL_length
];
814 uint32_t cs_thread_width_max
;
815 uint32_t cs_right_mask
;
818 struct anv_pipeline_create_info
{
820 bool disable_viewport
;
821 bool disable_scissor
;
827 anv_pipeline_create(VkDevice device
,
828 const VkGraphicsPipelineCreateInfo
*pCreateInfo
,
829 const struct anv_pipeline_create_info
*extra
,
830 VkPipeline
*pPipeline
);
832 struct anv_compiler
*anv_compiler_create(struct anv_device
*device
);
833 void anv_compiler_destroy(struct anv_compiler
*compiler
);
834 int anv_compiler_run(struct anv_compiler
*compiler
, struct anv_pipeline
*pipeline
);
835 void anv_compiler_free(struct anv_pipeline
*pipeline
);
839 uint16_t surface_format
; /**< RENDER_SURFACE_STATE.SurfaceFormat */
840 uint8_t cpp
; /**< Bytes-per-pixel of anv_format::surface_format. */
841 uint8_t num_channels
;
842 uint16_t depth_format
; /**< 3DSTATE_DEPTH_BUFFER.SurfaceFormat */
846 const struct anv_format
*
847 anv_format_for_vk_format(VkFormat format
);
848 bool anv_is_vk_format_depth_or_stencil(VkFormat format
);
851 * A proxy for the color surfaces, depth surfaces, and stencil surfaces.
855 * Offset from VkImage's base address, as bound by vkBindImageMemory().
859 uint32_t stride
; /**< RENDER_SURFACE_STATE.SurfacePitch */
860 uint16_t qpitch
; /**< RENDER_SURFACE_STATE.QPitch */
863 * \name Alignment of miptree images, in units of pixels.
865 * These fields contain the real alignment values, not the values to be
866 * given to the GPU. For example, if h_align is 4, then program the GPU
870 uint8_t h_align
; /**< RENDER_SURFACE_STATE.SurfaceHorizontalAlignment */
871 uint8_t v_align
; /**< RENDER_SURFACE_STATE.SurfaceVerticalAlignment */
874 uint8_t tile_mode
; /**< RENDER_SURFACE_STATE.TileMode */
891 struct anv_swap_chain
*swap_chain
;
893 /** RENDER_SURFACE_STATE.SurfaceType */
896 /** Primary surface is either color or depth. */
897 struct anv_surface primary_surface
;
899 /** Stencil surface is optional. */
900 struct anv_surface stencil_surface
;
903 struct anv_surface_view
{
904 struct anv_state surface_state
; /**< RENDER_SURFACE_STATE */
906 uint32_t offset
; /**< VkBufferCreateInfo::offset */
907 uint32_t range
; /**< VkBufferCreateInfo::range */
908 VkFormat format
; /**< VkBufferCreateInfo::format */
911 struct anv_buffer_view
{
912 struct anv_surface_view view
;
915 struct anv_image_view
{
916 struct anv_surface_view view
;
920 enum anv_attachment_view_type
{
921 ANV_ATTACHMENT_VIEW_TYPE_COLOR
,
922 ANV_ATTACHMENT_VIEW_TYPE_DEPTH_STENCIL
,
925 struct anv_attachment_view
{
926 enum anv_attachment_view_type attachment_type
;
930 struct anv_color_attachment_view
{
931 struct anv_attachment_view base
;
932 struct anv_surface_view view
;
935 struct anv_depth_stencil_view
{
936 struct anv_attachment_view base
;
940 uint32_t depth_offset
; /**< Offset into bo. */
941 uint32_t depth_stride
; /**< 3DSTATE_DEPTH_BUFFER.SurfacePitch */
942 uint32_t depth_format
; /**< 3DSTATE_DEPTH_BUFFER.SurfaceFormat */
943 uint16_t depth_qpitch
; /**< 3DSTATE_DEPTH_BUFFER.SurfaceQPitch */
945 uint32_t stencil_offset
; /**< Offset into bo. */
946 uint32_t stencil_stride
; /**< 3DSTATE_STENCIL_BUFFER.SurfacePitch */
947 uint16_t stencil_qpitch
; /**< 3DSTATE_STENCIL_BUFFER.SurfaceQPitch */
950 struct anv_image_create_info
{
951 const VkImageCreateInfo
*vk_info
;
952 bool force_tile_mode
;
956 VkResult
anv_image_create(VkDevice _device
,
957 const struct anv_image_create_info
*info
,
960 void anv_image_view_init(struct anv_image_view
*view
,
961 struct anv_device
*device
,
962 const VkImageViewCreateInfo
* pCreateInfo
,
963 struct anv_cmd_buffer
*cmd_buffer
);
965 void anv_color_attachment_view_init(struct anv_color_attachment_view
*view
,
966 struct anv_device
*device
,
967 const VkAttachmentViewCreateInfo
* pCreateInfo
,
968 struct anv_cmd_buffer
*cmd_buffer
);
969 void anv_fill_buffer_surface_state(void *state
, VkFormat format
,
970 uint32_t offset
, uint32_t range
);
972 void anv_surface_view_fini(struct anv_device
*device
,
973 struct anv_surface_view
*view
);
979 struct anv_framebuffer
{
984 /* Viewport for clears */
985 VkDynamicViewportState vp_state
;
987 uint32_t attachment_count
;
988 const struct anv_attachment_view
* attachments
[0];
992 uint32_t input_count
;
993 uint32_t * input_attachments
;
994 uint32_t color_count
;
995 uint32_t * color_attachments
;
996 uint32_t * resolve_attachments
;
997 uint32_t depth_stencil_attachment
;
1000 struct anv_render_pass_attachment
{
1003 VkAttachmentLoadOp load_op
;
1004 VkAttachmentLoadOp stencil_load_op
;
1007 struct anv_render_pass
{
1008 uint32_t attachment_count
;
1009 uint32_t subpass_count
;
1011 struct anv_render_pass_attachment
* attachments
;
1012 struct anv_subpass subpasses
[0];
1015 void anv_device_init_meta(struct anv_device
*device
);
1016 void anv_device_finish_meta(struct anv_device
*device
);
1018 void *anv_lookup_entrypoint(const char *name
);
/**
 * Define to/from conversion helpers between a dispatchable Vulkan handle
 * type and its anv struct.  Dispatchable handles are plain pointers, so the
 * conversions are direct pointer casts.
 *
 * NOTE(review): braces/continuations were lost in extraction; reconstructed
 * from the visible return statements — confirm against the original file.
 */
#define ANV_DEFINE_HANDLE_CASTS(__anv_type, __VkType)                      \
                                                                           \
   static inline struct __anv_type *                                       \
   __anv_type ## _from_handle(__VkType _handle)                            \
   {                                                                       \
      return (struct __anv_type *) _handle;                                \
   }                                                                       \
                                                                           \
   static inline __VkType                                                  \
   __anv_type ## _to_handle(struct __anv_type *_obj)                       \
   {                                                                       \
      return (__VkType) _obj;                                              \
   }
/**
 * Define to/from conversion helpers between a non-dispatchable Vulkan
 * handle type and its anv struct.  Non-dispatchable handles are structs
 * wrapping a uint64_t `handle` member, so the pointer is round-tripped
 * through that integer.
 *
 * NOTE(review): braces/continuations were lost in extraction; reconstructed
 * from the visible return statements — confirm against the original file.
 */
#define ANV_DEFINE_NONDISP_HANDLE_CASTS(__anv_type, __VkType)              \
                                                                           \
   static inline struct __anv_type *                                       \
   __anv_type ## _from_handle(__VkType _handle)                            \
   {                                                                       \
      return (struct __anv_type *) _handle.handle;                         \
   }                                                                       \
                                                                           \
   static inline __VkType                                                  \
   __anv_type ## _to_handle(struct __anv_type *_obj)                       \
   {                                                                       \
      return (__VkType) { .handle = (uint64_t) _obj };                     \
   }
/**
 * Declare a local `struct __anv_type *__name` initialized from the Vulkan
 * handle __handle, using the _from_handle helper generated by
 * ANV_DEFINE_HANDLE_CASTS / ANV_DEFINE_NONDISP_HANDLE_CASTS.
 */
#define ANV_FROM_HANDLE(__anv_type, __name, __handle) \
   struct __anv_type *__name = __anv_type ## _from_handle(__handle)
1051 ANV_DEFINE_HANDLE_CASTS(anv_cmd_buffer
, VkCmdBuffer
)
1052 ANV_DEFINE_HANDLE_CASTS(anv_device
, VkDevice
)
1053 ANV_DEFINE_HANDLE_CASTS(anv_instance
, VkInstance
)
1054 ANV_DEFINE_HANDLE_CASTS(anv_physical_device
, VkPhysicalDevice
)
1055 ANV_DEFINE_HANDLE_CASTS(anv_queue
, VkQueue
)
1056 ANV_DEFINE_HANDLE_CASTS(anv_swap_chain
, VkSwapChainWSI
);
1058 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_attachment_view
, VkAttachmentView
)
1059 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer
, VkBuffer
)
1060 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer_view
, VkBufferView
);
1061 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set
, VkDescriptorSet
)
1062 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set_layout
, VkDescriptorSetLayout
)
1063 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_device_memory
, VkDeviceMemory
)
1064 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_dynamic_cb_state
, VkDynamicColorBlendState
)
1065 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_dynamic_ds_state
, VkDynamicDepthStencilState
)
1066 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_dynamic_rs_state
, VkDynamicRasterState
)
1067 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_dynamic_vp_state
, VkDynamicViewportState
)
1068 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_fence
, VkFence
)
1069 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_framebuffer
, VkFramebuffer
)
1070 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_image
, VkImage
)
1071 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_image_view
, VkImageView
);
1072 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline
, VkPipeline
)
1073 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_pipeline_layout
, VkPipelineLayout
)
1074 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_query_pool
, VkQueryPool
)
1075 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_render_pass
, VkRenderPass
)
1076 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_sampler
, VkSampler
)
1077 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_shader
, VkShader
)
1078 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_shader_module
, VkShaderModule
)
/**
 * Define a reinterpreting cast from a generic input-struct header type
 * (struct anv_common) to a concrete const Vulkan struct pointer.  No
 * validation is performed — the caller is responsible for checking sType.
 *
 * NOTE(review): braces/continuations were lost in extraction; reconstructed
 * from the visible return statement — confirm against the original file.
 */
#define ANV_DEFINE_STRUCT_CASTS(__anv_type, __VkType)                      \
                                                                           \
   static inline const __VkType *                                          \
   __anv_type ## _to_ ## __VkType(const struct __anv_type *__anv_obj)      \
   {                                                                       \
      return (const __VkType *) __anv_obj;                                 \
   }

/**
 * Declare a local `const __VkType *__vk_name` converted from a
 * struct anv_common pointer via the anv_common_to_* helper above.
 */
#define ANV_COMMON_TO_STRUCT(__VkType, __vk_name, __common_name) \
   const __VkType *__vk_name = anv_common_to_ ## __VkType(__common_name)
1091 ANV_DEFINE_STRUCT_CASTS(anv_common
, VkMemoryBarrier
)
1092 ANV_DEFINE_STRUCT_CASTS(anv_common
, VkBufferMemoryBarrier
)
1093 ANV_DEFINE_STRUCT_CASTS(anv_common
, VkImageMemoryBarrier
)