2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
37 #define __gen_validate_value(x) VALGRIND_CHECK_MEM_IS_DEFINED(&(x), sizeof(x))
42 #include "brw_device_info.h"
43 #include "util/macros.h"
46 #include <vulkan/vulkan.h>
47 #include <vulkan/vulkan_intel.h>
48 #include <vulkan/vk_wsi_lunarg.h>
50 #include "entrypoints.h"
52 #include "brw_context.h"
58 #define anv_noreturn __attribute__((__noreturn__))
59 #define anv_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
/* Round `v` up to the next multiple of `a`.  `a` must be a power of two
 * (the bit trick relies on a - 1 being a contiguous low-bit mask).
 * ALIGN_U32(v, a) == v when v is already aligned.
 */
static inline uint32_t
ALIGN_U32(uint32_t v, uint32_t a)
{
   uint32_t mask = a - 1;
   return (v + mask) & ~mask;
}
/* Round `v` up to the next multiple of `a`.  `a` must be a positive power
 * of two.  Works for negative `v` as well (rounds toward +infinity).
 *
 * The addition is done in unsigned arithmetic: the original signed form
 * `(v + a - 1)` is undefined behavior when v is near INT32_MAX (signed
 * overflow, C11 6.5p5).  Unsigned wrap-around gives the same bit pattern
 * for all inputs that did not overflow, so callers see identical results.
 */
static inline int32_t
ALIGN_I32(int32_t v, int32_t a)
{
   uint32_t mask = (uint32_t)a - 1;
   return (int32_t)(((uint32_t)v + mask) & ~mask);
}
/* Iterate over the set bits of `dword`, LSB first, assigning each bit
 * index to `b` in turn.  `dword` is evaluated exactly once.
 *
 * The clearing mask uses 1U: the original `1 << (b)` is undefined
 * behavior when b == 31 (left-shifting into the sign bit of a 32-bit
 * signed int, C11 6.5.7p4); the unsigned shift is well-defined.
 */
#define for_each_bit(b, dword)                                  \
   for (uint32_t __dword = (dword);                             \
        (b) = __builtin_ffs(__dword) - 1, __dword;              \
        __dword &= ~(1U << (b)))
78 /* Define no kernel as 1, since that's an illegal offset for a kernel */
82 VkStructureType sType
;
86 /* Whenever we generate an error, pass it through this function. Useful for
87 * debugging, where we can break on it. Only call at error site, not when
88 * propagating errors. Might be useful to plug in a stack trace here.
91 static inline VkResult
92 vk_error(VkResult error
)
95 fprintf(stderr
, "vk_error: %x\n", error
);
101 void __anv_finishme(const char *file
, int line
, const char *format
, ...)
102 anv_printflike(3, 4);
105 * Print a FINISHME message, including its source location.
107 #define anv_finishme(format, ...) \
108 __anv_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);
110 /* A non-fatal assert. Useful for debugging. */
112 #define anv_assert(x) ({ \
113 if (unlikely(!(x))) \
114 fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x); \
117 #define anv_assert(x)
120 void anv_abortf(const char *format
, ...) anv_noreturn
anv_printflike(1, 2);
121 void anv_abortfv(const char *format
, va_list va
) anv_noreturn
;
123 #define stub_return(v) \
125 anv_finishme("stub %s", __func__); \
131 anv_finishme("stub %s", __func__); \
136 * A dynamically growable, circular buffer. Elements are added at head and
137 * removed from tail. head and tail are free-running uint32_t indices and we
138 * only compute the modulo with size when accessing the array. This way,
139 * number of bytes in the queue is always head - tail, even in case of
146 uint32_t element_size
;
151 int anv_vector_init(struct anv_vector
*queue
, uint32_t element_size
, uint32_t size
);
152 void *anv_vector_add(struct anv_vector
*queue
);
153 void *anv_vector_remove(struct anv_vector
*queue
);
156 anv_vector_length(struct anv_vector
*queue
)
158 return (queue
->head
- queue
->tail
) / queue
->element_size
;
162 anv_vector_finish(struct anv_vector
*queue
)
/* Iterate over every element currently in the vector, oldest (tail) to
 * newest (head), assigning a pointer to each element to `elem` in turn.
 * head/tail are free-running byte indices, so each access masks the
 * offset with (size - 1); size must therefore be a power of two.
 * The static_assert rejects anything that is not a struct anv_vector *.
 * NOTE(review): the comma operator recomputes `elem` once more when the
 * loop terminates (before the condition fails), but that pointer is
 * never dereferenced, so the stale value is harmless.
 */
#define anv_vector_foreach(elem, queue)                                  \
   static_assert(__builtin_types_compatible_p(__typeof__(queue), struct anv_vector *), ""); \
   for (uint32_t __anv_vector_offset = (queue)->tail;                                \
        elem = (queue)->data + (__anv_vector_offset & ((queue)->size - 1)), __anv_vector_offset < (queue)->head; \
        __anv_vector_offset += (queue)->element_size)
179 /* This field is here for the benefit of the aub dumper. It can (and for
180 * userptr bos it must) be set to the cpu map of the buffer. Destroying
181 * the bo won't clean up the mmap, it's still the responsibility of the bo
182 * user to do that. */
186 /* Represents a lock-free linked list of "free" things. This is used by
187 * both the block pool and the state pools. Unfortunately, in order to
188 * solve the ABA problem, we can't use a single uint32_t head.
190 union anv_free_list
{
194 /* A simple count that is incremented every time the head changes. */
200 #define ANV_FREE_LIST_EMPTY ((union anv_free_list) { { 1, 0 } })
202 struct anv_block_pool
{
203 struct anv_device
*device
;
211 * Array of mmaps and gem handles owned by the block pool, reclaimed when
212 * the block pool is destroyed.
214 struct anv_vector mmap_cleanups
;
219 union anv_free_list free_list
;
222 struct anv_block_state
{
238 struct anv_fixed_size_state_pool
{
240 union anv_free_list free_list
;
241 struct anv_block_state block
;
244 #define ANV_MIN_STATE_SIZE_LOG2 6
245 #define ANV_MAX_STATE_SIZE_LOG2 10
247 #define ANV_STATE_BUCKETS (ANV_MAX_STATE_SIZE_LOG2 - ANV_MIN_STATE_SIZE_LOG2)
249 struct anv_state_pool
{
250 struct anv_block_pool
*block_pool
;
251 struct anv_fixed_size_state_pool buckets
[ANV_STATE_BUCKETS
];
254 struct anv_state_stream
{
255 struct anv_block_pool
*block_pool
;
257 uint32_t current_block
;
261 void anv_block_pool_init(struct anv_block_pool
*pool
,
262 struct anv_device
*device
, uint32_t block_size
);
263 void anv_block_pool_finish(struct anv_block_pool
*pool
);
264 uint32_t anv_block_pool_alloc(struct anv_block_pool
*pool
);
265 void anv_block_pool_free(struct anv_block_pool
*pool
, uint32_t offset
);
266 void anv_state_pool_init(struct anv_state_pool
*pool
,
267 struct anv_block_pool
*block_pool
);
268 struct anv_state
anv_state_pool_alloc(struct anv_state_pool
*pool
,
269 size_t state_size
, size_t alignment
);
270 void anv_state_pool_free(struct anv_state_pool
*pool
, struct anv_state state
);
271 void anv_state_stream_init(struct anv_state_stream
*stream
,
272 struct anv_block_pool
*block_pool
);
273 void anv_state_stream_finish(struct anv_state_stream
*stream
);
274 struct anv_state
anv_state_stream_alloc(struct anv_state_stream
*stream
,
275 uint32_t size
, uint32_t alignment
);
278 * Implements a pool of re-usable BOs. The interface is identical to that
279 * of block_pool except that each block is its own BO.
282 struct anv_device
*device
;
289 void anv_bo_pool_init(struct anv_bo_pool
*pool
,
290 struct anv_device
*device
, uint32_t block_size
);
291 void anv_bo_pool_finish(struct anv_bo_pool
*pool
);
292 VkResult
anv_bo_pool_alloc(struct anv_bo_pool
*pool
, struct anv_bo
*bo
);
293 void anv_bo_pool_free(struct anv_bo_pool
*pool
, const struct anv_bo
*bo
);
298 typedef void (*anv_object_destructor_cb
)(struct anv_device
*,
303 anv_object_destructor_cb destructor
;
306 struct anv_physical_device
{
307 struct anv_instance
* instance
;
312 const struct brw_device_info
* info
;
315 struct anv_instance
{
316 void * pAllocUserData
;
317 PFN_vkAllocFunction pfnAlloc
;
318 PFN_vkFreeFunction pfnFree
;
320 uint32_t physicalDeviceCount
;
321 struct anv_physical_device physicalDevice
;
324 struct anv_meta_state
{
331 VkPipelineLayout pipeline_layout
;
332 VkDescriptorSetLayout ds_layout
;
336 VkDynamicRsState rs_state
;
337 VkDynamicCbState cb_state
;
338 VkDynamicDsState ds_state
;
343 struct anv_device
* device
;
345 struct anv_state_pool
* pool
;
348 * Serial number of the most recently completed batch executed on the
351 struct anv_state completed_serial
;
354 * The next batch submitted to the engine will be assigned this serial
357 uint32_t next_serial
;
359 uint32_t last_collected_serial
;
363 struct anv_instance
* instance
;
365 struct brw_device_info info
;
371 struct anv_bo_pool batch_bo_pool
;
373 struct anv_block_pool dynamic_state_block_pool
;
374 struct anv_state_pool dynamic_state_pool
;
376 struct anv_block_pool instruction_block_pool
;
377 struct anv_block_pool surface_state_block_pool
;
378 struct anv_state_pool surface_state_pool
;
380 struct anv_meta_state meta_state
;
382 struct anv_state float_border_colors
;
383 struct anv_state uint32_border_colors
;
385 struct anv_queue queue
;
387 struct anv_compiler
* compiler
;
388 struct anv_aub_writer
* aub_writer
;
389 pthread_mutex_t mutex
;
393 anv_device_alloc(struct anv_device
* device
,
396 VkSystemAllocType allocType
);
399 anv_device_free(struct anv_device
* device
,
402 void* anv_gem_mmap(struct anv_device
*device
,
403 uint32_t gem_handle
, uint64_t offset
, uint64_t size
);
404 void anv_gem_munmap(void *p
, uint64_t size
);
405 uint32_t anv_gem_create(struct anv_device
*device
, size_t size
);
406 void anv_gem_close(struct anv_device
*device
, int gem_handle
);
407 int anv_gem_userptr(struct anv_device
*device
, void *mem
, size_t size
);
408 int anv_gem_wait(struct anv_device
*device
, int gem_handle
, int64_t *timeout_ns
);
409 int anv_gem_execbuffer(struct anv_device
*device
,
410 struct drm_i915_gem_execbuffer2
*execbuf
);
411 int anv_gem_set_tiling(struct anv_device
*device
, int gem_handle
,
412 uint32_t stride
, uint32_t tiling
);
413 int anv_gem_create_context(struct anv_device
*device
);
414 int anv_gem_destroy_context(struct anv_device
*device
, int context
);
415 int anv_gem_get_param(int fd
, uint32_t param
);
416 int anv_gem_get_aperture(struct anv_device
*device
, uint64_t *size
);
417 int anv_gem_handle_to_fd(struct anv_device
*device
, int gem_handle
);
418 int anv_gem_fd_to_handle(struct anv_device
*device
, int fd
);
419 int anv_gem_userptr(struct anv_device
*device
, void *mem
, size_t size
);
421 VkResult
anv_bo_init_new(struct anv_bo
*bo
, struct anv_device
*device
, uint64_t size
);
423 struct anv_reloc_list
{
426 struct drm_i915_gem_relocation_entry
* relocs
;
427 struct anv_bo
** reloc_bos
;
430 VkResult
anv_reloc_list_init(struct anv_reloc_list
*list
,
431 struct anv_device
*device
);
432 void anv_reloc_list_finish(struct anv_reloc_list
*list
,
433 struct anv_device
*device
);
435 struct anv_batch_bo
{
438 /* Bytes actually consumed in this batch BO */
441 /* These offsets reference the per-batch reloc list */
445 struct anv_batch_bo
* prev_batch_bo
;
449 struct anv_device
* device
;
455 struct anv_reloc_list relocs
;
457 /* This callback is called (with the associated user data) in the event
458 * that the batch runs out of space.
460 VkResult (*extend_cb
)(struct anv_batch
*, void *);
464 void *anv_batch_emit_dwords(struct anv_batch
*batch
, int num_dwords
);
465 void anv_batch_emit_batch(struct anv_batch
*batch
, struct anv_batch
*other
);
466 uint64_t anv_batch_emit_reloc(struct anv_batch
*batch
,
467 void *location
, struct anv_bo
*bo
, uint32_t offset
);
474 #define __gen_address_type struct anv_address
475 #define __gen_user_data struct anv_batch
477 static inline uint64_t
478 __gen_combine_address(struct anv_batch
*batch
, void *location
,
479 const struct anv_address address
, uint32_t delta
)
481 if (address
.bo
== NULL
) {
484 assert(batch
->start
<= location
&& location
< batch
->end
);
486 return anv_batch_emit_reloc(batch
, location
, address
.bo
, address
.offset
+ delta
);
490 #include "gen7_pack.h"
491 #include "gen75_pack.h"
492 #undef GEN8_3DSTATE_MULTISAMPLE
493 #include "gen8_pack.h"
495 #define anv_batch_emit(batch, cmd, ...) do { \
496 struct cmd __template = { \
500 void *__dst = anv_batch_emit_dwords(batch, cmd ## _length); \
501 cmd ## _pack(batch, __dst, &__template); \
504 #define anv_batch_emitn(batch, n, cmd, ...) ({ \
505 struct cmd __template = { \
507 .DwordLength = n - cmd ## _length_bias, \
510 void *__dst = anv_batch_emit_dwords(batch, n); \
511 cmd ## _pack(batch, __dst, &__template); \
515 #define anv_batch_emit_merge(batch, dwords0, dwords1) \
519 assert(ARRAY_SIZE(dwords0) == ARRAY_SIZE(dwords1)); \
520 dw = anv_batch_emit_dwords((batch), ARRAY_SIZE(dwords0)); \
521 for (uint32_t i = 0; i < ARRAY_SIZE(dwords0); i++) \
522 dw[i] = (dwords0)[i] | (dwords1)[i]; \
523 VG(VALGRIND_CHECK_MEM_IS_DEFINED(dw, ARRAY_SIZE(dwords0) * 4));\
526 #define GEN8_MOCS { \
527 .MemoryTypeLLCeLLCCacheabilityControl = WB, \
528 .TargetCache = L3DefertoPATforLLCeLLCselection, \
532 struct anv_device_memory
{
534 VkDeviceSize map_size
;
538 struct anv_dynamic_vp_state
{
539 struct anv_object base
;
540 struct anv_state sf_clip_vp
;
541 struct anv_state cc_vp
;
542 struct anv_state scissor
;
545 struct anv_dynamic_rs_state
{
546 uint32_t state_sf
[GEN8_3DSTATE_SF_length
];
547 uint32_t state_raster
[GEN8_3DSTATE_RASTER_length
];
550 struct anv_dynamic_ds_state
{
551 uint32_t state_wm_depth_stencil
[GEN8_3DSTATE_WM_DEPTH_STENCIL_length
];
552 uint32_t state_color_calc
[GEN8_COLOR_CALC_STATE_length
];
555 struct anv_dynamic_cb_state
{
556 uint32_t state_color_calc
[GEN8_COLOR_CALC_STATE_length
];
560 struct anv_descriptor_slot
{
565 struct anv_descriptor_set_layout
{
567 uint32_t surface_count
;
568 struct anv_descriptor_slot
*surface_start
;
569 uint32_t sampler_count
;
570 struct anv_descriptor_slot
*sampler_start
;
571 } stage
[VK_NUM_SHADER_STAGE
];
574 uint32_t num_dynamic_buffers
;
575 uint32_t shader_stages
;
576 struct anv_descriptor_slot entries
[0];
579 struct anv_descriptor
{
580 struct anv_sampler
*sampler
;
581 struct anv_surface_view
*view
;
584 struct anv_descriptor_set
{
585 struct anv_descriptor descriptors
[0];
592 struct anv_pipeline_layout
{
594 struct anv_descriptor_set_layout
*layout
;
595 uint32_t surface_start
[VK_NUM_SHADER_STAGE
];
596 uint32_t sampler_start
[VK_NUM_SHADER_STAGE
];
602 uint32_t surface_count
;
603 uint32_t sampler_count
;
604 } stage
[VK_NUM_SHADER_STAGE
];
608 struct anv_device
* device
;
616 #define ANV_CMD_BUFFER_PIPELINE_DIRTY (1 << 0)
617 #define ANV_CMD_BUFFER_RS_DIRTY (1 << 2)
618 #define ANV_CMD_BUFFER_DS_DIRTY (1 << 3)
619 #define ANV_CMD_BUFFER_CB_DIRTY (1 << 4)
620 #define ANV_CMD_BUFFER_VP_DIRTY (1 << 5)
622 struct anv_vertex_binding
{
623 struct anv_buffer
* buffer
;
627 struct anv_descriptor_set_binding
{
628 struct anv_descriptor_set
* set
;
629 uint32_t dynamic_offsets
[128];
632 struct anv_cmd_buffer
{
633 struct anv_object base
;
634 struct anv_device
* device
;
636 struct drm_i915_gem_execbuffer2 execbuf
;
637 struct drm_i915_gem_exec_object2
* exec2_objects
;
638 struct anv_bo
** exec2_bos
;
639 uint32_t exec2_array_length
;
644 struct anv_batch batch
;
645 struct anv_batch_bo
* last_batch_bo
;
646 struct anv_batch_bo
* surface_batch_bo
;
647 uint32_t surface_next
;
648 struct anv_reloc_list surface_relocs
;
649 struct anv_state_stream surface_state_stream
;
650 struct anv_state_stream dynamic_state_stream
;
652 /* State required while building cmd buffer */
653 uint32_t current_pipeline
;
656 uint32_t compute_dirty
;
657 uint32_t descriptors_dirty
;
658 struct anv_pipeline
* pipeline
;
659 struct anv_pipeline
* compute_pipeline
;
660 struct anv_framebuffer
* framebuffer
;
661 struct anv_dynamic_rs_state
* rs_state
;
662 struct anv_dynamic_ds_state
* ds_state
;
663 struct anv_dynamic_vp_state
* vp_state
;
664 struct anv_dynamic_cb_state
* cb_state
;
665 struct anv_vertex_binding vertex_bindings
[MAX_VBS
];
666 struct anv_descriptor_set_binding descriptors
[MAX_SETS
];
669 void anv_cmd_buffer_dump(struct anv_cmd_buffer
*cmd_buffer
);
670 void anv_aub_writer_destroy(struct anv_aub_writer
*writer
);
673 struct anv_object base
;
675 struct drm_i915_gem_execbuffer2 execbuf
;
676 struct drm_i915_gem_exec_object2 exec2_objects
[1];
685 struct anv_pipeline
{
686 struct anv_object base
;
687 struct anv_device
* device
;
688 struct anv_batch batch
;
689 uint32_t batch_data
[256];
690 struct anv_shader
* shaders
[VK_NUM_SHADER_STAGE
];
691 struct anv_pipeline_layout
* layout
;
694 struct brw_vs_prog_data vs_prog_data
;
695 struct brw_wm_prog_data wm_prog_data
;
696 struct brw_gs_prog_data gs_prog_data
;
697 struct brw_cs_prog_data cs_prog_data
;
698 struct brw_stage_prog_data
* prog_data
[VK_NUM_SHADER_STAGE
];
702 uint32_t nr_vs_entries
;
705 uint32_t nr_gs_entries
;
708 struct anv_bo vs_scratch_bo
;
709 struct anv_bo ps_scratch_bo
;
710 struct anv_bo gs_scratch_bo
;
711 struct anv_bo cs_scratch_bo
;
713 uint32_t active_stages
;
714 struct anv_state_stream program_stream
;
715 struct anv_state blend_state
;
720 uint32_t gs_vertex_count
;
724 uint32_t binding_stride
[MAX_VBS
];
726 uint32_t state_sf
[GEN8_3DSTATE_SF_length
];
727 uint32_t state_raster
[GEN8_3DSTATE_RASTER_length
];
728 uint32_t state_wm_depth_stencil
[GEN8_3DSTATE_WM_DEPTH_STENCIL_length
];
731 struct anv_pipeline_create_info
{
733 bool disable_viewport
;
734 bool disable_scissor
;
740 anv_pipeline_create(VkDevice device
,
741 const VkGraphicsPipelineCreateInfo
*pCreateInfo
,
742 const struct anv_pipeline_create_info
*extra
,
743 VkPipeline
*pPipeline
);
745 struct anv_compiler
*anv_compiler_create(struct anv_device
*device
);
746 void anv_compiler_destroy(struct anv_compiler
*compiler
);
747 int anv_compiler_run(struct anv_compiler
*compiler
, struct anv_pipeline
*pipeline
);
748 void anv_compiler_free(struct anv_pipeline
*pipeline
);
758 const struct anv_format
*
759 anv_format_for_vk_format(VkFormat format
);
770 uint32_t stencil_offset
;
771 uint32_t stencil_stride
;
777 struct anv_swap_chain
* swap_chain
;
780 * \name Alignment of miptree images, in units of pixels.
782 * These fields contain the actual alignment values, not the values the
783 * hardware expects. For example, if h_align is 4, then program the hardware
786 * \see RENDER_SURFACE_STATE.SurfaceHorizontalAlignment
787 * \see RENDER_SURFACE_STATE.SurfaceVerticalAlignment
795 struct anv_surface_view
{
796 struct anv_object base
;
798 struct anv_state surface_state
;
806 struct anv_image_create_info
{
810 VkResult
anv_image_create(VkDevice _device
,
811 const VkImageCreateInfo
*pCreateInfo
,
812 const struct anv_image_create_info
*extra
,
815 void anv_image_view_init(struct anv_surface_view
*view
,
816 struct anv_device
*device
,
817 const VkImageViewCreateInfo
* pCreateInfo
,
818 struct anv_cmd_buffer
*cmd_buffer
);
820 void anv_color_attachment_view_init(struct anv_surface_view
*view
,
821 struct anv_device
*device
,
822 const VkColorAttachmentViewCreateInfo
* pCreateInfo
,
823 struct anv_cmd_buffer
*cmd_buffer
);
825 void anv_surface_view_destroy(struct anv_device
*device
,
826 struct anv_object
*obj
, VkObjectType obj_type
);
832 struct anv_depth_stencil_view
{
835 uint32_t depth_offset
;
836 uint32_t depth_stride
;
837 uint32_t depth_format
;
839 uint32_t stencil_offset
;
840 uint32_t stencil_stride
;
843 struct anv_framebuffer
{
844 struct anv_object base
;
845 uint32_t color_attachment_count
;
846 const struct anv_surface_view
* color_attachments
[MAX_RTS
];
847 const struct anv_depth_stencil_view
* depth_stencil
;
849 uint32_t sample_count
;
854 /* Viewport for clears */
855 VkDynamicVpState vp_state
;
858 struct anv_render_pass_layer
{
859 VkAttachmentLoadOp color_load_op
;
860 VkClearColor clear_color
;
863 struct anv_render_pass
{
866 uint32_t num_clear_layers
;
868 struct anv_render_pass_layer layers
[0];
871 void anv_device_init_meta(struct anv_device
*device
);
872 void anv_device_finish_meta(struct anv_device
*device
);
875 anv_cmd_buffer_clear(struct anv_cmd_buffer
*cmd_buffer
,
876 struct anv_render_pass
*pass
);
879 anv_lookup_entrypoint(const char *name
);