vk: Store dynamic slot index with struct anv_descriptor_slot
[mesa.git] / src / vulkan / private.h
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #pragma once
25
26 #include <stdlib.h>
27 #include <stdio.h>
28 #include <stdbool.h>
29 #include <pthread.h>
30 #include <assert.h>
31 #include <i915_drm.h>
32
33 #include "brw_device_info.h"
34 #include "util/macros.h"
35
36 #define VK_PROTOTYPES
37 #include <vulkan/vulkan.h>
38 #include <vulkan/vulkan_intel.h>
39 #include <vulkan/vk_wsi_lunarg.h>
40
41 #include "entrypoints.h"
42
43 #include "brw_context.h"
44
45 #ifdef __cplusplus
46 extern "C" {
47 #endif
48
/* Round v up to the next multiple of a.  a must be a power of two. */
static inline uint32_t
ALIGN_U32(uint32_t v, uint32_t a)
{
   uint32_t mask = a - 1;
   return (v + mask) & ~mask;
}
54
/* Round v up to the next multiple of a (signed).  a must be a power of
 * two. */
static inline int32_t
ALIGN_I32(int32_t v, int32_t a)
{
   int32_t mask = a - 1;
   return (v + mask) & ~mask;
}
60
/* Iterate b over the indices of the set bits of dword, lowest bit first.
 *
 * The clearing mask uses an unsigned literal: with a plain (signed) 1,
 * "1 << 31" is undefined behavior when bit 31 is set. */
#define for_each_bit(b, dword)                          \
   for (uint32_t __dword = (dword);                     \
        (b) = __builtin_ffs(__dword) - 1, __dword;      \
        __dword &= ~(1u << (b)))

/* Define no kernel as 1, since that's an illegal offset for a kernel */
#define NO_KERNEL 1
68
/* Common prefix of the Vulkan input structures: an sType tag identifying
 * the struct and a pNext extension-chain pointer.  Used to walk
 * heterogeneous pCreateInfo chains generically. */
struct anv_common {
   VkStructureType sType;
   const void* pNext;
};
73
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */

static inline VkResult
vk_error(VkResult error)
{
#ifdef DEBUG
   /* In debug builds, log the raw VkResult value so the failure's origin
    * is visible even without a debugger attached. */
   fprintf(stderr, "vk_error: %x\n", error);
#endif

   return error;
}
88
void __anv_finishme(const char *file, int line, const char *format, ...);

/**
 * Print a FINISHME message, including its source location.
 *
 * No trailing semicolon in the expansion: the caller supplies it, so the
 * macro behaves like an ordinary statement (and is safe in an unbraced
 * if/else).
 */
#define anv_finishme(format, ...) \
   __anv_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__)

/* Report an unimplemented entry point and return the given value. */
#define stub_return(v)                         \
   do {                                        \
      anv_finishme("stub %s", __func__);       \
      return (v);                              \
   } while (0)

/* Report an unimplemented void entry point and return.  Takes no
 * argument (the previous parameter was unused); "stub()" call sites are
 * unaffected. */
#define stub()                                 \
   do {                                        \
      anv_finishme("stub %s", __func__);       \
      return;                                  \
   } while (0)
108
/**
 * A dynamically growable, circular buffer. Elements are added at head and
 * removed from tail. head and tail are free-running uint32_t indices and we
 * only compute the modulo with size when accessing the array. This way,
 * number of bytes in the queue is always head - tail, even in case of
 * wraparound.
 */

struct anv_vector {
   uint32_t head;          /* free-running byte offset of the next add */
   uint32_t tail;          /* free-running byte offset of the next remove */
   uint32_t element_size;  /* size of one element, in bytes */
   uint32_t size;          /* capacity in bytes; assumed power of two from
                            * the "& (size - 1)" masking in the accessors
                            * -- confirm at init */
   void *data;
};

int anv_vector_init(struct anv_vector *queue, uint32_t element_size, uint32_t size);
void *anv_vector_add(struct anv_vector *queue);
void *anv_vector_remove(struct anv_vector *queue);
128
129 static inline int
130 anv_vector_length(struct anv_vector *queue)
131 {
132 return (queue->head - queue->tail) / queue->element_size;
133 }
134
/* Release the vector's backing storage (the struct itself is not freed). */
static inline void
anv_vector_finish(struct anv_vector *queue)
{
   free(queue->data);
}

/* Iterate elem over the queued elements, tail to head.  Note the macro
 * expands to a static_assert plus a for statement, so it is not a single
 * statement and must not be used in an unbraced if/else. */
#define anv_vector_foreach(elem, queue) \
   static_assert(__builtin_types_compatible_p(__typeof__(queue), struct anv_vector *), ""); \
   for (uint32_t __anv_vector_offset = (queue)->tail; \
        elem = (queue)->data + (__anv_vector_offset & ((queue)->size - 1)), __anv_vector_offset < (queue)->head; \
        __anv_vector_offset += (queue)->element_size)
146
/* A GEM buffer object and the driver-side bookkeeping for it. */
struct anv_bo {
   int gem_handle;    /* i915 GEM handle */
   uint32_t index;    /* presumably the execbuf object-list index -- confirm
                       * against the command-buffer code */
   uint64_t offset;   /* presumed GPU offset (as last seen by the kernel) */
   uint64_t size;

   /* This field is here for the benefit of the aub dumper. It can (and for
    * userptr bos it must) be set to the cpu map of the buffer. Destroying
    * the bo won't clean up the mmap, it's still the responsibility of the bo
    * user to do that. */
   void *map;
};

/* Represents a lock-free linked list of "free" things. This is used by
 * both the block pool and the state pools. Unfortunately, in order to
 * solve the ABA problem, we can't use a single uint32_t head.
 */
union anv_free_list {
   struct {
      uint32_t offset;

      /* A simple count that is incremented every time the head changes. */
      uint32_t count;
   };
   uint64_t u64;
};

/* Presumably offset 1 is a sentinel no real entry can have (cf. NO_KERNEL)
 * -- confirm in the pool implementation. */
#define ANV_FREE_LIST_EMPTY ((union anv_free_list) { { 1, 0 } })
175
/* Pool of fixed-size blocks carved out of a single bo. */
struct anv_block_pool {
   struct anv_device *device;

   struct anv_bo bo;
   void *map;      /* cpu mapping of the whole pool */
   int fd;         /* NOTE(review): backing fd for the pool mapping --
                    * purpose not visible in this header; confirm */
   uint32_t size;

   /**
    * Array of mmaps and gem handles owned by the block pool, reclaimed when
    * the block pool is destroyed.
    */
   struct anv_vector mmap_cleanups;

   uint32_t block_size;

   uint32_t next_block;            /* offset of the next never-used block */
   union anv_free_list free_list;  /* recycled blocks */
};

/* next/end packed into one uint64_t, presumably so the allocator can
 * update both with a single 64-bit atomic -- confirm in the allocator. */
struct anv_block_state {
   union {
      struct {
         uint32_t next;
         uint32_t end;
      };
      uint64_t u64;
   };
};

/* A piece of pool-allocated state: offset within the pool's bo, the
 * allocation size, and its cpu mapping. */
struct anv_state {
   uint32_t offset;
   uint32_t alloc_size;
   void *map;
};

/* One power-of-two size class of a state pool. */
struct anv_fixed_size_state_pool {
   size_t state_size;
   union anv_free_list free_list;
   struct anv_block_state block;
};

/* State pools serve power-of-two sizes between 2^ANV_MIN_STATE_SIZE_LOG2
 * and 2^ANV_MAX_STATE_SIZE_LOG2 bytes from per-size buckets. */
#define ANV_MIN_STATE_SIZE_LOG2 6
#define ANV_MAX_STATE_SIZE_LOG2 10

#define ANV_STATE_BUCKETS (ANV_MAX_STATE_SIZE_LOG2 - ANV_MIN_STATE_SIZE_LOG2)

struct anv_state_pool {
   struct anv_block_pool *block_pool;
   struct anv_fixed_size_state_pool buckets[ANV_STATE_BUCKETS];
};

/* Linear ("bump") allocator over blocks taken from a block pool. */
struct anv_state_stream {
   struct anv_block_pool *block_pool;
   uint32_t next;            /* next free offset in the current block */
   uint32_t current_block;
   uint32_t end;             /* end of the current block */
};
234
/* Block pool / state pool / state stream entry points. */
void anv_block_pool_init(struct anv_block_pool *pool,
                         struct anv_device *device, uint32_t block_size);
void anv_block_pool_init_slave(struct anv_block_pool *pool,
                               struct anv_block_pool *master_pool,
                               uint32_t num_blocks);
void anv_block_pool_finish(struct anv_block_pool *pool);
uint32_t anv_block_pool_alloc(struct anv_block_pool *pool);
void anv_block_pool_free(struct anv_block_pool *pool, uint32_t offset);
void anv_state_pool_init(struct anv_state_pool *pool,
                         struct anv_block_pool *block_pool);
struct anv_state anv_state_pool_alloc(struct anv_state_pool *pool,
                                      size_t state_size, size_t alignment);
void anv_state_pool_free(struct anv_state_pool *pool, struct anv_state state);
void anv_state_stream_init(struct anv_state_stream *stream,
                           struct anv_block_pool *block_pool);
void anv_state_stream_finish(struct anv_state_stream *stream);
struct anv_state anv_state_stream_alloc(struct anv_state_stream *stream,
                                        uint32_t size, uint32_t alignment);

/**
 * Implements a pool of re-usable BOs. The interface is identical to that
 * of block_pool except that each block is its own BO.
 */
struct anv_bo_pool {
   struct anv_device *device;

   uint32_t bo_size;

   void *free_list;
};

void anv_bo_pool_init(struct anv_bo_pool *pool,
                      struct anv_device *device, uint32_t block_size);
void anv_bo_pool_finish(struct anv_bo_pool *pool);
VkResult anv_bo_pool_alloc(struct anv_bo_pool *pool, struct anv_bo *bo);
void anv_bo_pool_free(struct anv_bo_pool *pool, const struct anv_bo *bo);
271
struct anv_object;
struct anv_device;

/* Destructor callback used to tear down API objects generically. */
typedef void (*anv_object_destructor_cb)(struct anv_device *,
                                         struct anv_object *,
                                         VkObjectType);

/* Base struct for driver objects that need a type-specific destructor. */
struct anv_object {
   anv_object_destructor_cb destructor;
};

struct anv_physical_device {
   struct anv_instance *instance;
   uint32_t chipset_id;                 /* presumably the PCI device id */
   bool no_hw;                          /* presumably set when running without
                                         * real hardware -- confirm */
   const char *path;                    /* presumably the DRM node path */
   const char *name;
   const struct brw_device_info *info;
};

struct anv_instance {
   /* Application-supplied allocator callbacks. */
   void *pAllocUserData;
   PFN_vkAllocFunction pfnAlloc;
   PFN_vkFreeFunction pfnFree;
   uint32_t apiVersion;
   uint32_t physicalDeviceCount;
   struct anv_physical_device physicalDevice;  /* a single GPU is supported */
};
300
/* Pipelines and dynamic-state objects owned by the device for internal
 * "meta" operations (clears and blits). */
struct anv_meta_state {
   struct {
      VkPipeline pipeline;
   } clear;

   struct {
      VkPipeline pipeline;
      VkDescriptorSetLayout ds_layout;
   } blit;

   /* Dynamic state shared by the meta operations. */
   struct {
      VkDynamicRsState rs_state;
      VkDynamicCbState cb_state;
      VkDynamicDsState ds_state;
   } shared;
};
317
struct anv_device {
   struct anv_instance *instance;
   uint32_t chipset_id;
   struct brw_device_info info;
   int context_id;                /* hardware context (anv_gem_create_context) */
   int fd;                        /* DRM device fd */
   bool no_hw;
   bool dump_aub;

   struct anv_bo_pool batch_bo_pool;

   /* Device-wide allocators backing the various kinds of indirect state. */
   struct anv_block_pool dynamic_state_block_pool;
   struct anv_state_pool dynamic_state_pool;

   struct anv_block_pool instruction_block_pool;
   struct anv_block_pool surface_state_block_pool;
   struct anv_block_pool binding_table_block_pool;
   struct anv_state_pool surface_state_pool;

   struct anv_meta_state meta_state;

   struct anv_compiler *compiler;
   struct anv_aub_writer *aub_writer;
   pthread_mutex_t mutex;         /* NOTE(review): exact scope of what this
                                   * guards isn't visible here -- confirm */
};

struct anv_queue {
   struct anv_device *device;

   struct anv_state_pool *pool;

   /**
    * Serial number of the most recently completed batch executed on the
    * engine.
    */
   struct anv_state completed_serial;

   /**
    * The next batch submitted to the engine will be assigned this serial
    * number.
    */
   uint32_t next_serial;

   uint32_t last_collected_serial;
};
363
364 void *
365 anv_device_alloc(struct anv_device * device,
366 size_t size,
367 size_t alignment,
368 VkSystemAllocType allocType);
369
370 void
371 anv_device_free(struct anv_device * device,
372 void * mem);
373
374 void* anv_gem_mmap(struct anv_device *device,
375 uint32_t gem_handle, uint64_t offset, uint64_t size);
376 void anv_gem_munmap(void *p, uint64_t size);
377 uint32_t anv_gem_create(struct anv_device *device, size_t size);
378 void anv_gem_close(struct anv_device *device, int gem_handle);
379 int anv_gem_userptr(struct anv_device *device, void *mem, size_t size);
380 int anv_gem_wait(struct anv_device *device, int gem_handle, int64_t *timeout_ns);
381 int anv_gem_execbuffer(struct anv_device *device,
382 struct drm_i915_gem_execbuffer2 *execbuf);
383 int anv_gem_set_tiling(struct anv_device *device, int gem_handle,
384 uint32_t stride, uint32_t tiling);
385 int anv_gem_create_context(struct anv_device *device);
386 int anv_gem_destroy_context(struct anv_device *device, int context);
387 int anv_gem_get_param(int fd, uint32_t param);
388 int anv_gem_get_aperture(struct anv_device *device, uint64_t *size);
389 int anv_gem_handle_to_fd(struct anv_device *device, int gem_handle);
390 int anv_gem_fd_to_handle(struct anv_device *device, int fd);
391 int anv_gem_userptr(struct anv_device *device, void *mem, size_t size);
392
393 VkResult anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size);
394
/* Growable list of GEM relocation entries plus the target bo of each. */
struct anv_reloc_list {
   size_t num_relocs;
   size_t array_length;      /* allocated capacity of both arrays */
   struct drm_i915_gem_relocation_entry *relocs;
   struct anv_bo **reloc_bos;
};

VkResult anv_reloc_list_init(struct anv_reloc_list *list,
                             struct anv_device *device);
void anv_reloc_list_finish(struct anv_reloc_list *list,
                           struct anv_device *device);

/* One link in the backwards-chained list of BOs backing a command batch. */
struct anv_batch_bo {
   struct anv_bo bo;

   /* Bytes actually consumed in this batch BO */
   size_t length;

   /* These offsets reference the per-batch reloc list */
   size_t first_reloc;
   size_t num_relocs;

   struct anv_batch_bo *prev_batch_bo;
};
419
struct anv_batch {
   struct anv_device *device;

   void *start;       /* cpu pointers delimiting the writable area */
   void *end;
   void *next;        /* write cursor */

   struct anv_reloc_list relocs;

   /* This callback is called (with the associated user data) in the event
    * that the batch runs out of space.
    */
   VkResult (*extend_cb)(struct anv_batch *, void *);
   void *user_data;
};

void *anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords);
void anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other);
uint64_t anv_batch_emit_reloc(struct anv_batch *batch,
                              void *location, struct anv_bo *bo, uint32_t offset);

/* A GPU address expressed as (bo, offset); resolved to a real address via
 * relocations at execbuf time. */
struct anv_address {
   struct anv_bo *bo;
   uint32_t offset;
};

/* Type hooks consumed by the generated gen*_pack.h emission helpers. */
#define __gen_address_type struct anv_address
#define __gen_user_data struct anv_batch
448
449 static inline uint64_t
450 __gen_combine_address(struct anv_batch *batch, void *location,
451 const struct anv_address address, uint32_t delta)
452 {
453 if (address.bo == NULL) {
454 return delta;
455 } else {
456 assert(batch->start <= location && location < batch->end);
457
458 return anv_batch_emit_reloc(batch, location, address.bo, address.offset + delta);
459 }
460 }
461
#include "gen7_pack.h"
#include "gen75_pack.h"
/* NOTE(review): presumably one of the earlier pack headers also defines
 * GEN8_3DSTATE_MULTISAMPLE; the #undef lets gen8_pack.h's definition
 * take effect -- confirm. */
#undef GEN8_3DSTATE_MULTISAMPLE
#include "gen8_pack.h"

/* Emit one fixed-length command: build a template from the command's
 * _header plus the caller's designated initializers, reserve _length
 * dwords in the batch, and pack into them. */
#define anv_batch_emit(batch, cmd, ...) do { \
      struct cmd __template = { \
         cmd ## _header, \
         __VA_ARGS__ \
      }; \
      void *__dst = anv_batch_emit_dwords(batch, cmd ## _length); \
      cmd ## _pack(batch, __dst, &__template); \
   } while (0)

/* Variable-length variant: reserves n dwords, fixes up DwordLength, and
 * (via GNU statement expression) evaluates to the destination pointer so
 * the caller can fill in the trailing dwords. */
#define anv_batch_emitn(batch, n, cmd, ...) ({ \
      struct cmd __template = { \
         cmd ## _header, \
         .DwordLength = n - cmd ## _length_bias, \
         __VA_ARGS__ \
      }; \
      void *__dst = anv_batch_emit_dwords(batch, n); \
      cmd ## _pack(batch, __dst, &__template); \
      __dst; \
   })

/* OR together two equal-length arrays of pre-packed dwords and emit the
 * result into the batch. */
#define anv_batch_emit_merge(batch, dwords0, dwords1) \
   do { \
      uint32_t *dw; \
      \
      assert(ARRAY_SIZE(dwords0) == ARRAY_SIZE(dwords1)); \
      dw = anv_batch_emit_dwords((batch), ARRAY_SIZE(dwords0)); \
      for (uint32_t i = 0; i < ARRAY_SIZE(dwords0); i++) \
         dw[i] = (dwords0)[i] | (dwords1)[i]; \
   } while (0)

/* Default gen8 MOCS (cacheability control) value: write-back, target
 * cache deferred to PAT. */
#define GEN8_MOCS { \
   .MemoryTypeLLCeLLCCacheabilityControl = WB, \
   .TargetCache = L3DefertoPATforLLCeLLCselection, \
   .AgeforQUADLRU = 0 \
}
502
/* A VkDeviceMemory allocation: one GEM bo plus its cpu mapping. */
struct anv_device_memory {
   struct anv_bo bo;
   VkDeviceSize map_size;
   void *map;
};

/* Viewport dynamic state, baked into pool-allocated indirect state. */
struct anv_dynamic_vp_state {
   struct anv_object base;
   struct anv_state sf_clip_vp;
   struct anv_state cc_vp;
   struct anv_state scissor;
};

/* Raster dynamic state as pre-packed command dwords; presumably merged
 * with the pipeline's copies at draw time (cf. anv_batch_emit_merge). */
struct anv_dynamic_rs_state {
   uint32_t state_sf[GEN8_3DSTATE_SF_length];
   uint32_t state_raster[GEN8_3DSTATE_RASTER_length];
};

/* Depth/stencil dynamic state as pre-packed dwords. */
struct anv_dynamic_ds_state {
   uint32_t state_wm_depth_stencil[GEN8_3DSTATE_WM_DEPTH_STENCIL_length];
   uint32_t state_color_calc[GEN8_COLOR_CALC_STATE_length];
};

/* Color-blend dynamic state as pre-packed dwords. */
struct anv_dynamic_cb_state {
   uint32_t state_color_calc[GEN8_COLOR_CALC_STATE_length];

};

/* Layout of one query result slot as written into the query bo --
 * begin/end values plus an availability word (presumed; confirm against
 * the query code). */
struct anv_query_pool_slot {
   uint64_t begin;
   uint64_t end;
   uint64_t available;
};

struct anv_query_pool {
   struct anv_object base;
   VkQueryType type;
   uint32_t slots;      /* number of query slots */
   struct anv_bo bo;    /* backing storage for the slot array */
};
543
/* One surface/sampler slot within a descriptor set layout.  dynamic_slot
 * is presumably the index into the dynamic-offset array for dynamic
 * buffers, with a negative value meaning "not dynamic" (inferred from the
 * int8_t signedness -- confirm against the users).
 *
 * Fix: the struct was closed with a stray "} entries[0];" declarator,
 * which defined a zero-length file-scope object in every translation unit
 * including this header; the entries[] array belongs inside
 * anv_descriptor_set_layout. */
struct anv_descriptor_slot {
   int8_t dynamic_slot;
   uint8_t index;
};
548
struct anv_descriptor_set_layout {
   /* Per-stage views into entries[]: each stage gets a run of surface
    * slots and a run of sampler slots (start pointers presumably point
    * into entries[] below -- confirm at creation). */
   struct {
      uint32_t surface_count;
      struct anv_descriptor_slot *surface_start;
      uint32_t sampler_count;
      struct anv_descriptor_slot *sampler_start;
   } stage[VK_NUM_SHADER_STAGE];

   uint32_t count;               /* total number of descriptors */
   uint32_t num_dynamic_buffers;
   /* Variable-length tail (GNU zero-length array idiom). */
   struct anv_descriptor_slot entries[0];
};

/* One written descriptor: a sampler and/or a view; which field is valid
 * presumably depends on the descriptor type -- confirm against the
 * update code. */
struct anv_descriptor {
   struct anv_sampler *sampler;
   struct anv_surface_view *view;
};

struct anv_descriptor_set {
   /* Variable-length; the count comes from the layout. */
   struct anv_descriptor descriptors[0];
};
570
/* Fixed driver limits. */
#define MAX_VBS 32
#define MAX_SETS 8
#define MAX_RTS 8

struct anv_pipeline_layout {
   struct {
      struct anv_descriptor_set_layout *layout;
      /* Per-stage offsets of this set's slots within the stage's
       * flattened surface / sampler tables. */
      uint32_t surface_start[VK_NUM_SHADER_STAGE];
      uint32_t sampler_start[VK_NUM_SHADER_STAGE];
   } set[MAX_SETS];

   uint32_t num_sets;

   /* Per-stage totals across all sets. */
   struct {
      uint32_t surface_count;
      uint32_t sampler_count;
   } stage[VK_NUM_SHADER_STAGE];
};

struct anv_buffer {
   struct anv_device *device;
   VkDeviceSize size;

   /* Set when bound */
   struct anv_bo *bo;
   VkDeviceSize offset;
};
598
/* Bits for anv_cmd_buffer::dirty -- state that must be re-emitted into
 * the batch before the next draw. */
#define ANV_CMD_BUFFER_PIPELINE_DIRTY (1 << 0)
#define ANV_CMD_BUFFER_DESCRIPTOR_SET_DIRTY (1 << 1)
#define ANV_CMD_BUFFER_RS_DIRTY (1 << 2)
#define ANV_CMD_BUFFER_DS_DIRTY (1 << 3)
#define ANV_CMD_BUFFER_CB_DIRTY (1 << 4)

/* Current vertex-buffer and per-stage descriptor bindings of a command
 * buffer. */
struct anv_bindings {
   struct {
      struct anv_buffer *buffer;
      VkDeviceSize offset;
   } vb[MAX_VBS];

   struct {
      /* NOTE(review): presumably surface-state offsets and packed
       * SAMPLER_STATE dwords -- confirm against the flush code. */
      uint32_t surfaces[256];
      struct { uint32_t dwords[4]; } samplers[16];
   } descriptors[VK_NUM_SHADER_STAGE];
};

struct anv_cmd_buffer {
   struct anv_object base;
   struct anv_device *device;

   /* execbuf bookkeeping; the object/bo arrays are (re)built per submit. */
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 *exec2_objects;
   struct anv_bo **exec2_bos;
   uint32_t exec2_array_length;
   bool need_reloc;
   uint32_t serial;

   /* Batch storage and the allocators feeding indirect state. */
   uint32_t bo_count;
   struct anv_batch batch;
   struct anv_batch_bo *last_batch_bo;
   struct anv_bo surface_bo;
   uint32_t surface_next;
   struct anv_reloc_list surface_relocs;
   struct anv_state_stream binding_table_state_stream;
   struct anv_state_stream surface_state_stream;
   struct anv_state_stream dynamic_state_stream;

   /* State required while building cmd buffer */
   uint32_t vb_dirty;     /* bitmask of dirty vertex-buffer bindings */
   uint32_t dirty;        /* ANV_CMD_BUFFER_*_DIRTY flags */
   struct anv_pipeline *pipeline;
   struct anv_framebuffer *framebuffer;
   struct anv_dynamic_rs_state *rs_state;
   struct anv_dynamic_ds_state *ds_state;
   struct anv_dynamic_vp_state *vp_state;
   struct anv_dynamic_cb_state *cb_state;
   struct anv_bindings *bindings;   /* presumably points at default_bindings
                                     * until descriptor sets are bound */
   struct anv_bindings default_bindings;
};
650
void anv_cmd_buffer_dump(struct anv_cmd_buffer *cmd_buffer);
void anv_aub_writer_destroy(struct anv_aub_writer *writer);

/* A fence backed by its own tiny execbuf; "ready" presumably flips when
 * the bo completes -- confirm against the implementation. */
struct anv_fence {
   struct anv_object base;
   struct anv_bo bo;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   bool ready;
};

/* Shader module: size-prefixed opaque payload stored inline. */
struct anv_shader {
   uint32_t size;
   char data[0];
};
666
struct anv_pipeline {
   struct anv_object base;
   struct anv_device *device;

   /* Commands baked at pipeline-create time (backed by batch_data),
    * presumably replayed into the command buffer at bind time. */
   struct anv_batch batch;
   uint32_t batch_data[256];

   struct anv_shader *shaders[VK_NUM_SHADER_STAGE];
   struct anv_pipeline_layout *layout;
   bool use_repclear;

   /* Compiled-program metadata from the brw compiler backend. */
   struct brw_vs_prog_data vs_prog_data;
   struct brw_wm_prog_data wm_prog_data;
   struct brw_gs_prog_data gs_prog_data;
   struct brw_stage_prog_data *prog_data[VK_NUM_SHADER_STAGE];

   /* URB space partitioning between the VS and GS stages. */
   struct {
      uint32_t vs_start;
      uint32_t vs_size;
      uint32_t nr_vs_entries;
      uint32_t gs_start;
      uint32_t gs_size;
      uint32_t nr_gs_entries;
   } urb;

   /* Per-stage scratch (spill) space. */
   struct anv_bo vs_scratch_bo;
   struct anv_bo ps_scratch_bo;
   struct anv_bo gs_scratch_bo;

   uint32_t active_stages;
   struct anv_state_stream program_stream;  /* presumably holds the compiled
                                             * kernels -- confirm */
   struct anv_state blend_state;
   /* Kernel offsets; presumably NO_KERNEL when a variant is absent. */
   uint32_t vs_simd8;
   uint32_t ps_simd8;
   uint32_t ps_simd16;
   uint32_t gs_vec4;
   uint32_t gs_vertex_count;

   uint32_t vb_used;                  /* bitmask of vertex buffers used */
   uint32_t binding_stride[MAX_VBS];

   /* Pre-packed state dwords; presumably merged with the dynamic state
    * copies at draw time (cf. anv_batch_emit_merge). */
   uint32_t state_sf[GEN8_3DSTATE_SF_length];
   uint32_t state_raster[GEN8_3DSTATE_RASTER_length];
   uint32_t state_wm_depth_stencil[GEN8_3DSTATE_WM_DEPTH_STENCIL_length];
};
709
/* Extra, non-API controls used when the driver creates internal (meta)
 * pipelines. */
struct anv_pipeline_create_info {
   bool use_repclear;
   bool disable_viewport;
   bool disable_scissor;
   bool disable_vs;
   bool use_rectlist;
};

VkResult
anv_pipeline_create(VkDevice device,
                    const VkGraphicsPipelineCreateInfo *pCreateInfo,
                    const struct anv_pipeline_create_info *extra,
                    VkPipeline *pPipeline);

/* Shader compiler backend entry points. */
struct anv_compiler *anv_compiler_create(int fd);
void anv_compiler_destroy(struct anv_compiler *compiler);
int anv_compiler_run(struct anv_compiler *compiler, struct anv_pipeline *pipeline);
void anv_compiler_free(struct anv_pipeline *pipeline);

/* Hardware surface-format description for a VkFormat. */
struct anv_format {
   uint16_t format;      /* hardware surface format value */
   uint8_t cpp;          /* presumably bytes per pixel -- confirm */
   uint8_t channels;
   bool has_stencil;
};

const struct anv_format *
anv_format_for_vk_format(VkFormat format);
738
struct anv_image {
   VkImageType type;
   VkExtent3D extent;
   VkFormat format;
   uint32_t tile_mode;
   VkDeviceSize size;
   uint32_t alignment;
   uint32_t stride;          /* presumably the row pitch in bytes */

   /* Separate-stencil location within the bound bo. */
   uint32_t stencil_offset;
   uint32_t stencil_stride;

   /* Set when bound */
   struct anv_bo *bo;
   VkDeviceSize offset;

   struct anv_swap_chain *swap_chain;  /* presumably non-NULL only for
                                        * presentable images */
};

/* A view of a resource together with its baked SURFACE_STATE. */
struct anv_surface_view {
   struct anv_state surface_state;
   struct anv_bo *bo;
   uint32_t offset;
   uint32_t range;
   VkExtent3D extent;
   VkFormat format;
};

/* Extra, non-API controls for internal image creation. */
struct anv_image_create_info {
   uint32_t tile_mode;
};

VkResult anv_image_create(VkDevice _device,
                          const VkImageCreateInfo *pCreateInfo,
                          const struct anv_image_create_info *extra,
                          VkImage *pImage);

/* View initializers; when cmd_buffer is non-NULL the surface state is
 * presumably allocated from its stream rather than the device pool --
 * confirm against the implementations. */
void anv_image_view_init(struct anv_surface_view *view,
                         struct anv_device *device,
                         const VkImageViewCreateInfo* pCreateInfo,
                         struct anv_cmd_buffer *cmd_buffer);

void anv_color_attachment_view_init(struct anv_surface_view *view,
                                    struct anv_device *device,
                                    const VkColorAttachmentViewCreateInfo* pCreateInfo,
                                    struct anv_cmd_buffer *cmd_buffer);
785
/* Packed hardware SAMPLER_STATE (4 dwords). */
struct anv_sampler {
   uint32_t state[4];
};

/* Depth/stencil attachment: offsets and strides of the depth and stencil
 * surfaces within the bo. */
struct anv_depth_stencil_view {
   struct anv_bo *bo;

   uint32_t depth_offset;
   uint32_t depth_stride;
   uint32_t depth_format;     /* hardware depth format value */

   uint32_t stencil_offset;
   uint32_t stencil_stride;
};

struct anv_framebuffer {
   struct anv_object base;
   uint32_t color_attachment_count;
   const struct anv_surface_view *color_attachments[MAX_RTS];
   const struct anv_depth_stencil_view *depth_stencil;

   uint32_t sample_count;
   uint32_t width;
   uint32_t height;
   uint32_t layers;

   /* Viewport for clears */
   VkDynamicVpState vp_state;
};

/* Per-attachment clear information for a render pass. */
struct anv_render_pass_layer {
   VkAttachmentLoadOp color_load_op;
   VkClearColor clear_color;
};

struct anv_render_pass {
   VkRect render_area;

   uint32_t num_clear_layers;    /* presumably the number of layers whose
                                  * load op is CLEAR -- confirm */
   uint32_t num_layers;
   /* Variable-length tail (GNU zero-length array idiom). */
   struct anv_render_pass_layer layers[0];
};
828
/* Create the device's internal (meta) pipelines and dynamic state. */
void anv_device_init_meta(struct anv_device *device);

void
anv_cmd_buffer_clear(struct anv_cmd_buffer *cmd_buffer,
                     struct anv_render_pass *pass);

void
anv_cmd_buffer_fill_render_targets(struct anv_cmd_buffer *cmd_buffer);

/* Look up a driver entry point by name; presumably backs
 * vkGetInstanceProcAddr / vkGetDeviceProcAddr -- confirm. */
void *
anv_lookup_entrypoint(const char *name);
840
841 #ifdef __cplusplus
842 }
843 #endif