2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
30 #include "util/mesa-sha1.h"
32 #include "anv_private.h"
35 * Descriptor set layouts.
38 VkResult
anv_CreateDescriptorSetLayout(
40 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
41 const VkAllocationCallbacks
* pAllocator
,
42 VkDescriptorSetLayout
* pSetLayout
)
44 ANV_FROM_HANDLE(anv_device
, device
, _device
);
45 struct anv_descriptor_set_layout
*set_layout
;
47 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
49 uint32_t max_binding
= 0;
50 uint32_t immutable_sampler_count
= 0;
51 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
52 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
53 if (pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
54 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
57 size_t size
= sizeof(struct anv_descriptor_set_layout
) +
58 (max_binding
+ 1) * sizeof(set_layout
->binding
[0]) +
59 immutable_sampler_count
* sizeof(struct anv_sampler
*);
61 set_layout
= vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
62 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
64 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
66 /* We just allocate all the samplers at the end of the struct */
67 struct anv_sampler
**samplers
=
68 (struct anv_sampler
**)&set_layout
->binding
[max_binding
+ 1];
70 memset(set_layout
, 0, sizeof(*set_layout
));
71 set_layout
->binding_count
= max_binding
+ 1;
73 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
74 /* Initialize all binding_layout entries to -1 */
75 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
77 set_layout
->binding
[b
].array_size
= 0;
78 set_layout
->binding
[b
].immutable_samplers
= NULL
;
81 /* Initialize all samplers to 0 */
82 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
84 uint32_t sampler_count
[MESA_SHADER_STAGES
] = { 0, };
85 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
86 uint32_t image_count
[MESA_SHADER_STAGES
] = { 0, };
87 uint32_t buffer_count
= 0;
88 uint32_t dynamic_offset_count
= 0;
90 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
91 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
92 uint32_t b
= binding
->binding
;
93 /* We temporarily store the pointer to the binding in the
94 * immutable_samplers pointer. This provides us with a quick-and-dirty
95 * way to sort the bindings by binding number.
97 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
100 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
101 const VkDescriptorSetLayoutBinding
*binding
=
102 (void *)set_layout
->binding
[b
].immutable_samplers
;
107 assert(binding
->descriptorCount
> 0);
109 set_layout
->binding
[b
].type
= binding
->descriptorType
;
111 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
112 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
113 set_layout
->size
+= binding
->descriptorCount
;
115 switch (binding
->descriptorType
) {
116 case VK_DESCRIPTOR_TYPE_SAMPLER
:
117 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
118 anv_foreach_stage(s
, binding
->stageFlags
) {
119 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
120 sampler_count
[s
] += binding
->descriptorCount
;
127 switch (binding
->descriptorType
) {
128 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
129 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
130 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
131 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
132 set_layout
->binding
[b
].buffer_index
= buffer_count
;
133 buffer_count
+= binding
->descriptorCount
;
136 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
137 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
138 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
139 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
140 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
141 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
142 anv_foreach_stage(s
, binding
->stageFlags
) {
143 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
144 surface_count
[s
] += binding
->descriptorCount
;
151 switch (binding
->descriptorType
) {
152 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
153 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
154 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
155 dynamic_offset_count
+= binding
->descriptorCount
;
161 switch (binding
->descriptorType
) {
162 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
163 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
164 anv_foreach_stage(s
, binding
->stageFlags
) {
165 set_layout
->binding
[b
].stage
[s
].image_index
= image_count
[s
];
166 image_count
[s
] += binding
->descriptorCount
;
173 if (binding
->pImmutableSamplers
) {
174 set_layout
->binding
[b
].immutable_samplers
= samplers
;
175 samplers
+= binding
->descriptorCount
;
177 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
178 set_layout
->binding
[b
].immutable_samplers
[i
] =
179 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
181 set_layout
->binding
[b
].immutable_samplers
= NULL
;
184 set_layout
->shader_stages
|= binding
->stageFlags
;
187 set_layout
->buffer_count
= buffer_count
;
188 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
190 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
195 void anv_DestroyDescriptorSetLayout(
197 VkDescriptorSetLayout _set_layout
,
198 const VkAllocationCallbacks
* pAllocator
)
200 ANV_FROM_HANDLE(anv_device
, device
, _device
);
201 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
206 vk_free2(&device
->alloc
, pAllocator
, set_layout
);
210 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
211 const struct anv_descriptor_set_layout
*layout
)
213 size_t size
= sizeof(*layout
) +
214 sizeof(layout
->binding
[0]) * layout
->binding_count
;
215 _mesa_sha1_update(ctx
, layout
, size
);
219 * Pipeline layouts. These have nothing to do with the pipeline. They are
220 * just multiple descriptor set layouts pasted together
223 VkResult
anv_CreatePipelineLayout(
225 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
226 const VkAllocationCallbacks
* pAllocator
,
227 VkPipelineLayout
* pPipelineLayout
)
229 ANV_FROM_HANDLE(anv_device
, device
, _device
);
230 struct anv_pipeline_layout
*layout
;
232 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
234 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
235 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
237 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
239 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
241 unsigned dynamic_offset_count
= 0;
243 memset(layout
->stage
, 0, sizeof(layout
->stage
));
244 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
245 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
246 pCreateInfo
->pSetLayouts
[set
]);
247 layout
->set
[set
].layout
= set_layout
;
249 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
250 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
251 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
254 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
255 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
256 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0)
257 layout
->stage
[s
].has_dynamic_offsets
= true;
262 struct mesa_sha1
*ctx
= _mesa_sha1_init();
263 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
264 sha1_update_descriptor_set_layout(ctx
, layout
->set
[s
].layout
);
265 _mesa_sha1_update(ctx
, &layout
->set
[s
].dynamic_offset_start
,
266 sizeof(layout
->set
[s
].dynamic_offset_start
));
268 _mesa_sha1_update(ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
269 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
270 _mesa_sha1_update(ctx
, &layout
->stage
[s
].has_dynamic_offsets
,
271 sizeof(layout
->stage
[s
].has_dynamic_offsets
));
273 _mesa_sha1_final(ctx
, layout
->sha1
);
275 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
280 void anv_DestroyPipelineLayout(
282 VkPipelineLayout _pipelineLayout
,
283 const VkAllocationCallbacks
* pAllocator
)
285 ANV_FROM_HANDLE(anv_device
, device
, _device
);
286 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
288 if (!pipeline_layout
)
291 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
297 * These are implemented using a big pool of memory and a free-list for the
298 * host memory allocations and a state_stream and a free list for the buffer
299 * view surface state. The spec allows us to fail to allocate due to
300 * fragmentation in all cases but two: 1) after pool reset, allocating up
301 * until the pool size with no freeing must succeed and 2) allocating and
302 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
303 * and the free list lets us recycle blocks for case 2).
308 VkResult
anv_CreateDescriptorPool(
310 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
311 const VkAllocationCallbacks
* pAllocator
,
312 VkDescriptorPool
* pDescriptorPool
)
314 ANV_FROM_HANDLE(anv_device
, device
, _device
);
315 struct anv_descriptor_pool
*pool
;
317 uint32_t descriptor_count
= 0;
318 uint32_t buffer_count
= 0;
319 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
320 switch (pCreateInfo
->pPoolSizes
[i
].type
) {
321 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
322 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
323 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
324 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
325 buffer_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
327 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
332 const size_t pool_size
=
333 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
334 descriptor_count
* sizeof(struct anv_descriptor
) +
335 buffer_count
* sizeof(struct anv_buffer_view
);
336 const size_t total_size
= sizeof(*pool
) + pool_size
;
338 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
339 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
341 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
343 pool
->size
= pool_size
;
345 pool
->free_list
= EMPTY
;
347 anv_state_stream_init(&pool
->surface_state_stream
,
348 &device
->surface_state_block_pool
);
349 pool
->surface_state_free_list
= NULL
;
351 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
356 void anv_DestroyDescriptorPool(
358 VkDescriptorPool _pool
,
359 const VkAllocationCallbacks
* pAllocator
)
361 ANV_FROM_HANDLE(anv_device
, device
, _device
);
362 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
367 anv_state_stream_finish(&pool
->surface_state_stream
);
368 vk_free2(&device
->alloc
, pAllocator
, pool
);
371 VkResult
anv_ResetDescriptorPool(
373 VkDescriptorPool descriptorPool
,
374 VkDescriptorPoolResetFlags flags
)
376 ANV_FROM_HANDLE(anv_device
, device
, _device
);
377 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
380 pool
->free_list
= EMPTY
;
381 anv_state_stream_finish(&pool
->surface_state_stream
);
382 anv_state_stream_init(&pool
->surface_state_stream
,
383 &device
->surface_state_block_pool
);
384 pool
->surface_state_free_list
= NULL
;
/* Free-list node overlaid on a freed descriptor set's memory inside the
 * pool.  `next` is a byte offset into pool->data (EMPTY terminates the
 * list); `size` is the byte size of the freed block.
 * NOTE(review): field list reconstructed from the uses in
 * anv_descriptor_set_create/destroy — confirm against anv_private.h.
 */
struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};
395 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
398 sizeof(struct anv_descriptor_set
) +
399 layout
->size
* sizeof(struct anv_descriptor
) +
400 layout
->buffer_count
* sizeof(struct anv_buffer_view
);
403 struct surface_state_free_list_entry
{
405 struct anv_state state
;
409 anv_descriptor_set_create(struct anv_device
*device
,
410 struct anv_descriptor_pool
*pool
,
411 const struct anv_descriptor_set_layout
*layout
,
412 struct anv_descriptor_set
**out_set
)
414 struct anv_descriptor_set
*set
;
415 const size_t size
= anv_descriptor_set_layout_size(layout
);
418 if (size
<= pool
->size
- pool
->next
) {
419 set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
422 struct pool_free_list_entry
*entry
;
423 uint32_t *link
= &pool
->free_list
;
424 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
425 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
426 if (size
<= entry
->size
) {
428 set
= (struct anv_descriptor_set
*) entry
;
436 if (pool
->free_list
!= EMPTY
) {
437 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
439 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR
);
444 set
->layout
= layout
;
446 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
447 set
->buffer_count
= layout
->buffer_count
;
449 /* By defining the descriptors to be zero now, we can later verify that
450 * a descriptor has not been populated with user data.
452 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
454 /* Go through and fill out immutable samplers if we have any */
455 struct anv_descriptor
*desc
= set
->descriptors
;
456 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
457 if (layout
->binding
[b
].immutable_samplers
) {
458 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
459 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
460 * UpdateDescriptorSets if needed. However, if the descriptor
461 * set has an immutable sampler, UpdateDescriptorSets may never
462 * touch it, so we need to make sure it's 100% valid now.
464 desc
[i
] = (struct anv_descriptor
) {
465 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
466 .sampler
= layout
->binding
[b
].immutable_samplers
[i
],
470 desc
+= layout
->binding
[b
].array_size
;
473 /* Allocate surface state for the buffer views. */
474 for (uint32_t b
= 0; b
< layout
->buffer_count
; b
++) {
475 struct surface_state_free_list_entry
*entry
=
476 pool
->surface_state_free_list
;
477 struct anv_state state
;
480 state
= entry
->state
;
481 pool
->surface_state_free_list
= entry
->next
;
482 assert(state
.alloc_size
== 64);
484 state
= anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
487 set
->buffer_views
[b
].surface_state
= state
;
496 anv_descriptor_set_destroy(struct anv_device
*device
,
497 struct anv_descriptor_pool
*pool
,
498 struct anv_descriptor_set
*set
)
500 /* Put the buffer view surface state back on the free list. */
501 for (uint32_t b
= 0; b
< set
->buffer_count
; b
++) {
502 struct surface_state_free_list_entry
*entry
=
503 set
->buffer_views
[b
].surface_state
.map
;
504 entry
->next
= pool
->surface_state_free_list
;
505 entry
->state
= set
->buffer_views
[b
].surface_state
;
506 pool
->surface_state_free_list
= entry
;
509 /* Put the descriptor set allocation back on the free list. */
510 const uint32_t index
= (char *) set
- pool
->data
;
511 if (index
+ set
->size
== pool
->next
) {
514 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
515 entry
->next
= pool
->free_list
;
516 entry
->size
= set
->size
;
517 pool
->free_list
= (char *) entry
- pool
->data
;
521 VkResult
anv_AllocateDescriptorSets(
523 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
524 VkDescriptorSet
* pDescriptorSets
)
526 ANV_FROM_HANDLE(anv_device
, device
, _device
);
527 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
529 VkResult result
= VK_SUCCESS
;
530 struct anv_descriptor_set
*set
;
533 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
534 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
535 pAllocateInfo
->pSetLayouts
[i
]);
537 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
538 if (result
!= VK_SUCCESS
)
541 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
544 if (result
!= VK_SUCCESS
)
545 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
551 VkResult
anv_FreeDescriptorSets(
553 VkDescriptorPool descriptorPool
,
555 const VkDescriptorSet
* pDescriptorSets
)
557 ANV_FROM_HANDLE(anv_device
, device
, _device
);
558 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
560 for (uint32_t i
= 0; i
< count
; i
++) {
561 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
566 anv_descriptor_set_destroy(device
, pool
, set
);
573 anv_descriptor_set_write_image_view(struct anv_descriptor_set
*set
,
574 const struct gen_device_info
* const devinfo
,
575 const VkDescriptorImageInfo
* const info
,
576 VkDescriptorType type
,
580 const struct anv_descriptor_set_binding_layout
*bind_layout
=
581 &set
->layout
->binding
[binding
];
582 struct anv_descriptor
*desc
=
583 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
584 struct anv_image_view
*image_view
= NULL
;
585 struct anv_sampler
*sampler
= NULL
;
587 assert(type
== bind_layout
->type
);
590 case VK_DESCRIPTOR_TYPE_SAMPLER
:
591 sampler
= anv_sampler_from_handle(info
->sampler
);
594 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
595 image_view
= anv_image_view_from_handle(info
->imageView
);
596 sampler
= anv_sampler_from_handle(info
->sampler
);
599 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
600 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
601 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
602 image_view
= anv_image_view_from_handle(info
->imageView
);
606 unreachable("invalid descriptor type");
609 /* If this descriptor has an immutable sampler, we don't want to stomp on
612 sampler
= bind_layout
->immutable_samplers
?
613 bind_layout
->immutable_samplers
[element
] :
616 *desc
= (struct anv_descriptor
) {
618 .image_view
= image_view
,
620 .aux_usage
= image_view
== NULL
? ISL_AUX_USAGE_NONE
:
621 anv_layout_to_aux_usage(devinfo
, image_view
->image
,
622 image_view
->aspect_mask
,
628 anv_descriptor_set_write_buffer_view(struct anv_descriptor_set
*set
,
629 VkDescriptorType type
,
630 struct anv_buffer_view
*buffer_view
,
634 const struct anv_descriptor_set_binding_layout
*bind_layout
=
635 &set
->layout
->binding
[binding
];
636 struct anv_descriptor
*desc
=
637 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
639 assert(type
== bind_layout
->type
);
641 *desc
= (struct anv_descriptor
) {
643 .buffer_view
= buffer_view
,
648 anv_descriptor_set_write_buffer(struct anv_descriptor_set
*set
,
649 struct anv_device
*device
,
650 struct anv_state_stream
*alloc_stream
,
651 VkDescriptorType type
,
652 struct anv_buffer
*buffer
,
658 const struct anv_descriptor_set_binding_layout
*bind_layout
=
659 &set
->layout
->binding
[binding
];
660 struct anv_descriptor
*desc
=
661 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
663 assert(type
== bind_layout
->type
);
665 struct anv_buffer_view
*bview
=
666 &set
->buffer_views
[bind_layout
->buffer_index
+ element
];
668 bview
->format
= anv_isl_format_for_descriptor_type(type
);
669 bview
->bo
= buffer
->bo
;
670 bview
->offset
= buffer
->offset
+ offset
;
672 /* For buffers with dynamic offsets, we use the full possible range in the
673 * surface state and do the actual range-checking in the shader.
675 if (bind_layout
->dynamic_offset_index
>= 0 || range
== VK_WHOLE_SIZE
)
676 bview
->range
= buffer
->size
- offset
;
678 bview
->range
= range
;
680 /* If we're writing descriptors through a push command, we need to allocate
681 * the surface state from the command buffer. Otherwise it will be
682 * allocated by the descriptor pool when calling
683 * vkAllocateDescriptorSets. */
685 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
687 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
689 bview
->offset
, bview
->range
, 1);
691 *desc
= (struct anv_descriptor
) {
693 .buffer_view
= bview
,
697 void anv_UpdateDescriptorSets(
699 uint32_t descriptorWriteCount
,
700 const VkWriteDescriptorSet
* pDescriptorWrites
,
701 uint32_t descriptorCopyCount
,
702 const VkCopyDescriptorSet
* pDescriptorCopies
)
704 ANV_FROM_HANDLE(anv_device
, device
, _device
);
706 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
707 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
708 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
710 switch (write
->descriptorType
) {
711 case VK_DESCRIPTOR_TYPE_SAMPLER
:
712 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
713 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
714 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
715 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
716 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
717 anv_descriptor_set_write_image_view(set
, &device
->info
,
718 write
->pImageInfo
+ j
,
719 write
->descriptorType
,
721 write
->dstArrayElement
+ j
);
725 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
726 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
727 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
728 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
729 write
->pTexelBufferView
[j
]);
731 anv_descriptor_set_write_buffer_view(set
,
732 write
->descriptorType
,
735 write
->dstArrayElement
+ j
);
739 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
740 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
741 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
742 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
743 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
744 assert(write
->pBufferInfo
[j
].buffer
);
745 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
748 anv_descriptor_set_write_buffer(set
,
751 write
->descriptorType
,
754 write
->dstArrayElement
+ j
,
755 write
->pBufferInfo
[j
].offset
,
756 write
->pBufferInfo
[j
].range
);
765 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
766 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
767 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->dstSet
);
768 ANV_FROM_HANDLE(anv_descriptor_set
, dst
, copy
->dstSet
);
770 const struct anv_descriptor_set_binding_layout
*src_layout
=
771 &src
->layout
->binding
[copy
->srcBinding
];
772 struct anv_descriptor
*src_desc
=
773 &src
->descriptors
[src_layout
->descriptor_index
];
774 src_desc
+= copy
->srcArrayElement
;
776 const struct anv_descriptor_set_binding_layout
*dst_layout
=
777 &dst
->layout
->binding
[copy
->dstBinding
];
778 struct anv_descriptor
*dst_desc
=
779 &dst
->descriptors
[dst_layout
->descriptor_index
];
780 dst_desc
+= copy
->dstArrayElement
;
782 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++)
783 dst_desc
[j
] = src_desc
[j
];
788 * Descriptor update templates.
792 anv_descriptor_set_write_template(struct anv_descriptor_set
*set
,
793 struct anv_device
*device
,
794 struct anv_state_stream
*alloc_stream
,
795 const struct anv_descriptor_update_template
*template,
798 const struct anv_descriptor_set_layout
*layout
= set
->layout
;
800 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
801 const struct anv_descriptor_template_entry
*entry
=
802 &template->entries
[i
];
803 const struct anv_descriptor_set_binding_layout
*bind_layout
=
804 &layout
->binding
[entry
->binding
];
805 struct anv_descriptor
*desc
= &set
->descriptors
[bind_layout
->descriptor_index
];
806 desc
+= entry
->array_element
;
808 switch (entry
->type
) {
809 case VK_DESCRIPTOR_TYPE_SAMPLER
:
810 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
811 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
812 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
813 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
814 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
815 const VkDescriptorImageInfo
*info
=
816 data
+ entry
->offset
+ j
* entry
->stride
;
817 anv_descriptor_set_write_image_view(set
, &device
->info
,
820 entry
->array_element
+ j
);
824 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
825 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
826 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
827 const VkBufferView
*_bview
=
828 data
+ entry
->offset
+ j
* entry
->stride
;
829 ANV_FROM_HANDLE(anv_buffer_view
, bview
, *_bview
);
831 anv_descriptor_set_write_buffer_view(set
,
835 entry
->array_element
+ j
);
839 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
840 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
841 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
842 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
843 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
844 const VkDescriptorBufferInfo
*info
=
845 data
+ entry
->offset
+ j
* entry
->stride
;
846 ANV_FROM_HANDLE(anv_buffer
, buffer
, info
->buffer
);
848 anv_descriptor_set_write_buffer(set
,
854 entry
->array_element
+ j
,
855 info
->offset
, info
->range
);
865 VkResult
anv_CreateDescriptorUpdateTemplateKHR(
867 const VkDescriptorUpdateTemplateCreateInfoKHR
* pCreateInfo
,
868 const VkAllocationCallbacks
* pAllocator
,
869 VkDescriptorUpdateTemplateKHR
* pDescriptorUpdateTemplate
)
871 ANV_FROM_HANDLE(anv_device
, device
, _device
);
872 struct anv_descriptor_update_template
*template;
874 size_t size
= sizeof(*template) +
875 pCreateInfo
->descriptorUpdateEntryCount
* sizeof(template->entries
[0]);
876 template = vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
877 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
878 if (template == NULL
)
879 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
881 if (pCreateInfo
->templateType
== VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR
)
882 template->set
= pCreateInfo
->set
;
884 template->entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
885 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
886 const VkDescriptorUpdateTemplateEntryKHR
*pEntry
=
887 &pCreateInfo
->pDescriptorUpdateEntries
[i
];
889 template->entries
[i
] = (struct anv_descriptor_template_entry
) {
890 .type
= pEntry
->descriptorType
,
891 .binding
= pEntry
->dstBinding
,
892 .array_element
= pEntry
->dstArrayElement
,
893 .array_count
= pEntry
->descriptorCount
,
894 .offset
= pEntry
->offset
,
895 .stride
= pEntry
->stride
,
899 *pDescriptorUpdateTemplate
=
900 anv_descriptor_update_template_to_handle(template);
905 void anv_DestroyDescriptorUpdateTemplateKHR(
907 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate
,
908 const VkAllocationCallbacks
* pAllocator
)
910 ANV_FROM_HANDLE(anv_device
, device
, _device
);
911 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
912 descriptorUpdateTemplate
);
914 vk_free2(&device
->alloc
, pAllocator
, template);
917 void anv_UpdateDescriptorSetWithTemplateKHR(
919 VkDescriptorSet descriptorSet
,
920 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate
,
923 ANV_FROM_HANDLE(anv_device
, device
, _device
);
924 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
925 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
926 descriptorUpdateTemplate
);
928 anv_descriptor_set_write_template(set
, device
, NULL
, template, pData
);