2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
30 #include "util/mesa-sha1.h"
33 #include "anv_private.h"
36 * Descriptor set layouts.
39 static enum anv_descriptor_data
40 anv_descriptor_data_for_type(const struct anv_physical_device
*device
,
41 VkDescriptorType type
)
43 enum anv_descriptor_data data
= 0;
46 case VK_DESCRIPTOR_TYPE_SAMPLER
:
47 data
= ANV_DESCRIPTOR_SAMPLER_STATE
;
50 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
51 data
= ANV_DESCRIPTOR_SURFACE_STATE
|
52 ANV_DESCRIPTOR_SAMPLER_STATE
;
55 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
56 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
57 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
58 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
61 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
62 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
63 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
64 if (device
->info
.gen
< 9)
65 data
|= ANV_DESCRIPTOR_IMAGE_PARAM
;
68 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
69 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
70 data
= ANV_DESCRIPTOR_SURFACE_STATE
|
71 ANV_DESCRIPTOR_BUFFER_VIEW
;
74 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
75 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
76 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
79 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
:
80 data
= ANV_DESCRIPTOR_INLINE_UNIFORM
;
84 unreachable("Unsupported descriptor type");
91 anv_descriptor_data_size(enum anv_descriptor_data data
)
96 /** Returns the size in bytes of each descriptor with the given layout */
98 anv_descriptor_size(const struct anv_descriptor_set_binding_layout
*layout
)
100 if (layout
->data
& ANV_DESCRIPTOR_INLINE_UNIFORM
) {
101 assert(layout
->data
== ANV_DESCRIPTOR_INLINE_UNIFORM
);
102 return layout
->array_size
;
105 return anv_descriptor_data_size(layout
->data
);
108 /** Returns the size in bytes of each descriptor of the given type
110 * This version of the function does not have access to the entire layout so
111 * it may only work on certain descriptor types where the descriptor size is
112 * entirely determined by the descriptor type. Whenever possible, code should
113 * use anv_descriptor_size() instead.
116 anv_descriptor_type_size(const struct anv_physical_device
*pdevice
,
117 VkDescriptorType type
)
119 assert(type
!= VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
);
120 return anv_descriptor_data_size(anv_descriptor_data_for_type(pdevice
, type
));
123 void anv_GetDescriptorSetLayoutSupport(
125 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
126 VkDescriptorSetLayoutSupport
* pSupport
)
128 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
130 for (uint32_t b
= 0; b
< pCreateInfo
->bindingCount
; b
++) {
131 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[b
];
133 switch (binding
->descriptorType
) {
134 case VK_DESCRIPTOR_TYPE_SAMPLER
:
135 /* There is no real limit on samplers */
138 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
139 if (binding
->pImmutableSamplers
) {
140 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++) {
141 ANV_FROM_HANDLE(anv_sampler
, sampler
,
142 binding
->pImmutableSamplers
[i
]);
143 anv_foreach_stage(s
, binding
->stageFlags
)
144 surface_count
[s
] += sampler
->n_planes
;
147 anv_foreach_stage(s
, binding
->stageFlags
)
148 surface_count
[s
] += binding
->descriptorCount
;
153 anv_foreach_stage(s
, binding
->stageFlags
)
154 surface_count
[s
] += binding
->descriptorCount
;
159 bool supported
= true;
160 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
161 /* Our maximum binding table size is 250 and we need to reserve 8 for
162 * render targets. 240 is a nice round number.
164 if (surface_count
[s
] >= 240)
168 pSupport
->supported
= supported
;
171 VkResult
anv_CreateDescriptorSetLayout(
173 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
174 const VkAllocationCallbacks
* pAllocator
,
175 VkDescriptorSetLayout
* pSetLayout
)
177 ANV_FROM_HANDLE(anv_device
, device
, _device
);
179 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
181 uint32_t max_binding
= 0;
182 uint32_t immutable_sampler_count
= 0;
183 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
184 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
186 /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
188 * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
189 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
190 * pImmutableSamplers can be used to initialize a set of immutable
191 * samplers. [...] If descriptorType is not one of these descriptor
192 * types, then pImmutableSamplers is ignored.
194 * We need to be careful here and only parse pImmutableSamplers if we
195 * have one of the right descriptor types.
197 VkDescriptorType desc_type
= pCreateInfo
->pBindings
[j
].descriptorType
;
198 if ((desc_type
== VK_DESCRIPTOR_TYPE_SAMPLER
||
199 desc_type
== VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
) &&
200 pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
201 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
204 struct anv_descriptor_set_layout
*set_layout
;
205 struct anv_descriptor_set_binding_layout
*bindings
;
206 struct anv_sampler
**samplers
;
208 /* We need to allocate decriptor set layouts off the device allocator
209 * with DEVICE scope because they are reference counted and may not be
210 * destroyed when vkDestroyDescriptorSetLayout is called.
213 anv_multialloc_add(&ma
, &set_layout
, 1);
214 anv_multialloc_add(&ma
, &bindings
, max_binding
+ 1);
215 anv_multialloc_add(&ma
, &samplers
, immutable_sampler_count
);
217 if (!anv_multialloc_alloc(&ma
, &device
->alloc
,
218 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE
))
219 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
221 memset(set_layout
, 0, sizeof(*set_layout
));
222 set_layout
->ref_cnt
= 1;
223 set_layout
->binding_count
= max_binding
+ 1;
225 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
226 /* Initialize all binding_layout entries to -1 */
227 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
229 set_layout
->binding
[b
].data
= 0;
230 set_layout
->binding
[b
].array_size
= 0;
231 set_layout
->binding
[b
].immutable_samplers
= NULL
;
234 /* Initialize all samplers to 0 */
235 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
237 uint32_t buffer_view_count
= 0;
238 uint32_t dynamic_offset_count
= 0;
239 uint32_t descriptor_buffer_size
= 0;
241 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
242 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
243 uint32_t b
= binding
->binding
;
244 /* We temporarily store the pointer to the binding in the
245 * immutable_samplers pointer. This provides us with a quick-and-dirty
246 * way to sort the bindings by binding number.
248 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
251 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
252 const VkDescriptorSetLayoutBinding
*binding
=
253 (void *)set_layout
->binding
[b
].immutable_samplers
;
258 /* We temporarily stashed the pointer to the binding in the
259 * immutable_samplers pointer. Now that we've pulled it back out
260 * again, we reset immutable_samplers to NULL.
262 set_layout
->binding
[b
].immutable_samplers
= NULL
;
264 if (binding
->descriptorCount
== 0)
268 set_layout
->binding
[b
].type
= binding
->descriptorType
;
270 set_layout
->binding
[b
].data
=
271 anv_descriptor_data_for_type(&device
->instance
->physicalDevice
,
272 binding
->descriptorType
);
273 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
274 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
275 set_layout
->size
+= binding
->descriptorCount
;
277 if (set_layout
->binding
[b
].data
& ANV_DESCRIPTOR_BUFFER_VIEW
) {
278 set_layout
->binding
[b
].buffer_view_index
= buffer_view_count
;
279 buffer_view_count
+= binding
->descriptorCount
;
282 switch (binding
->descriptorType
) {
283 case VK_DESCRIPTOR_TYPE_SAMPLER
:
284 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
285 if (binding
->pImmutableSamplers
) {
286 set_layout
->binding
[b
].immutable_samplers
= samplers
;
287 samplers
+= binding
->descriptorCount
;
289 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
290 set_layout
->binding
[b
].immutable_samplers
[i
] =
291 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
298 switch (binding
->descriptorType
) {
299 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
300 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
301 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
302 dynamic_offset_count
+= binding
->descriptorCount
;
309 if (binding
->descriptorType
==
310 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
) {
311 /* Inline uniform blocks are specified to use the descriptor array
312 * size as the size in bytes of the block.
314 descriptor_buffer_size
= align_u32(descriptor_buffer_size
, 32);
315 set_layout
->binding
[b
].descriptor_offset
= descriptor_buffer_size
;
316 descriptor_buffer_size
+= binding
->descriptorCount
;
318 set_layout
->binding
[b
].descriptor_offset
= descriptor_buffer_size
;
319 descriptor_buffer_size
+= anv_descriptor_size(&set_layout
->binding
[b
]) *
320 binding
->descriptorCount
;
323 set_layout
->shader_stages
|= binding
->stageFlags
;
326 set_layout
->buffer_view_count
= buffer_view_count
;
327 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
328 set_layout
->descriptor_buffer_size
= descriptor_buffer_size
;
330 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
335 void anv_DestroyDescriptorSetLayout(
337 VkDescriptorSetLayout _set_layout
,
338 const VkAllocationCallbacks
* pAllocator
)
340 ANV_FROM_HANDLE(anv_device
, device
, _device
);
341 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
346 anv_descriptor_set_layout_unref(device
, set_layout
);
/* Hash one fixed-size value into a SHA-1 context.  No trailing semicolon in
 * the expansion: callers supply their own ';', so the macro behaves like a
 * normal statement and stays well-formed inside if/else bodies.
 */
#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x))
352 sha1_update_immutable_sampler(struct mesa_sha1
*ctx
,
353 const struct anv_sampler
*sampler
)
355 if (!sampler
->conversion
)
358 /* The only thing that affects the shader is ycbcr conversion */
359 _mesa_sha1_update(ctx
, sampler
->conversion
,
360 sizeof(*sampler
->conversion
));
364 sha1_update_descriptor_set_binding_layout(struct mesa_sha1
*ctx
,
365 const struct anv_descriptor_set_binding_layout
*layout
)
367 SHA1_UPDATE_VALUE(ctx
, layout
->data
);
368 SHA1_UPDATE_VALUE(ctx
, layout
->array_size
);
369 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_index
);
370 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_index
);
371 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_view_index
);
372 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_offset
);
374 if (layout
->immutable_samplers
) {
375 for (uint16_t i
= 0; i
< layout
->array_size
; i
++)
376 sha1_update_immutable_sampler(ctx
, layout
->immutable_samplers
[i
]);
381 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
382 const struct anv_descriptor_set_layout
*layout
)
384 SHA1_UPDATE_VALUE(ctx
, layout
->binding_count
);
385 SHA1_UPDATE_VALUE(ctx
, layout
->size
);
386 SHA1_UPDATE_VALUE(ctx
, layout
->shader_stages
);
387 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_view_count
);
388 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_count
);
389 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_buffer_size
);
391 for (uint16_t i
= 0; i
< layout
->binding_count
; i
++)
392 sha1_update_descriptor_set_binding_layout(ctx
, &layout
->binding
[i
]);
396 * Pipeline layouts. These have nothing to do with the pipeline. They are
397 * just multiple descriptor set layouts pasted together
400 VkResult
anv_CreatePipelineLayout(
402 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
403 const VkAllocationCallbacks
* pAllocator
,
404 VkPipelineLayout
* pPipelineLayout
)
406 ANV_FROM_HANDLE(anv_device
, device
, _device
);
407 struct anv_pipeline_layout
*layout
;
409 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
411 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
412 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
414 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
416 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
418 unsigned dynamic_offset_count
= 0;
420 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
421 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
422 pCreateInfo
->pSetLayouts
[set
]);
423 layout
->set
[set
].layout
= set_layout
;
424 anv_descriptor_set_layout_ref(set_layout
);
426 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
427 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
428 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
431 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
435 struct mesa_sha1 ctx
;
436 _mesa_sha1_init(&ctx
);
437 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
438 sha1_update_descriptor_set_layout(&ctx
, layout
->set
[s
].layout
);
439 _mesa_sha1_update(&ctx
, &layout
->set
[s
].dynamic_offset_start
,
440 sizeof(layout
->set
[s
].dynamic_offset_start
));
442 _mesa_sha1_update(&ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
443 _mesa_sha1_final(&ctx
, layout
->sha1
);
445 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
450 void anv_DestroyPipelineLayout(
452 VkPipelineLayout _pipelineLayout
,
453 const VkAllocationCallbacks
* pAllocator
)
455 ANV_FROM_HANDLE(anv_device
, device
, _device
);
456 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
458 if (!pipeline_layout
)
461 for (uint32_t i
= 0; i
< pipeline_layout
->num_sets
; i
++)
462 anv_descriptor_set_layout_unref(device
, pipeline_layout
->set
[i
].layout
);
464 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
470 * These are implemented using a big pool of memory and a free-list for the
471 * host memory allocations and a state_stream and a free list for the buffer
472 * view surface state. The spec allows us to fail to allocate due to
473 * fragmentation in all cases but two: 1) after pool reset, allocating up
474 * until the pool size with no freeing must succeed and 2) allocating and
475 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
476 * and the free lists lets us recycle blocks for case 2).
479 /* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
480 * ensure we can allocate the entire BO without hitting zero. The actual
481 * amount doesn't matter.
483 #define POOL_HEAP_OFFSET 64
487 VkResult
anv_CreateDescriptorPool(
489 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
490 const VkAllocationCallbacks
* pAllocator
,
491 VkDescriptorPool
* pDescriptorPool
)
493 ANV_FROM_HANDLE(anv_device
, device
, _device
);
494 struct anv_descriptor_pool
*pool
;
496 const VkDescriptorPoolInlineUniformBlockCreateInfoEXT
*inline_info
=
497 vk_find_struct_const(pCreateInfo
->pNext
,
498 DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT
);
500 uint32_t descriptor_count
= 0;
501 uint32_t buffer_view_count
= 0;
502 uint32_t descriptor_bo_size
= 0;
503 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
504 enum anv_descriptor_data desc_data
=
505 anv_descriptor_data_for_type(&device
->instance
->physicalDevice
,
506 pCreateInfo
->pPoolSizes
[i
].type
);
508 if (desc_data
& ANV_DESCRIPTOR_BUFFER_VIEW
)
509 buffer_view_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
511 unsigned desc_data_size
= anv_descriptor_data_size(desc_data
) *
512 pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
514 if (pCreateInfo
->pPoolSizes
[i
].type
==
515 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
) {
516 /* Inline uniform blocks are specified to use the descriptor array
517 * size as the size in bytes of the block.
520 desc_data_size
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
523 descriptor_bo_size
+= desc_data_size
;
525 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
527 /* We have to align descriptor buffer allocations to 32B so that we can
528 * push descriptor buffers. This means that each descriptor buffer
529 * allocated may burn up to 32B of extra space to get the right alignment.
530 * (Technically, it's at most 28B because we're always going to start at
531 * least 4B aligned but we're being conservative here.) Allocate enough
532 * extra space that we can chop it into maxSets pieces and align each one
535 descriptor_bo_size
+= 32 * pCreateInfo
->maxSets
;
536 descriptor_bo_size
= ALIGN(descriptor_bo_size
, 4096);
537 /* We align inline uniform blocks to 32B */
539 descriptor_bo_size
+= 32 * inline_info
->maxInlineUniformBlockBindings
;
541 const size_t pool_size
=
542 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
543 descriptor_count
* sizeof(struct anv_descriptor
) +
544 buffer_view_count
* sizeof(struct anv_buffer_view
);
545 const size_t total_size
= sizeof(*pool
) + pool_size
;
547 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
548 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
550 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
552 pool
->size
= pool_size
;
554 pool
->free_list
= EMPTY
;
556 if (descriptor_bo_size
> 0) {
557 VkResult result
= anv_bo_init_new(&pool
->bo
, device
, descriptor_bo_size
);
558 if (result
!= VK_SUCCESS
) {
559 vk_free2(&device
->alloc
, pAllocator
, pool
);
563 anv_gem_set_caching(device
, pool
->bo
.gem_handle
, I915_CACHING_CACHED
);
565 pool
->bo
.map
= anv_gem_mmap(device
, pool
->bo
.gem_handle
, 0,
566 descriptor_bo_size
, 0);
567 if (pool
->bo
.map
== NULL
) {
568 anv_gem_close(device
, pool
->bo
.gem_handle
);
569 vk_free2(&device
->alloc
, pAllocator
, pool
);
570 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
573 if (device
->instance
->physicalDevice
.use_softpin
) {
574 pool
->bo
.flags
|= EXEC_OBJECT_PINNED
;
575 anv_vma_alloc(device
, &pool
->bo
);
578 util_vma_heap_init(&pool
->bo_heap
, POOL_HEAP_OFFSET
, descriptor_bo_size
);
583 anv_state_stream_init(&pool
->surface_state_stream
,
584 &device
->surface_state_pool
, 4096);
585 pool
->surface_state_free_list
= NULL
;
587 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
592 void anv_DestroyDescriptorPool(
594 VkDescriptorPool _pool
,
595 const VkAllocationCallbacks
* pAllocator
)
597 ANV_FROM_HANDLE(anv_device
, device
, _device
);
598 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
604 anv_gem_munmap(pool
->bo
.map
, pool
->bo
.size
);
605 anv_vma_free(device
, &pool
->bo
);
606 anv_gem_close(device
, pool
->bo
.gem_handle
);
608 anv_state_stream_finish(&pool
->surface_state_stream
);
609 vk_free2(&device
->alloc
, pAllocator
, pool
);
612 VkResult
anv_ResetDescriptorPool(
614 VkDescriptorPool descriptorPool
,
615 VkDescriptorPoolResetFlags flags
)
617 ANV_FROM_HANDLE(anv_device
, device
, _device
);
618 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
621 pool
->free_list
= EMPTY
;
624 util_vma_heap_finish(&pool
->bo_heap
);
625 util_vma_heap_init(&pool
->bo_heap
, POOL_HEAP_OFFSET
, pool
->bo
.size
);
628 anv_state_stream_finish(&pool
->surface_state_stream
);
629 anv_state_stream_init(&pool
->surface_state_stream
,
630 &device
->surface_state_pool
, 4096);
631 pool
->surface_state_free_list
= NULL
;
/* Free-list node overlaid on a freed descriptor-set allocation inside the
 * pool's host data area.  NOTE(review): field list not visible in this
 * chunk; reconstructed from use in alloc/free below (entry->next is a pool
 * offset, entry->size a byte count) — confirm against upstream.
 */
struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};
642 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool
*pool
,
644 struct anv_descriptor_set
**set
)
646 if (size
<= pool
->size
- pool
->next
) {
647 *set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
651 struct pool_free_list_entry
*entry
;
652 uint32_t *link
= &pool
->free_list
;
653 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
654 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
655 if (size
<= entry
->size
) {
657 *set
= (struct anv_descriptor_set
*) entry
;
663 if (pool
->free_list
!= EMPTY
) {
664 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
666 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY
);
672 anv_descriptor_pool_free_set(struct anv_descriptor_pool
*pool
,
673 struct anv_descriptor_set
*set
)
675 /* Put the descriptor set allocation back on the free list. */
676 const uint32_t index
= (char *) set
- pool
->data
;
677 if (index
+ set
->size
== pool
->next
) {
680 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
681 entry
->next
= pool
->free_list
;
682 entry
->size
= set
->size
;
683 pool
->free_list
= (char *) entry
- pool
->data
;
687 struct surface_state_free_list_entry
{
689 struct anv_state state
;
692 static struct anv_state
693 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool
*pool
)
695 struct surface_state_free_list_entry
*entry
=
696 pool
->surface_state_free_list
;
699 struct anv_state state
= entry
->state
;
700 pool
->surface_state_free_list
= entry
->next
;
701 assert(state
.alloc_size
== 64);
704 return anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
709 anv_descriptor_pool_free_state(struct anv_descriptor_pool
*pool
,
710 struct anv_state state
)
712 /* Put the buffer view surface state back on the free list. */
713 struct surface_state_free_list_entry
*entry
= state
.map
;
714 entry
->next
= pool
->surface_state_free_list
;
715 entry
->state
= state
;
716 pool
->surface_state_free_list
= entry
;
720 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
723 sizeof(struct anv_descriptor_set
) +
724 layout
->size
* sizeof(struct anv_descriptor
) +
725 layout
->buffer_view_count
* sizeof(struct anv_buffer_view
);
729 anv_descriptor_set_create(struct anv_device
*device
,
730 struct anv_descriptor_pool
*pool
,
731 struct anv_descriptor_set_layout
*layout
,
732 struct anv_descriptor_set
**out_set
)
734 struct anv_descriptor_set
*set
;
735 const size_t size
= anv_descriptor_set_layout_size(layout
);
737 VkResult result
= anv_descriptor_pool_alloc_set(pool
, size
, &set
);
738 if (result
!= VK_SUCCESS
)
741 if (layout
->descriptor_buffer_size
) {
742 /* Align the size to 32 so that alignment gaps don't cause extra holes
743 * in the heap which can lead to bad performance.
745 uint64_t pool_vma_offset
=
746 util_vma_heap_alloc(&pool
->bo_heap
,
747 ALIGN(layout
->descriptor_buffer_size
, 32), 32);
748 if (pool_vma_offset
== 0) {
749 anv_descriptor_pool_free_set(pool
, set
);
750 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
752 assert(pool_vma_offset
>= POOL_HEAP_OFFSET
&&
753 pool_vma_offset
- POOL_HEAP_OFFSET
<= INT32_MAX
);
754 set
->desc_mem
.offset
= pool_vma_offset
- POOL_HEAP_OFFSET
;
755 set
->desc_mem
.alloc_size
= layout
->descriptor_buffer_size
;
756 set
->desc_mem
.map
= pool
->bo
.map
+ set
->desc_mem
.offset
;
758 set
->desc_surface_state
= anv_descriptor_pool_alloc_state(pool
);
759 anv_fill_buffer_surface_state(device
, set
->desc_surface_state
,
760 ISL_FORMAT_R32G32B32A32_FLOAT
,
761 (struct anv_address
) {
763 .offset
= set
->desc_mem
.offset
,
765 layout
->descriptor_buffer_size
, 1);
767 set
->desc_mem
= ANV_STATE_NULL
;
768 set
->desc_surface_state
= ANV_STATE_NULL
;
772 set
->layout
= layout
;
773 anv_descriptor_set_layout_ref(layout
);
777 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
778 set
->buffer_view_count
= layout
->buffer_view_count
;
780 /* By defining the descriptors to be zero now, we can later verify that
781 * a descriptor has not been populated with user data.
783 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
785 /* Go through and fill out immutable samplers if we have any */
786 struct anv_descriptor
*desc
= set
->descriptors
;
787 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
788 if (layout
->binding
[b
].immutable_samplers
) {
789 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
790 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
791 * UpdateDescriptorSets if needed. However, if the descriptor
792 * set has an immutable sampler, UpdateDescriptorSets may never
793 * touch it, so we need to make sure it's 100% valid now.
795 desc
[i
] = (struct anv_descriptor
) {
796 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
797 .sampler
= layout
->binding
[b
].immutable_samplers
[i
],
801 desc
+= layout
->binding
[b
].array_size
;
804 /* Allocate surface state for the buffer views. */
805 for (uint32_t b
= 0; b
< layout
->buffer_view_count
; b
++) {
806 set
->buffer_views
[b
].surface_state
=
807 anv_descriptor_pool_alloc_state(pool
);
816 anv_descriptor_set_destroy(struct anv_device
*device
,
817 struct anv_descriptor_pool
*pool
,
818 struct anv_descriptor_set
*set
)
820 anv_descriptor_set_layout_unref(device
, set
->layout
);
822 if (set
->desc_mem
.alloc_size
) {
823 util_vma_heap_free(&pool
->bo_heap
,
824 (uint64_t)set
->desc_mem
.offset
+ POOL_HEAP_OFFSET
,
825 set
->desc_mem
.alloc_size
);
826 anv_descriptor_pool_free_state(pool
, set
->desc_surface_state
);
829 for (uint32_t b
= 0; b
< set
->buffer_view_count
; b
++)
830 anv_descriptor_pool_free_state(pool
, set
->buffer_views
[b
].surface_state
);
832 anv_descriptor_pool_free_set(pool
, set
);
835 VkResult
anv_AllocateDescriptorSets(
837 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
838 VkDescriptorSet
* pDescriptorSets
)
840 ANV_FROM_HANDLE(anv_device
, device
, _device
);
841 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
843 VkResult result
= VK_SUCCESS
;
844 struct anv_descriptor_set
*set
;
847 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
848 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
849 pAllocateInfo
->pSetLayouts
[i
]);
851 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
852 if (result
!= VK_SUCCESS
)
855 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
858 if (result
!= VK_SUCCESS
)
859 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
865 VkResult
anv_FreeDescriptorSets(
867 VkDescriptorPool descriptorPool
,
869 const VkDescriptorSet
* pDescriptorSets
)
871 ANV_FROM_HANDLE(anv_device
, device
, _device
);
872 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
874 for (uint32_t i
= 0; i
< count
; i
++) {
875 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
880 anv_descriptor_set_destroy(device
, pool
, set
);
887 anv_descriptor_set_write_image_view(struct anv_device
*device
,
888 struct anv_descriptor_set
*set
,
889 const VkDescriptorImageInfo
* const info
,
890 VkDescriptorType type
,
894 const struct anv_descriptor_set_binding_layout
*bind_layout
=
895 &set
->layout
->binding
[binding
];
896 struct anv_descriptor
*desc
=
897 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
898 struct anv_image_view
*image_view
= NULL
;
899 struct anv_sampler
*sampler
= NULL
;
901 assert(type
== bind_layout
->type
);
904 case VK_DESCRIPTOR_TYPE_SAMPLER
:
905 sampler
= anv_sampler_from_handle(info
->sampler
);
908 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
909 image_view
= anv_image_view_from_handle(info
->imageView
);
910 sampler
= anv_sampler_from_handle(info
->sampler
);
913 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
914 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
915 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
916 image_view
= anv_image_view_from_handle(info
->imageView
);
920 unreachable("invalid descriptor type");
923 /* If this descriptor has an immutable sampler, we don't want to stomp on
926 sampler
= bind_layout
->immutable_samplers
?
927 bind_layout
->immutable_samplers
[element
] :
930 *desc
= (struct anv_descriptor
) {
932 .layout
= info
->imageLayout
,
933 .image_view
= image_view
,
939 anv_descriptor_set_write_buffer_view(struct anv_device
*device
,
940 struct anv_descriptor_set
*set
,
941 VkDescriptorType type
,
942 struct anv_buffer_view
*buffer_view
,
946 const struct anv_descriptor_set_binding_layout
*bind_layout
=
947 &set
->layout
->binding
[binding
];
948 struct anv_descriptor
*desc
=
949 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
951 assert(type
== bind_layout
->type
);
953 *desc
= (struct anv_descriptor
) {
955 .buffer_view
= buffer_view
,
960 anv_descriptor_set_write_buffer(struct anv_device
*device
,
961 struct anv_descriptor_set
*set
,
962 struct anv_state_stream
*alloc_stream
,
963 VkDescriptorType type
,
964 struct anv_buffer
*buffer
,
970 const struct anv_descriptor_set_binding_layout
*bind_layout
=
971 &set
->layout
->binding
[binding
];
972 struct anv_descriptor
*desc
=
973 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
975 assert(type
== bind_layout
->type
);
977 if (type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
||
978 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
) {
979 *desc
= (struct anv_descriptor
) {
986 assert(bind_layout
->data
& ANV_DESCRIPTOR_BUFFER_VIEW
);
987 struct anv_buffer_view
*bview
=
988 &set
->buffer_views
[bind_layout
->buffer_view_index
+ element
];
990 bview
->format
= anv_isl_format_for_descriptor_type(type
);
991 bview
->range
= anv_buffer_get_range(buffer
, offset
, range
);
992 bview
->address
= anv_address_add(buffer
->address
, offset
);
994 /* If we're writing descriptors through a push command, we need to
995 * allocate the surface state from the command buffer. Otherwise it will
996 * be allocated by the descriptor pool when calling
997 * vkAllocateDescriptorSets. */
999 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
1001 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
1003 bview
->address
, bview
->range
, 1);
1005 *desc
= (struct anv_descriptor
) {
1007 .buffer_view
= bview
,
1013 anv_descriptor_set_write_inline_uniform_data(struct anv_device
*device
,
1014 struct anv_descriptor_set
*set
,
1020 const struct anv_descriptor_set_binding_layout
*bind_layout
=
1021 &set
->layout
->binding
[binding
];
1023 assert(bind_layout
->data
& ANV_DESCRIPTOR_INLINE_UNIFORM
);
1025 void *desc_map
= set
->desc_mem
.map
+ bind_layout
->descriptor_offset
;
1027 memcpy(desc_map
+ offset
, data
, size
);
/* vkUpdateDescriptorSets: apply all writes, then all copies, in order.
 * Each write dispatches on descriptorType to the matching
 * anv_descriptor_set_write_* helper.
 *
 * NOTE(review): several interior argument lines were lost in extraction
 * and are reconstructed here (write->dstBinding, the NULL alloc_stream,
 * etc.) — confirm against the anv_descriptor_set_write_* prototypes.
 */
void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         /* Image/sampler descriptors: one pImageInfo entry per element. */
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         /* Texel buffers come in as VkBufferView handles. */
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);

            /* NULL alloc_stream: surface states for pool-allocated sets
             * come from the descriptor pool, not a command buffer stream.
             */
            anv_descriptor_set_write_buffer(device, set,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
         const VkWriteDescriptorSetInlineUniformBlockEXT *inline_write =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT);
         /* For inline uniform blocks, descriptorCount is a byte count and
          * must match the chained dataSize.
          */
         assert(inline_write->dataSize == write->descriptorCount);
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      write->dstBinding,
                                                      inline_write->pData,
                                                      write->dstArrayElement,
                                                      inline_write->dataSize);
         break;
      }

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      /* Copy the CPU-side anv_descriptor structs first. */
      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];

      if (src_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
         assert(src_layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
         /* Inline uniform copy: array elements and descriptorCount are all
          * byte quantities, so no desc_size scaling is applied.
          */
         memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
                   copy->dstArrayElement,
                src->desc_mem.map + src_layout->descriptor_offset +
                   copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         /* Regular descriptors: copy desc_size bytes per element out of the
          * descriptor buffer (desc_size may be 0 when the type stores no
          * data there).
          */
         unsigned desc_size = anv_descriptor_size(src_layout);
         if (desc_size > 0) {
            assert(desc_size == anv_descriptor_size(dst_layout));
            memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
                      copy->dstArrayElement * desc_size,
                   src->desc_mem.map + src_layout->descriptor_offset +
                      copy->srcArrayElement * desc_size,
                   copy->descriptorCount * desc_size);
         }
      }
   }
}
1152 * Descriptor update templates.
/* Apply a descriptor update template to a set.  'data' is the opaque blob
 * supplied by the application; each template entry describes where in that
 * blob its descriptors live (offset + per-element stride).
 *
 * When alloc_stream is non-NULL (push descriptors), buffer surface states
 * are allocated from it instead of the descriptor pool.
 *
 * NOTE(review): the 'const void *data' parameter line and several interior
 * call arguments were lost in extraction and are reconstructed — confirm
 * against the prototype in anv_private.h.
 */
void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            /* Element j of this entry inside the application blob. */
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
         /* array_element / array_count are byte offset and byte size here. */
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,
                                                      entry->array_element,
                                                      entry->array_count);
         break;

      default:
         break;
      }
   }
}
/* vkCreateDescriptorUpdateTemplate: snapshot the creation entries into a
 * single allocation (header plus flexible entries array) in the driver's
 * own anv_descriptor_template_entry form.
 *
 * NOTE(review): the 'VkDevice _device' parameter line and the trailing
 * 'return VK_SUCCESS;' were lost in extraction and are restored here.
 */
VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   /* One allocation covers the struct and its trailing entries array. */
   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   /* 'set' is only meaningful for set-type templates; push-descriptor
    * templates leave it unused.
    */
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}
1272 void anv_DestroyDescriptorUpdateTemplate(
1274 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1275 const VkAllocationCallbacks
* pAllocator
)
1277 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1278 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1279 descriptorUpdateTemplate
);
1281 vk_free2(&device
->alloc
, pAllocator
, template);
1284 void anv_UpdateDescriptorSetWithTemplate(
1286 VkDescriptorSet descriptorSet
,
1287 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1290 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1291 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
1292 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1293 descriptorUpdateTemplate
);
1295 anv_descriptor_set_write_template(device
, set
, NULL
, template, pData
);