/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
30 #include "util/mesa-sha1.h"
32 #include "anv_private.h"
35 * Descriptor set layouts.
38 VkResult
anv_CreateDescriptorSetLayout(
40 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
41 const VkAllocationCallbacks
* pAllocator
,
42 VkDescriptorSetLayout
* pSetLayout
)
44 ANV_FROM_HANDLE(anv_device
, device
, _device
);
46 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
48 uint32_t max_binding
= 0;
49 uint32_t immutable_sampler_count
= 0;
50 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
51 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
52 if (pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
53 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
56 struct anv_descriptor_set_layout
*set_layout
;
57 struct anv_descriptor_set_binding_layout
*bindings
;
58 struct anv_sampler
**samplers
;
60 /* We need to allocate decriptor set layouts off the device allocator
61 * with DEVICE scope because they are reference counted and may not be
62 * destroyed when vkDestroyDescriptorSetLayout is called.
65 anv_multialloc_add(&ma
, &set_layout
, 1);
66 anv_multialloc_add(&ma
, &bindings
, max_binding
+ 1);
67 anv_multialloc_add(&ma
, &samplers
, immutable_sampler_count
);
69 if (!anv_multialloc_alloc(&ma
, &device
->alloc
,
70 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE
))
71 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
73 memset(set_layout
, 0, sizeof(*set_layout
));
74 set_layout
->ref_cnt
= 1;
75 set_layout
->binding_count
= max_binding
+ 1;
77 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
78 /* Initialize all binding_layout entries to -1 */
79 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
81 set_layout
->binding
[b
].array_size
= 0;
82 set_layout
->binding
[b
].immutable_samplers
= NULL
;
85 /* Initialize all samplers to 0 */
86 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
88 uint32_t sampler_count
[MESA_SHADER_STAGES
] = { 0, };
89 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
90 uint32_t image_count
[MESA_SHADER_STAGES
] = { 0, };
91 uint32_t buffer_count
= 0;
92 uint32_t dynamic_offset_count
= 0;
94 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
95 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
96 uint32_t b
= binding
->binding
;
97 /* We temporarily store the pointer to the binding in the
98 * immutable_samplers pointer. This provides us with a quick-and-dirty
99 * way to sort the bindings by binding number.
101 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
104 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
105 const VkDescriptorSetLayoutBinding
*binding
=
106 (void *)set_layout
->binding
[b
].immutable_samplers
;
111 if (binding
->descriptorCount
== 0)
115 set_layout
->binding
[b
].type
= binding
->descriptorType
;
117 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
118 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
119 set_layout
->size
+= binding
->descriptorCount
;
121 switch (binding
->descriptorType
) {
122 case VK_DESCRIPTOR_TYPE_SAMPLER
:
123 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
124 anv_foreach_stage(s
, binding
->stageFlags
) {
125 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
126 sampler_count
[s
] += binding
->descriptorCount
;
133 switch (binding
->descriptorType
) {
134 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
135 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
136 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
137 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
138 set_layout
->binding
[b
].buffer_index
= buffer_count
;
139 buffer_count
+= binding
->descriptorCount
;
142 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
143 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
144 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
145 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
146 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
147 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
148 anv_foreach_stage(s
, binding
->stageFlags
) {
149 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
150 surface_count
[s
] += binding
->descriptorCount
;
157 switch (binding
->descriptorType
) {
158 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
159 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
160 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
161 dynamic_offset_count
+= binding
->descriptorCount
;
167 switch (binding
->descriptorType
) {
168 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
169 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
170 anv_foreach_stage(s
, binding
->stageFlags
) {
171 set_layout
->binding
[b
].stage
[s
].image_index
= image_count
[s
];
172 image_count
[s
] += binding
->descriptorCount
;
179 if (binding
->pImmutableSamplers
) {
180 set_layout
->binding
[b
].immutable_samplers
= samplers
;
181 samplers
+= binding
->descriptorCount
;
183 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
184 set_layout
->binding
[b
].immutable_samplers
[i
] =
185 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
187 set_layout
->binding
[b
].immutable_samplers
= NULL
;
190 set_layout
->shader_stages
|= binding
->stageFlags
;
193 set_layout
->buffer_count
= buffer_count
;
194 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
196 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
201 void anv_DestroyDescriptorSetLayout(
203 VkDescriptorSetLayout _set_layout
,
204 const VkAllocationCallbacks
* pAllocator
)
206 ANV_FROM_HANDLE(anv_device
, device
, _device
);
207 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
212 anv_descriptor_set_layout_unref(device
, set_layout
);
216 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
217 const struct anv_descriptor_set_layout
*layout
)
219 size_t size
= sizeof(*layout
) +
220 sizeof(layout
->binding
[0]) * layout
->binding_count
;
221 _mesa_sha1_update(ctx
, layout
, size
);
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
229 VkResult
anv_CreatePipelineLayout(
231 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
232 const VkAllocationCallbacks
* pAllocator
,
233 VkPipelineLayout
* pPipelineLayout
)
235 ANV_FROM_HANDLE(anv_device
, device
, _device
);
236 struct anv_pipeline_layout
*layout
;
238 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
240 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
241 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
243 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
245 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
247 unsigned dynamic_offset_count
= 0;
249 memset(layout
->stage
, 0, sizeof(layout
->stage
));
250 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
251 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
252 pCreateInfo
->pSetLayouts
[set
]);
253 layout
->set
[set
].layout
= set_layout
;
254 anv_descriptor_set_layout_ref(set_layout
);
256 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
257 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
258 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
261 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
262 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
263 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0)
264 layout
->stage
[s
].has_dynamic_offsets
= true;
269 struct mesa_sha1 ctx
;
270 _mesa_sha1_init(&ctx
);
271 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
272 sha1_update_descriptor_set_layout(&ctx
, layout
->set
[s
].layout
);
273 _mesa_sha1_update(&ctx
, &layout
->set
[s
].dynamic_offset_start
,
274 sizeof(layout
->set
[s
].dynamic_offset_start
));
276 _mesa_sha1_update(&ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
277 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
278 _mesa_sha1_update(&ctx
, &layout
->stage
[s
].has_dynamic_offsets
,
279 sizeof(layout
->stage
[s
].has_dynamic_offsets
));
281 _mesa_sha1_final(&ctx
, layout
->sha1
);
283 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
288 void anv_DestroyPipelineLayout(
290 VkPipelineLayout _pipelineLayout
,
291 const VkAllocationCallbacks
* pAllocator
)
293 ANV_FROM_HANDLE(anv_device
, device
, _device
);
294 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
296 if (!pipeline_layout
)
299 for (uint32_t i
= 0; i
< pipeline_layout
->num_sets
; i
++)
300 anv_descriptor_set_layout_unref(device
, pipeline_layout
->set
[i
].layout
);
302 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy enough,
 * and the free lists lets us recycle blocks for case 2).
 */
319 VkResult
anv_CreateDescriptorPool(
321 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
322 const VkAllocationCallbacks
* pAllocator
,
323 VkDescriptorPool
* pDescriptorPool
)
325 ANV_FROM_HANDLE(anv_device
, device
, _device
);
326 struct anv_descriptor_pool
*pool
;
328 uint32_t descriptor_count
= 0;
329 uint32_t buffer_count
= 0;
330 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
331 switch (pCreateInfo
->pPoolSizes
[i
].type
) {
332 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
333 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
334 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
335 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
336 buffer_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
338 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
343 const size_t pool_size
=
344 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
345 descriptor_count
* sizeof(struct anv_descriptor
) +
346 buffer_count
* sizeof(struct anv_buffer_view
);
347 const size_t total_size
= sizeof(*pool
) + pool_size
;
349 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
350 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
352 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
354 pool
->size
= pool_size
;
356 pool
->free_list
= EMPTY
;
358 anv_state_stream_init(&pool
->surface_state_stream
,
359 &device
->surface_state_pool
, 4096);
360 pool
->surface_state_free_list
= NULL
;
362 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
367 void anv_DestroyDescriptorPool(
369 VkDescriptorPool _pool
,
370 const VkAllocationCallbacks
* pAllocator
)
372 ANV_FROM_HANDLE(anv_device
, device
, _device
);
373 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
378 anv_state_stream_finish(&pool
->surface_state_stream
);
379 vk_free2(&device
->alloc
, pAllocator
, pool
);
382 VkResult
anv_ResetDescriptorPool(
384 VkDescriptorPool descriptorPool
,
385 VkDescriptorPoolResetFlags flags
)
387 ANV_FROM_HANDLE(anv_device
, device
, _device
);
388 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
391 pool
->free_list
= EMPTY
;
392 anv_state_stream_finish(&pool
->surface_state_stream
);
393 anv_state_stream_init(&pool
->surface_state_stream
,
394 &device
->surface_state_pool
, 4096);
395 pool
->surface_state_free_list
= NULL
;
/* Header written in-place over a freed descriptor-set allocation inside the
 * pool's data block.  `next` is the pool-relative offset of the next free
 * entry (EMPTY terminates the list); `size` is the byte size of this block.
 * NOTE(review): members reconstructed — the extraction dropped them; the
 * fields match their uses in anv_descriptor_set_create/destroy below.
 */
struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};
406 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
409 sizeof(struct anv_descriptor_set
) +
410 layout
->size
* sizeof(struct anv_descriptor
) +
411 layout
->buffer_count
* sizeof(struct anv_buffer_view
);
415 anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout
*binding
)
417 if (!binding
->immutable_samplers
)
418 return binding
->array_size
;
420 uint32_t total_plane_count
= 0;
421 for (uint32_t i
= 0; i
< binding
->array_size
; i
++)
422 total_plane_count
+= binding
->immutable_samplers
[i
]->n_planes
;
424 return total_plane_count
;
427 struct surface_state_free_list_entry
{
429 struct anv_state state
;
433 anv_descriptor_set_create(struct anv_device
*device
,
434 struct anv_descriptor_pool
*pool
,
435 struct anv_descriptor_set_layout
*layout
,
436 struct anv_descriptor_set
**out_set
)
438 struct anv_descriptor_set
*set
;
439 const size_t size
= anv_descriptor_set_layout_size(layout
);
442 if (size
<= pool
->size
- pool
->next
) {
443 set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
446 struct pool_free_list_entry
*entry
;
447 uint32_t *link
= &pool
->free_list
;
448 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
449 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
450 if (size
<= entry
->size
) {
452 set
= (struct anv_descriptor_set
*) entry
;
460 if (pool
->free_list
!= EMPTY
) {
461 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
463 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR
);
467 set
->layout
= layout
;
468 anv_descriptor_set_layout_ref(layout
);
472 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
473 set
->buffer_count
= layout
->buffer_count
;
475 /* By defining the descriptors to be zero now, we can later verify that
476 * a descriptor has not been populated with user data.
478 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
480 /* Go through and fill out immutable samplers if we have any */
481 struct anv_descriptor
*desc
= set
->descriptors
;
482 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
483 if (layout
->binding
[b
].immutable_samplers
) {
484 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
485 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
486 * UpdateDescriptorSets if needed. However, if the descriptor
487 * set has an immutable sampler, UpdateDescriptorSets may never
488 * touch it, so we need to make sure it's 100% valid now.
490 desc
[i
] = (struct anv_descriptor
) {
491 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
492 .sampler
= layout
->binding
[b
].immutable_samplers
[i
],
496 desc
+= layout
->binding
[b
].array_size
;
499 /* Allocate surface state for the buffer views. */
500 for (uint32_t b
= 0; b
< layout
->buffer_count
; b
++) {
501 struct surface_state_free_list_entry
*entry
=
502 pool
->surface_state_free_list
;
503 struct anv_state state
;
506 state
= entry
->state
;
507 pool
->surface_state_free_list
= entry
->next
;
508 assert(state
.alloc_size
== 64);
510 state
= anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
513 set
->buffer_views
[b
].surface_state
= state
;
522 anv_descriptor_set_destroy(struct anv_device
*device
,
523 struct anv_descriptor_pool
*pool
,
524 struct anv_descriptor_set
*set
)
526 anv_descriptor_set_layout_unref(device
, set
->layout
);
528 /* Put the buffer view surface state back on the free list. */
529 for (uint32_t b
= 0; b
< set
->buffer_count
; b
++) {
530 struct surface_state_free_list_entry
*entry
=
531 set
->buffer_views
[b
].surface_state
.map
;
532 entry
->next
= pool
->surface_state_free_list
;
533 entry
->state
= set
->buffer_views
[b
].surface_state
;
534 pool
->surface_state_free_list
= entry
;
537 /* Put the descriptor set allocation back on the free list. */
538 const uint32_t index
= (char *) set
- pool
->data
;
539 if (index
+ set
->size
== pool
->next
) {
542 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
543 entry
->next
= pool
->free_list
;
544 entry
->size
= set
->size
;
545 pool
->free_list
= (char *) entry
- pool
->data
;
549 VkResult
anv_AllocateDescriptorSets(
551 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
552 VkDescriptorSet
* pDescriptorSets
)
554 ANV_FROM_HANDLE(anv_device
, device
, _device
);
555 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
557 VkResult result
= VK_SUCCESS
;
558 struct anv_descriptor_set
*set
;
561 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
562 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
563 pAllocateInfo
->pSetLayouts
[i
]);
565 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
566 if (result
!= VK_SUCCESS
)
569 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
572 if (result
!= VK_SUCCESS
)
573 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
579 VkResult
anv_FreeDescriptorSets(
581 VkDescriptorPool descriptorPool
,
583 const VkDescriptorSet
* pDescriptorSets
)
585 ANV_FROM_HANDLE(anv_device
, device
, _device
);
586 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
588 for (uint32_t i
= 0; i
< count
; i
++) {
589 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
594 anv_descriptor_set_destroy(device
, pool
, set
);
601 anv_descriptor_set_write_image_view(struct anv_descriptor_set
*set
,
602 const struct gen_device_info
* const devinfo
,
603 const VkDescriptorImageInfo
* const info
,
604 VkDescriptorType type
,
608 const struct anv_descriptor_set_binding_layout
*bind_layout
=
609 &set
->layout
->binding
[binding
];
610 struct anv_descriptor
*desc
=
611 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
612 struct anv_image_view
*image_view
= NULL
;
613 struct anv_sampler
*sampler
= NULL
;
615 assert(type
== bind_layout
->type
);
618 case VK_DESCRIPTOR_TYPE_SAMPLER
:
619 sampler
= anv_sampler_from_handle(info
->sampler
);
622 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
623 image_view
= anv_image_view_from_handle(info
->imageView
);
624 sampler
= anv_sampler_from_handle(info
->sampler
);
627 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
628 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
629 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
630 image_view
= anv_image_view_from_handle(info
->imageView
);
634 unreachable("invalid descriptor type");
637 /* If this descriptor has an immutable sampler, we don't want to stomp on
640 sampler
= bind_layout
->immutable_samplers
?
641 bind_layout
->immutable_samplers
[element
] :
644 *desc
= (struct anv_descriptor
) {
646 .layout
= info
->imageLayout
,
647 .image_view
= image_view
,
653 anv_descriptor_set_write_buffer_view(struct anv_descriptor_set
*set
,
654 VkDescriptorType type
,
655 struct anv_buffer_view
*buffer_view
,
659 const struct anv_descriptor_set_binding_layout
*bind_layout
=
660 &set
->layout
->binding
[binding
];
661 struct anv_descriptor
*desc
=
662 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
664 assert(type
== bind_layout
->type
);
666 *desc
= (struct anv_descriptor
) {
668 .buffer_view
= buffer_view
,
673 anv_descriptor_set_write_buffer(struct anv_descriptor_set
*set
,
674 struct anv_device
*device
,
675 struct anv_state_stream
*alloc_stream
,
676 VkDescriptorType type
,
677 struct anv_buffer
*buffer
,
683 const struct anv_descriptor_set_binding_layout
*bind_layout
=
684 &set
->layout
->binding
[binding
];
685 struct anv_descriptor
*desc
=
686 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
688 assert(type
== bind_layout
->type
);
690 if (type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
||
691 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
) {
692 *desc
= (struct anv_descriptor
) {
699 struct anv_buffer_view
*bview
=
700 &set
->buffer_views
[bind_layout
->buffer_index
+ element
];
702 bview
->format
= anv_isl_format_for_descriptor_type(type
);
703 bview
->bo
= buffer
->bo
;
704 bview
->offset
= buffer
->offset
+ offset
;
705 bview
->range
= anv_buffer_get_range(buffer
, offset
, range
);
707 /* If we're writing descriptors through a push command, we need to
708 * allocate the surface state from the command buffer. Otherwise it will
709 * be allocated by the descriptor pool when calling
710 * vkAllocateDescriptorSets. */
712 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
714 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
716 bview
->offset
, bview
->range
, 1);
718 *desc
= (struct anv_descriptor
) {
720 .buffer_view
= bview
,
725 void anv_UpdateDescriptorSets(
727 uint32_t descriptorWriteCount
,
728 const VkWriteDescriptorSet
* pDescriptorWrites
,
729 uint32_t descriptorCopyCount
,
730 const VkCopyDescriptorSet
* pDescriptorCopies
)
732 ANV_FROM_HANDLE(anv_device
, device
, _device
);
734 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
735 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
736 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
738 switch (write
->descriptorType
) {
739 case VK_DESCRIPTOR_TYPE_SAMPLER
:
740 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
741 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
742 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
743 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
744 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
745 anv_descriptor_set_write_image_view(set
, &device
->info
,
746 write
->pImageInfo
+ j
,
747 write
->descriptorType
,
749 write
->dstArrayElement
+ j
);
753 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
754 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
755 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
756 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
757 write
->pTexelBufferView
[j
]);
759 anv_descriptor_set_write_buffer_view(set
,
760 write
->descriptorType
,
763 write
->dstArrayElement
+ j
);
767 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
768 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
769 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
770 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
771 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
772 assert(write
->pBufferInfo
[j
].buffer
);
773 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
776 anv_descriptor_set_write_buffer(set
,
779 write
->descriptorType
,
782 write
->dstArrayElement
+ j
,
783 write
->pBufferInfo
[j
].offset
,
784 write
->pBufferInfo
[j
].range
);
793 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
794 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
795 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->srcSet
);
796 ANV_FROM_HANDLE(anv_descriptor_set
, dst
, copy
->dstSet
);
798 const struct anv_descriptor_set_binding_layout
*src_layout
=
799 &src
->layout
->binding
[copy
->srcBinding
];
800 struct anv_descriptor
*src_desc
=
801 &src
->descriptors
[src_layout
->descriptor_index
];
802 src_desc
+= copy
->srcArrayElement
;
804 const struct anv_descriptor_set_binding_layout
*dst_layout
=
805 &dst
->layout
->binding
[copy
->dstBinding
];
806 struct anv_descriptor
*dst_desc
=
807 &dst
->descriptors
[dst_layout
->descriptor_index
];
808 dst_desc
+= copy
->dstArrayElement
;
810 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++)
811 dst_desc
[j
] = src_desc
[j
];
/*
 * Descriptor update templates.
 */
820 anv_descriptor_set_write_template(struct anv_descriptor_set
*set
,
821 struct anv_device
*device
,
822 struct anv_state_stream
*alloc_stream
,
823 const struct anv_descriptor_update_template
*template,
826 const struct anv_descriptor_set_layout
*layout
= set
->layout
;
828 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
829 const struct anv_descriptor_template_entry
*entry
=
830 &template->entries
[i
];
831 const struct anv_descriptor_set_binding_layout
*bind_layout
=
832 &layout
->binding
[entry
->binding
];
833 struct anv_descriptor
*desc
= &set
->descriptors
[bind_layout
->descriptor_index
];
834 desc
+= entry
->array_element
;
836 switch (entry
->type
) {
837 case VK_DESCRIPTOR_TYPE_SAMPLER
:
838 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
839 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
840 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
841 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
842 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
843 const VkDescriptorImageInfo
*info
=
844 data
+ entry
->offset
+ j
* entry
->stride
;
845 anv_descriptor_set_write_image_view(set
, &device
->info
,
848 entry
->array_element
+ j
);
852 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
853 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
854 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
855 const VkBufferView
*_bview
=
856 data
+ entry
->offset
+ j
* entry
->stride
;
857 ANV_FROM_HANDLE(anv_buffer_view
, bview
, *_bview
);
859 anv_descriptor_set_write_buffer_view(set
,
863 entry
->array_element
+ j
);
867 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
868 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
869 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
870 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
871 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
872 const VkDescriptorBufferInfo
*info
=
873 data
+ entry
->offset
+ j
* entry
->stride
;
874 ANV_FROM_HANDLE(anv_buffer
, buffer
, info
->buffer
);
876 anv_descriptor_set_write_buffer(set
,
882 entry
->array_element
+ j
,
883 info
->offset
, info
->range
);
893 VkResult
anv_CreateDescriptorUpdateTemplateKHR(
895 const VkDescriptorUpdateTemplateCreateInfoKHR
* pCreateInfo
,
896 const VkAllocationCallbacks
* pAllocator
,
897 VkDescriptorUpdateTemplateKHR
* pDescriptorUpdateTemplate
)
899 ANV_FROM_HANDLE(anv_device
, device
, _device
);
900 struct anv_descriptor_update_template
*template;
902 size_t size
= sizeof(*template) +
903 pCreateInfo
->descriptorUpdateEntryCount
* sizeof(template->entries
[0]);
904 template = vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
905 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
906 if (template == NULL
)
907 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
909 template->bind_point
= pCreateInfo
->pipelineBindPoint
;
911 if (pCreateInfo
->templateType
== VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR
)
912 template->set
= pCreateInfo
->set
;
914 template->entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
915 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
916 const VkDescriptorUpdateTemplateEntryKHR
*pEntry
=
917 &pCreateInfo
->pDescriptorUpdateEntries
[i
];
919 template->entries
[i
] = (struct anv_descriptor_template_entry
) {
920 .type
= pEntry
->descriptorType
,
921 .binding
= pEntry
->dstBinding
,
922 .array_element
= pEntry
->dstArrayElement
,
923 .array_count
= pEntry
->descriptorCount
,
924 .offset
= pEntry
->offset
,
925 .stride
= pEntry
->stride
,
929 *pDescriptorUpdateTemplate
=
930 anv_descriptor_update_template_to_handle(template);
935 void anv_DestroyDescriptorUpdateTemplateKHR(
937 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate
,
938 const VkAllocationCallbacks
* pAllocator
)
940 ANV_FROM_HANDLE(anv_device
, device
, _device
);
941 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
942 descriptorUpdateTemplate
);
944 vk_free2(&device
->alloc
, pAllocator
, template);
947 void anv_UpdateDescriptorSetWithTemplateKHR(
949 VkDescriptorSet descriptorSet
,
950 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate
,
953 ANV_FROM_HANDLE(anv_device
, device
, _device
);
954 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
955 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
956 descriptorUpdateTemplate
);
958 anv_descriptor_set_write_template(set
, device
, NULL
, template, pData
);