2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
30 #include "util/mesa-sha1.h"
32 #include "anv_private.h"
35 * Descriptor set layouts.
38 void anv_GetDescriptorSetLayoutSupport(
40 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
41 VkDescriptorSetLayoutSupport
* pSupport
)
43 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
45 for (uint32_t b
= 0; b
< pCreateInfo
->bindingCount
; b
++) {
46 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[b
];
48 switch (binding
->descriptorType
) {
49 case VK_DESCRIPTOR_TYPE_SAMPLER
:
50 /* There is no real limit on samplers */
53 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
54 if (binding
->pImmutableSamplers
) {
55 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++) {
56 ANV_FROM_HANDLE(anv_sampler
, sampler
,
57 binding
->pImmutableSamplers
[i
]);
58 anv_foreach_stage(s
, binding
->stageFlags
)
59 surface_count
[s
] += sampler
->n_planes
;
65 anv_foreach_stage(s
, binding
->stageFlags
)
66 surface_count
[s
] += binding
->descriptorCount
;
71 bool supported
= true;
72 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
73 /* Our maximum binding table size is 250 and we need to reserve 8 for
74 * render targets. 240 is a nice round number.
76 if (surface_count
[s
] >= 240)
80 pSupport
->supported
= supported
;
83 VkResult
anv_CreateDescriptorSetLayout(
85 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
86 const VkAllocationCallbacks
* pAllocator
,
87 VkDescriptorSetLayout
* pSetLayout
)
89 ANV_FROM_HANDLE(anv_device
, device
, _device
);
91 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
93 uint32_t max_binding
= 0;
94 uint32_t immutable_sampler_count
= 0;
95 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
96 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
97 if (pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
98 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
101 struct anv_descriptor_set_layout
*set_layout
;
102 struct anv_descriptor_set_binding_layout
*bindings
;
103 struct anv_sampler
**samplers
;
105 /* We need to allocate decriptor set layouts off the device allocator
106 * with DEVICE scope because they are reference counted and may not be
107 * destroyed when vkDestroyDescriptorSetLayout is called.
110 anv_multialloc_add(&ma
, &set_layout
, 1);
111 anv_multialloc_add(&ma
, &bindings
, max_binding
+ 1);
112 anv_multialloc_add(&ma
, &samplers
, immutable_sampler_count
);
114 if (!anv_multialloc_alloc(&ma
, &device
->alloc
,
115 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE
))
116 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
118 memset(set_layout
, 0, sizeof(*set_layout
));
119 set_layout
->ref_cnt
= 1;
120 set_layout
->binding_count
= max_binding
+ 1;
122 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
123 /* Initialize all binding_layout entries to -1 */
124 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
126 set_layout
->binding
[b
].array_size
= 0;
127 set_layout
->binding
[b
].immutable_samplers
= NULL
;
130 /* Initialize all samplers to 0 */
131 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
133 uint32_t sampler_count
[MESA_SHADER_STAGES
] = { 0, };
134 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
135 uint32_t image_count
[MESA_SHADER_STAGES
] = { 0, };
136 uint32_t buffer_count
= 0;
137 uint32_t dynamic_offset_count
= 0;
139 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
140 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
141 uint32_t b
= binding
->binding
;
142 /* We temporarily store the pointer to the binding in the
143 * immutable_samplers pointer. This provides us with a quick-and-dirty
144 * way to sort the bindings by binding number.
146 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
149 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
150 const VkDescriptorSetLayoutBinding
*binding
=
151 (void *)set_layout
->binding
[b
].immutable_samplers
;
156 if (binding
->descriptorCount
== 0)
160 set_layout
->binding
[b
].type
= binding
->descriptorType
;
162 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
163 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
164 set_layout
->size
+= binding
->descriptorCount
;
166 switch (binding
->descriptorType
) {
167 case VK_DESCRIPTOR_TYPE_SAMPLER
:
168 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
169 anv_foreach_stage(s
, binding
->stageFlags
) {
170 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
171 sampler_count
[s
] += binding
->descriptorCount
;
178 switch (binding
->descriptorType
) {
179 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
180 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
181 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
182 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
183 set_layout
->binding
[b
].buffer_index
= buffer_count
;
184 buffer_count
+= binding
->descriptorCount
;
187 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
188 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
189 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
190 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
191 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
192 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
193 anv_foreach_stage(s
, binding
->stageFlags
) {
194 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
195 surface_count
[s
] += binding
->descriptorCount
;
202 switch (binding
->descriptorType
) {
203 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
204 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
205 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
206 dynamic_offset_count
+= binding
->descriptorCount
;
212 switch (binding
->descriptorType
) {
213 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
214 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
215 anv_foreach_stage(s
, binding
->stageFlags
) {
216 set_layout
->binding
[b
].stage
[s
].image_index
= image_count
[s
];
217 image_count
[s
] += binding
->descriptorCount
;
224 if (binding
->pImmutableSamplers
) {
225 set_layout
->binding
[b
].immutable_samplers
= samplers
;
226 samplers
+= binding
->descriptorCount
;
228 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
229 set_layout
->binding
[b
].immutable_samplers
[i
] =
230 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
232 set_layout
->binding
[b
].immutable_samplers
= NULL
;
235 set_layout
->shader_stages
|= binding
->stageFlags
;
238 set_layout
->buffer_count
= buffer_count
;
239 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
241 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
246 void anv_DestroyDescriptorSetLayout(
248 VkDescriptorSetLayout _set_layout
,
249 const VkAllocationCallbacks
* pAllocator
)
251 ANV_FROM_HANDLE(anv_device
, device
, _device
);
252 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
257 anv_descriptor_set_layout_unref(device
, set_layout
);
260 #define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));
263 sha1_update_immutable_sampler(struct mesa_sha1
*ctx
,
264 const struct anv_sampler
*sampler
)
266 if (!sampler
->conversion
)
269 /* The only thing that affects the shader is ycbcr conversion */
270 _mesa_sha1_update(ctx
, sampler
->conversion
,
271 sizeof(*sampler
->conversion
));
275 sha1_update_descriptor_set_binding_layout(struct mesa_sha1
*ctx
,
276 const struct anv_descriptor_set_binding_layout
*layout
)
278 SHA1_UPDATE_VALUE(ctx
, layout
->array_size
);
279 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_index
);
280 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_index
);
281 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_index
);
282 _mesa_sha1_update(ctx
, layout
->stage
, sizeof(layout
->stage
));
284 if (layout
->immutable_samplers
) {
285 for (uint16_t i
= 0; i
< layout
->array_size
; i
++)
286 sha1_update_immutable_sampler(ctx
, layout
->immutable_samplers
[i
]);
291 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
292 const struct anv_descriptor_set_layout
*layout
)
294 SHA1_UPDATE_VALUE(ctx
, layout
->binding_count
);
295 SHA1_UPDATE_VALUE(ctx
, layout
->size
);
296 SHA1_UPDATE_VALUE(ctx
, layout
->shader_stages
);
297 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_count
);
298 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_count
);
300 for (uint16_t i
= 0; i
< layout
->binding_count
; i
++)
301 sha1_update_descriptor_set_binding_layout(ctx
, &layout
->binding
[i
]);
305 * Pipeline layouts. These have nothing to do with the pipeline. They are
306 * just multiple descriptor set layouts pasted together
309 VkResult
anv_CreatePipelineLayout(
311 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
312 const VkAllocationCallbacks
* pAllocator
,
313 VkPipelineLayout
* pPipelineLayout
)
315 ANV_FROM_HANDLE(anv_device
, device
, _device
);
316 struct anv_pipeline_layout
*layout
;
318 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
320 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
321 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
323 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
325 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
327 unsigned dynamic_offset_count
= 0;
329 memset(layout
->stage
, 0, sizeof(layout
->stage
));
330 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
331 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
332 pCreateInfo
->pSetLayouts
[set
]);
333 layout
->set
[set
].layout
= set_layout
;
334 anv_descriptor_set_layout_ref(set_layout
);
336 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
337 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
338 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
341 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
342 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
343 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0)
344 layout
->stage
[s
].has_dynamic_offsets
= true;
349 struct mesa_sha1 ctx
;
350 _mesa_sha1_init(&ctx
);
351 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
352 sha1_update_descriptor_set_layout(&ctx
, layout
->set
[s
].layout
);
353 _mesa_sha1_update(&ctx
, &layout
->set
[s
].dynamic_offset_start
,
354 sizeof(layout
->set
[s
].dynamic_offset_start
));
356 _mesa_sha1_update(&ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
357 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
358 _mesa_sha1_update(&ctx
, &layout
->stage
[s
].has_dynamic_offsets
,
359 sizeof(layout
->stage
[s
].has_dynamic_offsets
));
361 _mesa_sha1_final(&ctx
, layout
->sha1
);
363 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
368 void anv_DestroyPipelineLayout(
370 VkPipelineLayout _pipelineLayout
,
371 const VkAllocationCallbacks
* pAllocator
)
373 ANV_FROM_HANDLE(anv_device
, device
, _device
);
374 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
376 if (!pipeline_layout
)
379 for (uint32_t i
= 0; i
< pipeline_layout
->num_sets
; i
++)
380 anv_descriptor_set_layout_unref(device
, pipeline_layout
->set
[i
].layout
);
382 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
388  * These are implemented using a big pool of memory and a free-list for the
389  * host memory allocations and a state_stream and a free list for the buffer
390  * view surface state. The spec allows us to fail to allocate due to
391  * fragmentation in all cases but two: 1) after pool reset, allocating up
392  * until the pool size with no freeing must succeed and 2) allocating and
393  * freeing only descriptor sets with the same layout. Case 1) is easy enough,
394  * and the free lists lets us recycle blocks for case 2).
399 VkResult
anv_CreateDescriptorPool(
401 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
402 const VkAllocationCallbacks
* pAllocator
,
403 VkDescriptorPool
* pDescriptorPool
)
405 ANV_FROM_HANDLE(anv_device
, device
, _device
);
406 struct anv_descriptor_pool
*pool
;
408 uint32_t descriptor_count
= 0;
409 uint32_t buffer_count
= 0;
410 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
411 switch (pCreateInfo
->pPoolSizes
[i
].type
) {
412 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
413 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
414 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
415 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
416 buffer_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
418 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
423 const size_t pool_size
=
424 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
425 descriptor_count
* sizeof(struct anv_descriptor
) +
426 buffer_count
* sizeof(struct anv_buffer_view
);
427 const size_t total_size
= sizeof(*pool
) + pool_size
;
429 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
430 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
432 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
434 pool
->size
= pool_size
;
436 pool
->free_list
= EMPTY
;
438 anv_state_stream_init(&pool
->surface_state_stream
,
439 &device
->surface_state_pool
, 4096);
440 pool
->surface_state_free_list
= NULL
;
442 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
447 void anv_DestroyDescriptorPool(
449 VkDescriptorPool _pool
,
450 const VkAllocationCallbacks
* pAllocator
)
452 ANV_FROM_HANDLE(anv_device
, device
, _device
);
453 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
458 anv_state_stream_finish(&pool
->surface_state_stream
);
459 vk_free2(&device
->alloc
, pAllocator
, pool
);
462 VkResult
anv_ResetDescriptorPool(
464 VkDescriptorPool descriptorPool
,
465 VkDescriptorPoolResetFlags flags
)
467 ANV_FROM_HANDLE(anv_device
, device
, _device
);
468 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
471 pool
->free_list
= EMPTY
;
472 anv_state_stream_finish(&pool
->surface_state_stream
);
473 anv_state_stream_init(&pool
->surface_state_stream
,
474 &device
->surface_state_pool
, 4096);
475 pool
->surface_state_free_list
= NULL
;
/* Free-list node stored in-place in a freed descriptor set's memory.
 * Offsets are relative to pool->data so the list survives pool moves.
 */
struct pool_free_list_entry {
   uint32_t next;   /* offset of next free block, or EMPTY */
   uint32_t size;   /* size in bytes of this free block */
};
486 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
489 sizeof(struct anv_descriptor_set
) +
490 layout
->size
* sizeof(struct anv_descriptor
) +
491 layout
->buffer_count
* sizeof(struct anv_buffer_view
);
494 struct surface_state_free_list_entry
{
496 struct anv_state state
;
500 anv_descriptor_set_create(struct anv_device
*device
,
501 struct anv_descriptor_pool
*pool
,
502 struct anv_descriptor_set_layout
*layout
,
503 struct anv_descriptor_set
**out_set
)
505 struct anv_descriptor_set
*set
;
506 const size_t size
= anv_descriptor_set_layout_size(layout
);
509 if (size
<= pool
->size
- pool
->next
) {
510 set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
513 struct pool_free_list_entry
*entry
;
514 uint32_t *link
= &pool
->free_list
;
515 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
516 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
517 if (size
<= entry
->size
) {
519 set
= (struct anv_descriptor_set
*) entry
;
527 if (pool
->free_list
!= EMPTY
) {
528 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
530 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY
);
534 set
->layout
= layout
;
535 anv_descriptor_set_layout_ref(layout
);
539 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
540 set
->buffer_count
= layout
->buffer_count
;
542 /* By defining the descriptors to be zero now, we can later verify that
543 * a descriptor has not been populated with user data.
545 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
547 /* Go through and fill out immutable samplers if we have any */
548 struct anv_descriptor
*desc
= set
->descriptors
;
549 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
550 if (layout
->binding
[b
].immutable_samplers
) {
551 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
552 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
553 * UpdateDescriptorSets if needed. However, if the descriptor
554 * set has an immutable sampler, UpdateDescriptorSets may never
555 * touch it, so we need to make sure it's 100% valid now.
557 desc
[i
] = (struct anv_descriptor
) {
558 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
559 .sampler
= layout
->binding
[b
].immutable_samplers
[i
],
563 desc
+= layout
->binding
[b
].array_size
;
566 /* Allocate surface state for the buffer views. */
567 for (uint32_t b
= 0; b
< layout
->buffer_count
; b
++) {
568 struct surface_state_free_list_entry
*entry
=
569 pool
->surface_state_free_list
;
570 struct anv_state state
;
573 state
= entry
->state
;
574 pool
->surface_state_free_list
= entry
->next
;
575 assert(state
.alloc_size
== 64);
577 state
= anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
580 set
->buffer_views
[b
].surface_state
= state
;
589 anv_descriptor_set_destroy(struct anv_device
*device
,
590 struct anv_descriptor_pool
*pool
,
591 struct anv_descriptor_set
*set
)
593 anv_descriptor_set_layout_unref(device
, set
->layout
);
595 /* Put the buffer view surface state back on the free list. */
596 for (uint32_t b
= 0; b
< set
->buffer_count
; b
++) {
597 struct surface_state_free_list_entry
*entry
=
598 set
->buffer_views
[b
].surface_state
.map
;
599 entry
->next
= pool
->surface_state_free_list
;
600 entry
->state
= set
->buffer_views
[b
].surface_state
;
601 pool
->surface_state_free_list
= entry
;
604 /* Put the descriptor set allocation back on the free list. */
605 const uint32_t index
= (char *) set
- pool
->data
;
606 if (index
+ set
->size
== pool
->next
) {
609 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
610 entry
->next
= pool
->free_list
;
611 entry
->size
= set
->size
;
612 pool
->free_list
= (char *) entry
- pool
->data
;
616 VkResult
anv_AllocateDescriptorSets(
618 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
619 VkDescriptorSet
* pDescriptorSets
)
621 ANV_FROM_HANDLE(anv_device
, device
, _device
);
622 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
624 VkResult result
= VK_SUCCESS
;
625 struct anv_descriptor_set
*set
;
628 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
629 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
630 pAllocateInfo
->pSetLayouts
[i
]);
632 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
633 if (result
!= VK_SUCCESS
)
636 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
639 if (result
!= VK_SUCCESS
)
640 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
646 VkResult
anv_FreeDescriptorSets(
648 VkDescriptorPool descriptorPool
,
650 const VkDescriptorSet
* pDescriptorSets
)
652 ANV_FROM_HANDLE(anv_device
, device
, _device
);
653 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
655 for (uint32_t i
= 0; i
< count
; i
++) {
656 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
661 anv_descriptor_set_destroy(device
, pool
, set
);
668 anv_descriptor_set_write_image_view(struct anv_descriptor_set
*set
,
669 const struct gen_device_info
* const devinfo
,
670 const VkDescriptorImageInfo
* const info
,
671 VkDescriptorType type
,
675 const struct anv_descriptor_set_binding_layout
*bind_layout
=
676 &set
->layout
->binding
[binding
];
677 struct anv_descriptor
*desc
=
678 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
679 struct anv_image_view
*image_view
= NULL
;
680 struct anv_sampler
*sampler
= NULL
;
682 assert(type
== bind_layout
->type
);
685 case VK_DESCRIPTOR_TYPE_SAMPLER
:
686 sampler
= anv_sampler_from_handle(info
->sampler
);
689 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
690 image_view
= anv_image_view_from_handle(info
->imageView
);
691 sampler
= anv_sampler_from_handle(info
->sampler
);
694 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
695 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
696 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
697 image_view
= anv_image_view_from_handle(info
->imageView
);
701 unreachable("invalid descriptor type");
704 /* If this descriptor has an immutable sampler, we don't want to stomp on
707 sampler
= bind_layout
->immutable_samplers
?
708 bind_layout
->immutable_samplers
[element
] :
711 *desc
= (struct anv_descriptor
) {
713 .layout
= info
->imageLayout
,
714 .image_view
= image_view
,
720 anv_descriptor_set_write_buffer_view(struct anv_descriptor_set
*set
,
721 VkDescriptorType type
,
722 struct anv_buffer_view
*buffer_view
,
726 const struct anv_descriptor_set_binding_layout
*bind_layout
=
727 &set
->layout
->binding
[binding
];
728 struct anv_descriptor
*desc
=
729 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
731 assert(type
== bind_layout
->type
);
733 *desc
= (struct anv_descriptor
) {
735 .buffer_view
= buffer_view
,
740 anv_descriptor_set_write_buffer(struct anv_descriptor_set
*set
,
741 struct anv_device
*device
,
742 struct anv_state_stream
*alloc_stream
,
743 VkDescriptorType type
,
744 struct anv_buffer
*buffer
,
750 const struct anv_descriptor_set_binding_layout
*bind_layout
=
751 &set
->layout
->binding
[binding
];
752 struct anv_descriptor
*desc
=
753 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
755 assert(type
== bind_layout
->type
);
757 if (type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
||
758 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
) {
759 *desc
= (struct anv_descriptor
) {
766 struct anv_buffer_view
*bview
=
767 &set
->buffer_views
[bind_layout
->buffer_index
+ element
];
769 bview
->format
= anv_isl_format_for_descriptor_type(type
);
770 bview
->range
= anv_buffer_get_range(buffer
, offset
, range
);
771 bview
->address
= anv_address_add(buffer
->address
, offset
);
773 /* If we're writing descriptors through a push command, we need to
774 * allocate the surface state from the command buffer. Otherwise it will
775 * be allocated by the descriptor pool when calling
776 * vkAllocateDescriptorSets. */
778 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
780 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
782 bview
->address
, bview
->range
, 1);
784 *desc
= (struct anv_descriptor
) {
786 .buffer_view
= bview
,
791 void anv_UpdateDescriptorSets(
793 uint32_t descriptorWriteCount
,
794 const VkWriteDescriptorSet
* pDescriptorWrites
,
795 uint32_t descriptorCopyCount
,
796 const VkCopyDescriptorSet
* pDescriptorCopies
)
798 ANV_FROM_HANDLE(anv_device
, device
, _device
);
800 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
801 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
802 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
804 switch (write
->descriptorType
) {
805 case VK_DESCRIPTOR_TYPE_SAMPLER
:
806 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
807 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
808 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
809 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
810 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
811 anv_descriptor_set_write_image_view(set
, &device
->info
,
812 write
->pImageInfo
+ j
,
813 write
->descriptorType
,
815 write
->dstArrayElement
+ j
);
819 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
820 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
821 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
822 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
823 write
->pTexelBufferView
[j
]);
825 anv_descriptor_set_write_buffer_view(set
,
826 write
->descriptorType
,
829 write
->dstArrayElement
+ j
);
833 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
834 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
835 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
836 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
837 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
838 assert(write
->pBufferInfo
[j
].buffer
);
839 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
842 anv_descriptor_set_write_buffer(set
,
845 write
->descriptorType
,
848 write
->dstArrayElement
+ j
,
849 write
->pBufferInfo
[j
].offset
,
850 write
->pBufferInfo
[j
].range
);
859 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
860 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
861 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->srcSet
);
862 ANV_FROM_HANDLE(anv_descriptor_set
, dst
, copy
->dstSet
);
864 const struct anv_descriptor_set_binding_layout
*src_layout
=
865 &src
->layout
->binding
[copy
->srcBinding
];
866 struct anv_descriptor
*src_desc
=
867 &src
->descriptors
[src_layout
->descriptor_index
];
868 src_desc
+= copy
->srcArrayElement
;
870 const struct anv_descriptor_set_binding_layout
*dst_layout
=
871 &dst
->layout
->binding
[copy
->dstBinding
];
872 struct anv_descriptor
*dst_desc
=
873 &dst
->descriptors
[dst_layout
->descriptor_index
];
874 dst_desc
+= copy
->dstArrayElement
;
876 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++)
877 dst_desc
[j
] = src_desc
[j
];
882 * Descriptor update templates.
886 anv_descriptor_set_write_template(struct anv_descriptor_set
*set
,
887 struct anv_device
*device
,
888 struct anv_state_stream
*alloc_stream
,
889 const struct anv_descriptor_update_template
*template,
892 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
893 const struct anv_descriptor_template_entry
*entry
=
894 &template->entries
[i
];
896 switch (entry
->type
) {
897 case VK_DESCRIPTOR_TYPE_SAMPLER
:
898 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
899 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
900 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
901 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
902 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
903 const VkDescriptorImageInfo
*info
=
904 data
+ entry
->offset
+ j
* entry
->stride
;
905 anv_descriptor_set_write_image_view(set
, &device
->info
,
908 entry
->array_element
+ j
);
912 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
913 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
914 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
915 const VkBufferView
*_bview
=
916 data
+ entry
->offset
+ j
* entry
->stride
;
917 ANV_FROM_HANDLE(anv_buffer_view
, bview
, *_bview
);
919 anv_descriptor_set_write_buffer_view(set
,
923 entry
->array_element
+ j
);
927 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
928 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
929 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
930 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
931 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
932 const VkDescriptorBufferInfo
*info
=
933 data
+ entry
->offset
+ j
* entry
->stride
;
934 ANV_FROM_HANDLE(anv_buffer
, buffer
, info
->buffer
);
936 anv_descriptor_set_write_buffer(set
,
942 entry
->array_element
+ j
,
943 info
->offset
, info
->range
);
953 VkResult
anv_CreateDescriptorUpdateTemplate(
955 const VkDescriptorUpdateTemplateCreateInfo
* pCreateInfo
,
956 const VkAllocationCallbacks
* pAllocator
,
957 VkDescriptorUpdateTemplate
* pDescriptorUpdateTemplate
)
959 ANV_FROM_HANDLE(anv_device
, device
, _device
);
960 struct anv_descriptor_update_template
*template;
962 size_t size
= sizeof(*template) +
963 pCreateInfo
->descriptorUpdateEntryCount
* sizeof(template->entries
[0]);
964 template = vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
965 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
966 if (template == NULL
)
967 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
969 template->bind_point
= pCreateInfo
->pipelineBindPoint
;
971 if (pCreateInfo
->templateType
== VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET
)
972 template->set
= pCreateInfo
->set
;
974 template->entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
975 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
976 const VkDescriptorUpdateTemplateEntry
*pEntry
=
977 &pCreateInfo
->pDescriptorUpdateEntries
[i
];
979 template->entries
[i
] = (struct anv_descriptor_template_entry
) {
980 .type
= pEntry
->descriptorType
,
981 .binding
= pEntry
->dstBinding
,
982 .array_element
= pEntry
->dstArrayElement
,
983 .array_count
= pEntry
->descriptorCount
,
984 .offset
= pEntry
->offset
,
985 .stride
= pEntry
->stride
,
989 *pDescriptorUpdateTemplate
=
990 anv_descriptor_update_template_to_handle(template);
995 void anv_DestroyDescriptorUpdateTemplate(
997 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
998 const VkAllocationCallbacks
* pAllocator
)
1000 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1001 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1002 descriptorUpdateTemplate
);
1004 vk_free2(&device
->alloc
, pAllocator
, template);
1007 void anv_UpdateDescriptorSetWithTemplate(
1009 VkDescriptorSet descriptorSet
,
1010 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1013 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1014 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
1015 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1016 descriptorUpdateTemplate
);
1018 anv_descriptor_set_write_template(set
, device
, NULL
, template, pData
);