/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include "util/mesa-sha1.h"

#include "anv_private.h"
/*
 * Descriptor set layouts.
 */
38 void anv_GetDescriptorSetLayoutSupport(
40 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
41 VkDescriptorSetLayoutSupport
* pSupport
)
43 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
45 for (uint32_t b
= 0; b
< pCreateInfo
->bindingCount
; b
++) {
46 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[b
];
48 switch (binding
->descriptorType
) {
49 case VK_DESCRIPTOR_TYPE_SAMPLER
:
50 /* There is no real limit on samplers */
53 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
54 if (binding
->pImmutableSamplers
) {
55 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++) {
56 ANV_FROM_HANDLE(anv_sampler
, sampler
,
57 binding
->pImmutableSamplers
[i
]);
58 anv_foreach_stage(s
, binding
->stageFlags
)
59 surface_count
[s
] += sampler
->n_planes
;
62 anv_foreach_stage(s
, binding
->stageFlags
)
63 surface_count
[s
] += binding
->descriptorCount
;
68 anv_foreach_stage(s
, binding
->stageFlags
)
69 surface_count
[s
] += binding
->descriptorCount
;
74 bool supported
= true;
75 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
76 /* Our maximum binding table size is 250 and we need to reserve 8 for
77 * render targets. 240 is a nice round number.
79 if (surface_count
[s
] >= 240)
83 pSupport
->supported
= supported
;
86 VkResult
anv_CreateDescriptorSetLayout(
88 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
89 const VkAllocationCallbacks
* pAllocator
,
90 VkDescriptorSetLayout
* pSetLayout
)
92 ANV_FROM_HANDLE(anv_device
, device
, _device
);
94 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
96 uint32_t max_binding
= 0;
97 uint32_t immutable_sampler_count
= 0;
98 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
99 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
101 /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
103 * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
104 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
105 * pImmutableSamplers can be used to initialize a set of immutable
106 * samplers. [...] If descriptorType is not one of these descriptor
107 * types, then pImmutableSamplers is ignored.
109 * We need to be careful here and only parse pImmutableSamplers if we
110 * have one of the right descriptor types.
112 VkDescriptorType desc_type
= pCreateInfo
->pBindings
[j
].descriptorType
;
113 if ((desc_type
== VK_DESCRIPTOR_TYPE_SAMPLER
||
114 desc_type
== VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
) &&
115 pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
116 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
119 struct anv_descriptor_set_layout
*set_layout
;
120 struct anv_descriptor_set_binding_layout
*bindings
;
121 struct anv_sampler
**samplers
;
123 /* We need to allocate decriptor set layouts off the device allocator
124 * with DEVICE scope because they are reference counted and may not be
125 * destroyed when vkDestroyDescriptorSetLayout is called.
128 anv_multialloc_add(&ma
, &set_layout
, 1);
129 anv_multialloc_add(&ma
, &bindings
, max_binding
+ 1);
130 anv_multialloc_add(&ma
, &samplers
, immutable_sampler_count
);
132 if (!anv_multialloc_alloc(&ma
, &device
->alloc
,
133 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE
))
134 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
136 memset(set_layout
, 0, sizeof(*set_layout
));
137 set_layout
->ref_cnt
= 1;
138 set_layout
->binding_count
= max_binding
+ 1;
140 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
141 /* Initialize all binding_layout entries to -1 */
142 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
144 set_layout
->binding
[b
].array_size
= 0;
145 set_layout
->binding
[b
].immutable_samplers
= NULL
;
148 /* Initialize all samplers to 0 */
149 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
151 uint32_t sampler_count
[MESA_SHADER_STAGES
] = { 0, };
152 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
153 uint32_t image_count
[MESA_SHADER_STAGES
] = { 0, };
154 uint32_t buffer_count
= 0;
155 uint32_t dynamic_offset_count
= 0;
157 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
158 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
159 uint32_t b
= binding
->binding
;
160 /* We temporarily store the pointer to the binding in the
161 * immutable_samplers pointer. This provides us with a quick-and-dirty
162 * way to sort the bindings by binding number.
164 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
167 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
168 const VkDescriptorSetLayoutBinding
*binding
=
169 (void *)set_layout
->binding
[b
].immutable_samplers
;
174 /* We temporarily stashed the pointer to the binding in the
175 * immutable_samplers pointer. Now that we've pulled it back out
176 * again, we reset immutable_samplers to NULL.
178 set_layout
->binding
[b
].immutable_samplers
= NULL
;
180 if (binding
->descriptorCount
== 0)
184 set_layout
->binding
[b
].type
= binding
->descriptorType
;
186 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
187 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
188 set_layout
->size
+= binding
->descriptorCount
;
190 switch (binding
->descriptorType
) {
191 case VK_DESCRIPTOR_TYPE_SAMPLER
:
192 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
193 anv_foreach_stage(s
, binding
->stageFlags
) {
194 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
195 sampler_count
[s
] += binding
->descriptorCount
;
198 if (binding
->pImmutableSamplers
) {
199 set_layout
->binding
[b
].immutable_samplers
= samplers
;
200 samplers
+= binding
->descriptorCount
;
202 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
203 set_layout
->binding
[b
].immutable_samplers
[i
] =
204 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
211 switch (binding
->descriptorType
) {
212 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
213 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
214 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
215 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
216 set_layout
->binding
[b
].buffer_index
= buffer_count
;
217 buffer_count
+= binding
->descriptorCount
;
220 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
221 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
222 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
223 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
224 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
225 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
226 anv_foreach_stage(s
, binding
->stageFlags
) {
227 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
228 surface_count
[s
] += binding
->descriptorCount
;
235 switch (binding
->descriptorType
) {
236 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
237 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
238 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
239 dynamic_offset_count
+= binding
->descriptorCount
;
245 switch (binding
->descriptorType
) {
246 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
247 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
248 anv_foreach_stage(s
, binding
->stageFlags
) {
249 set_layout
->binding
[b
].stage
[s
].image_index
= image_count
[s
];
250 image_count
[s
] += binding
->descriptorCount
;
257 set_layout
->shader_stages
|= binding
->stageFlags
;
260 set_layout
->buffer_count
= buffer_count
;
261 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
263 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
268 void anv_DestroyDescriptorSetLayout(
270 VkDescriptorSetLayout _set_layout
,
271 const VkAllocationCallbacks
* pAllocator
)
273 ANV_FROM_HANDLE(anv_device
, device
, _device
);
274 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
279 anv_descriptor_set_layout_unref(device
, set_layout
);
282 #define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));
285 sha1_update_immutable_sampler(struct mesa_sha1
*ctx
,
286 const struct anv_sampler
*sampler
)
288 if (!sampler
->conversion
)
291 /* The only thing that affects the shader is ycbcr conversion */
292 _mesa_sha1_update(ctx
, sampler
->conversion
,
293 sizeof(*sampler
->conversion
));
297 sha1_update_descriptor_set_binding_layout(struct mesa_sha1
*ctx
,
298 const struct anv_descriptor_set_binding_layout
*layout
)
300 SHA1_UPDATE_VALUE(ctx
, layout
->array_size
);
301 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_index
);
302 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_index
);
303 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_index
);
304 _mesa_sha1_update(ctx
, layout
->stage
, sizeof(layout
->stage
));
306 if (layout
->immutable_samplers
) {
307 for (uint16_t i
= 0; i
< layout
->array_size
; i
++)
308 sha1_update_immutable_sampler(ctx
, layout
->immutable_samplers
[i
]);
313 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
314 const struct anv_descriptor_set_layout
*layout
)
316 SHA1_UPDATE_VALUE(ctx
, layout
->binding_count
);
317 SHA1_UPDATE_VALUE(ctx
, layout
->size
);
318 SHA1_UPDATE_VALUE(ctx
, layout
->shader_stages
);
319 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_count
);
320 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_count
);
322 for (uint16_t i
= 0; i
< layout
->binding_count
; i
++)
323 sha1_update_descriptor_set_binding_layout(ctx
, &layout
->binding
[i
]);
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
331 VkResult
anv_CreatePipelineLayout(
333 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
334 const VkAllocationCallbacks
* pAllocator
,
335 VkPipelineLayout
* pPipelineLayout
)
337 ANV_FROM_HANDLE(anv_device
, device
, _device
);
338 struct anv_pipeline_layout
*layout
;
340 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
342 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
343 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
345 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
347 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
349 unsigned dynamic_offset_count
= 0;
351 memset(layout
->stage
, 0, sizeof(layout
->stage
));
352 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
353 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
354 pCreateInfo
->pSetLayouts
[set
]);
355 layout
->set
[set
].layout
= set_layout
;
356 anv_descriptor_set_layout_ref(set_layout
);
358 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
359 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
360 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
363 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
364 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
365 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0)
366 layout
->stage
[s
].has_dynamic_offsets
= true;
371 struct mesa_sha1 ctx
;
372 _mesa_sha1_init(&ctx
);
373 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
374 sha1_update_descriptor_set_layout(&ctx
, layout
->set
[s
].layout
);
375 _mesa_sha1_update(&ctx
, &layout
->set
[s
].dynamic_offset_start
,
376 sizeof(layout
->set
[s
].dynamic_offset_start
));
378 _mesa_sha1_update(&ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
379 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
380 _mesa_sha1_update(&ctx
, &layout
->stage
[s
].has_dynamic_offsets
,
381 sizeof(layout
->stage
[s
].has_dynamic_offsets
));
383 _mesa_sha1_final(&ctx
, layout
->sha1
);
385 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
390 void anv_DestroyPipelineLayout(
392 VkPipelineLayout _pipelineLayout
,
393 const VkAllocationCallbacks
* pAllocator
)
395 ANV_FROM_HANDLE(anv_device
, device
, _device
);
396 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
398 if (!pipeline_layout
)
401 for (uint32_t i
= 0; i
< pipeline_layout
->num_sets
; i
++)
402 anv_descriptor_set_layout_unref(device
, pipeline_layout
->set
[i
].layout
);
404 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy enough,
 * and the free lists lets us recycle blocks for case 2).
 */
421 VkResult
anv_CreateDescriptorPool(
423 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
424 const VkAllocationCallbacks
* pAllocator
,
425 VkDescriptorPool
* pDescriptorPool
)
427 ANV_FROM_HANDLE(anv_device
, device
, _device
);
428 struct anv_descriptor_pool
*pool
;
430 uint32_t descriptor_count
= 0;
431 uint32_t buffer_count
= 0;
432 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
433 switch (pCreateInfo
->pPoolSizes
[i
].type
) {
434 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
435 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
436 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
437 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
438 buffer_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
440 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
445 const size_t pool_size
=
446 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
447 descriptor_count
* sizeof(struct anv_descriptor
) +
448 buffer_count
* sizeof(struct anv_buffer_view
);
449 const size_t total_size
= sizeof(*pool
) + pool_size
;
451 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
452 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
454 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
456 pool
->size
= pool_size
;
458 pool
->free_list
= EMPTY
;
460 anv_state_stream_init(&pool
->surface_state_stream
,
461 &device
->surface_state_pool
, 4096);
462 pool
->surface_state_free_list
= NULL
;
464 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
469 void anv_DestroyDescriptorPool(
471 VkDescriptorPool _pool
,
472 const VkAllocationCallbacks
* pAllocator
)
474 ANV_FROM_HANDLE(anv_device
, device
, _device
);
475 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
480 anv_state_stream_finish(&pool
->surface_state_stream
);
481 vk_free2(&device
->alloc
, pAllocator
, pool
);
484 VkResult
anv_ResetDescriptorPool(
486 VkDescriptorPool descriptorPool
,
487 VkDescriptorPoolResetFlags flags
)
489 ANV_FROM_HANDLE(anv_device
, device
, _device
);
490 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
493 pool
->free_list
= EMPTY
;
494 anv_state_stream_finish(&pool
->surface_state_stream
);
495 anv_state_stream_init(&pool
->surface_state_stream
,
496 &device
->surface_state_pool
, 4096);
497 pool
->surface_state_free_list
= NULL
;
502 struct pool_free_list_entry
{
508 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool
*pool
,
510 struct anv_descriptor_set
**set
)
512 if (size
<= pool
->size
- pool
->next
) {
513 *set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
517 struct pool_free_list_entry
*entry
;
518 uint32_t *link
= &pool
->free_list
;
519 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
520 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
521 if (size
<= entry
->size
) {
523 *set
= (struct anv_descriptor_set
*) entry
;
529 if (pool
->free_list
!= EMPTY
) {
530 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
532 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY
);
538 anv_descriptor_pool_free_set(struct anv_descriptor_pool
*pool
,
539 struct anv_descriptor_set
*set
)
541 /* Put the descriptor set allocation back on the free list. */
542 const uint32_t index
= (char *) set
- pool
->data
;
543 if (index
+ set
->size
== pool
->next
) {
546 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
547 entry
->next
= pool
->free_list
;
548 entry
->size
= set
->size
;
549 pool
->free_list
= (char *) entry
- pool
->data
;
553 struct surface_state_free_list_entry
{
555 struct anv_state state
;
558 static struct anv_state
559 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool
*pool
)
561 struct surface_state_free_list_entry
*entry
=
562 pool
->surface_state_free_list
;
565 struct anv_state state
= entry
->state
;
566 pool
->surface_state_free_list
= entry
->next
;
567 assert(state
.alloc_size
== 64);
570 return anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
575 anv_descriptor_pool_free_state(struct anv_descriptor_pool
*pool
,
576 struct anv_state state
)
578 /* Put the buffer view surface state back on the free list. */
579 struct surface_state_free_list_entry
*entry
= state
.map
;
580 entry
->next
= pool
->surface_state_free_list
;
581 entry
->state
= state
;
582 pool
->surface_state_free_list
= entry
;
586 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
589 sizeof(struct anv_descriptor_set
) +
590 layout
->size
* sizeof(struct anv_descriptor
) +
591 layout
->buffer_count
* sizeof(struct anv_buffer_view
);
595 anv_descriptor_set_create(struct anv_device
*device
,
596 struct anv_descriptor_pool
*pool
,
597 struct anv_descriptor_set_layout
*layout
,
598 struct anv_descriptor_set
**out_set
)
600 struct anv_descriptor_set
*set
;
601 const size_t size
= anv_descriptor_set_layout_size(layout
);
603 VkResult result
= anv_descriptor_pool_alloc_set(pool
, size
, &set
);
604 if (result
!= VK_SUCCESS
)
607 set
->layout
= layout
;
608 anv_descriptor_set_layout_ref(layout
);
612 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
613 set
->buffer_count
= layout
->buffer_count
;
615 /* By defining the descriptors to be zero now, we can later verify that
616 * a descriptor has not been populated with user data.
618 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
620 /* Go through and fill out immutable samplers if we have any */
621 struct anv_descriptor
*desc
= set
->descriptors
;
622 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
623 if (layout
->binding
[b
].immutable_samplers
) {
624 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
625 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
626 * UpdateDescriptorSets if needed. However, if the descriptor
627 * set has an immutable sampler, UpdateDescriptorSets may never
628 * touch it, so we need to make sure it's 100% valid now.
630 desc
[i
] = (struct anv_descriptor
) {
631 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
632 .sampler
= layout
->binding
[b
].immutable_samplers
[i
],
636 desc
+= layout
->binding
[b
].array_size
;
639 /* Allocate surface state for the buffer views. */
640 for (uint32_t b
= 0; b
< layout
->buffer_count
; b
++) {
641 set
->buffer_views
[b
].surface_state
=
642 anv_descriptor_pool_alloc_state(pool
);
651 anv_descriptor_set_destroy(struct anv_device
*device
,
652 struct anv_descriptor_pool
*pool
,
653 struct anv_descriptor_set
*set
)
655 anv_descriptor_set_layout_unref(device
, set
->layout
);
657 for (uint32_t b
= 0; b
< set
->buffer_count
; b
++)
658 anv_descriptor_pool_free_state(pool
, set
->buffer_views
[b
].surface_state
);
660 anv_descriptor_pool_free_set(pool
, set
);
663 VkResult
anv_AllocateDescriptorSets(
665 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
666 VkDescriptorSet
* pDescriptorSets
)
668 ANV_FROM_HANDLE(anv_device
, device
, _device
);
669 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
671 VkResult result
= VK_SUCCESS
;
672 struct anv_descriptor_set
*set
;
675 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
676 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
677 pAllocateInfo
->pSetLayouts
[i
]);
679 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
680 if (result
!= VK_SUCCESS
)
683 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
686 if (result
!= VK_SUCCESS
)
687 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
693 VkResult
anv_FreeDescriptorSets(
695 VkDescriptorPool descriptorPool
,
697 const VkDescriptorSet
* pDescriptorSets
)
699 ANV_FROM_HANDLE(anv_device
, device
, _device
);
700 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
702 for (uint32_t i
= 0; i
< count
; i
++) {
703 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
708 anv_descriptor_set_destroy(device
, pool
, set
);
715 anv_descriptor_set_write_image_view(struct anv_device
*device
,
716 struct anv_descriptor_set
*set
,
717 const VkDescriptorImageInfo
* const info
,
718 VkDescriptorType type
,
722 const struct anv_descriptor_set_binding_layout
*bind_layout
=
723 &set
->layout
->binding
[binding
];
724 struct anv_descriptor
*desc
=
725 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
726 struct anv_image_view
*image_view
= NULL
;
727 struct anv_sampler
*sampler
= NULL
;
729 assert(type
== bind_layout
->type
);
732 case VK_DESCRIPTOR_TYPE_SAMPLER
:
733 sampler
= anv_sampler_from_handle(info
->sampler
);
736 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
737 image_view
= anv_image_view_from_handle(info
->imageView
);
738 sampler
= anv_sampler_from_handle(info
->sampler
);
741 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
742 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
743 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
744 image_view
= anv_image_view_from_handle(info
->imageView
);
748 unreachable("invalid descriptor type");
751 /* If this descriptor has an immutable sampler, we don't want to stomp on
754 sampler
= bind_layout
->immutable_samplers
?
755 bind_layout
->immutable_samplers
[element
] :
758 *desc
= (struct anv_descriptor
) {
760 .layout
= info
->imageLayout
,
761 .image_view
= image_view
,
767 anv_descriptor_set_write_buffer_view(struct anv_device
*device
,
768 struct anv_descriptor_set
*set
,
769 VkDescriptorType type
,
770 struct anv_buffer_view
*buffer_view
,
774 const struct anv_descriptor_set_binding_layout
*bind_layout
=
775 &set
->layout
->binding
[binding
];
776 struct anv_descriptor
*desc
=
777 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
779 assert(type
== bind_layout
->type
);
781 *desc
= (struct anv_descriptor
) {
783 .buffer_view
= buffer_view
,
788 anv_descriptor_set_write_buffer(struct anv_device
*device
,
789 struct anv_descriptor_set
*set
,
790 struct anv_state_stream
*alloc_stream
,
791 VkDescriptorType type
,
792 struct anv_buffer
*buffer
,
798 const struct anv_descriptor_set_binding_layout
*bind_layout
=
799 &set
->layout
->binding
[binding
];
800 struct anv_descriptor
*desc
=
801 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
803 assert(type
== bind_layout
->type
);
805 if (type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
||
806 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
) {
807 *desc
= (struct anv_descriptor
) {
814 struct anv_buffer_view
*bview
=
815 &set
->buffer_views
[bind_layout
->buffer_index
+ element
];
817 bview
->format
= anv_isl_format_for_descriptor_type(type
);
818 bview
->range
= anv_buffer_get_range(buffer
, offset
, range
);
819 bview
->address
= anv_address_add(buffer
->address
, offset
);
821 /* If we're writing descriptors through a push command, we need to
822 * allocate the surface state from the command buffer. Otherwise it will
823 * be allocated by the descriptor pool when calling
824 * vkAllocateDescriptorSets. */
826 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
828 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
830 bview
->address
, bview
->range
, 1);
832 *desc
= (struct anv_descriptor
) {
834 .buffer_view
= bview
,
839 void anv_UpdateDescriptorSets(
841 uint32_t descriptorWriteCount
,
842 const VkWriteDescriptorSet
* pDescriptorWrites
,
843 uint32_t descriptorCopyCount
,
844 const VkCopyDescriptorSet
* pDescriptorCopies
)
846 ANV_FROM_HANDLE(anv_device
, device
, _device
);
848 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
849 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
850 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
852 switch (write
->descriptorType
) {
853 case VK_DESCRIPTOR_TYPE_SAMPLER
:
854 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
855 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
856 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
857 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
858 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
859 anv_descriptor_set_write_image_view(device
, set
,
860 write
->pImageInfo
+ j
,
861 write
->descriptorType
,
863 write
->dstArrayElement
+ j
);
867 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
868 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
869 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
870 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
871 write
->pTexelBufferView
[j
]);
873 anv_descriptor_set_write_buffer_view(device
, set
,
874 write
->descriptorType
,
877 write
->dstArrayElement
+ j
);
881 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
882 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
883 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
884 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
885 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
886 assert(write
->pBufferInfo
[j
].buffer
);
887 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
890 anv_descriptor_set_write_buffer(device
, set
,
892 write
->descriptorType
,
895 write
->dstArrayElement
+ j
,
896 write
->pBufferInfo
[j
].offset
,
897 write
->pBufferInfo
[j
].range
);
906 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
907 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
908 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->srcSet
);
909 ANV_FROM_HANDLE(anv_descriptor_set
, dst
, copy
->dstSet
);
911 const struct anv_descriptor_set_binding_layout
*src_layout
=
912 &src
->layout
->binding
[copy
->srcBinding
];
913 struct anv_descriptor
*src_desc
=
914 &src
->descriptors
[src_layout
->descriptor_index
];
915 src_desc
+= copy
->srcArrayElement
;
917 const struct anv_descriptor_set_binding_layout
*dst_layout
=
918 &dst
->layout
->binding
[copy
->dstBinding
];
919 struct anv_descriptor
*dst_desc
=
920 &dst
->descriptors
[dst_layout
->descriptor_index
];
921 dst_desc
+= copy
->dstArrayElement
;
923 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++)
924 dst_desc
[j
] = src_desc
[j
];
/*
 * Descriptor update templates.
 */
933 anv_descriptor_set_write_template(struct anv_device
*device
,
934 struct anv_descriptor_set
*set
,
935 struct anv_state_stream
*alloc_stream
,
936 const struct anv_descriptor_update_template
*template,
939 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
940 const struct anv_descriptor_template_entry
*entry
=
941 &template->entries
[i
];
943 switch (entry
->type
) {
944 case VK_DESCRIPTOR_TYPE_SAMPLER
:
945 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
946 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
947 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
948 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
949 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
950 const VkDescriptorImageInfo
*info
=
951 data
+ entry
->offset
+ j
* entry
->stride
;
952 anv_descriptor_set_write_image_view(device
, set
,
955 entry
->array_element
+ j
);
959 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
960 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
961 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
962 const VkBufferView
*_bview
=
963 data
+ entry
->offset
+ j
* entry
->stride
;
964 ANV_FROM_HANDLE(anv_buffer_view
, bview
, *_bview
);
966 anv_descriptor_set_write_buffer_view(device
, set
,
970 entry
->array_element
+ j
);
974 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
975 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
976 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
977 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
978 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
979 const VkDescriptorBufferInfo
*info
=
980 data
+ entry
->offset
+ j
* entry
->stride
;
981 ANV_FROM_HANDLE(anv_buffer
, buffer
, info
->buffer
);
983 anv_descriptor_set_write_buffer(device
, set
,
988 entry
->array_element
+ j
,
989 info
->offset
, info
->range
);
999 VkResult
anv_CreateDescriptorUpdateTemplate(
1001 const VkDescriptorUpdateTemplateCreateInfo
* pCreateInfo
,
1002 const VkAllocationCallbacks
* pAllocator
,
1003 VkDescriptorUpdateTemplate
* pDescriptorUpdateTemplate
)
1005 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1006 struct anv_descriptor_update_template
*template;
1008 size_t size
= sizeof(*template) +
1009 pCreateInfo
->descriptorUpdateEntryCount
* sizeof(template->entries
[0]);
1010 template = vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
1011 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
1012 if (template == NULL
)
1013 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1015 template->bind_point
= pCreateInfo
->pipelineBindPoint
;
1017 if (pCreateInfo
->templateType
== VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET
)
1018 template->set
= pCreateInfo
->set
;
1020 template->entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
1021 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
1022 const VkDescriptorUpdateTemplateEntry
*pEntry
=
1023 &pCreateInfo
->pDescriptorUpdateEntries
[i
];
1025 template->entries
[i
] = (struct anv_descriptor_template_entry
) {
1026 .type
= pEntry
->descriptorType
,
1027 .binding
= pEntry
->dstBinding
,
1028 .array_element
= pEntry
->dstArrayElement
,
1029 .array_count
= pEntry
->descriptorCount
,
1030 .offset
= pEntry
->offset
,
1031 .stride
= pEntry
->stride
,
1035 *pDescriptorUpdateTemplate
=
1036 anv_descriptor_update_template_to_handle(template);
1041 void anv_DestroyDescriptorUpdateTemplate(
1043 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1044 const VkAllocationCallbacks
* pAllocator
)
1046 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1047 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1048 descriptorUpdateTemplate
);
1050 vk_free2(&device
->alloc
, pAllocator
, template);
1053 void anv_UpdateDescriptorSetWithTemplate(
1055 VkDescriptorSet descriptorSet
,
1056 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1059 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1060 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
1061 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1062 descriptorUpdateTemplate
);
1064 anv_descriptor_set_write_template(device
, set
, NULL
, template, pData
);