/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
30 #include "util/mesa-sha1.h"
32 #include "anv_private.h"
/*
 * Descriptor set layouts.
 */
38 void anv_GetDescriptorSetLayoutSupport(
40 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
41 VkDescriptorSetLayoutSupport
* pSupport
)
43 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
45 for (uint32_t b
= 0; b
< pCreateInfo
->bindingCount
; b
++) {
46 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[b
];
48 switch (binding
->descriptorType
) {
49 case VK_DESCRIPTOR_TYPE_SAMPLER
:
50 /* There is no real limit on samplers */
53 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
54 if (binding
->pImmutableSamplers
) {
55 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++) {
56 ANV_FROM_HANDLE(anv_sampler
, sampler
,
57 binding
->pImmutableSamplers
[i
]);
58 anv_foreach_stage(s
, binding
->stageFlags
)
59 surface_count
[s
] += sampler
->n_planes
;
65 anv_foreach_stage(s
, binding
->stageFlags
)
66 surface_count
[s
] += binding
->descriptorCount
;
71 bool supported
= true;
72 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
73 /* Our maximum binding table size is 250 and we need to reserve 8 for
74 * render targets. 240 is a nice round number.
76 if (surface_count
[s
] >= 240)
80 pSupport
->supported
= supported
;
83 VkResult
anv_CreateDescriptorSetLayout(
85 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
86 const VkAllocationCallbacks
* pAllocator
,
87 VkDescriptorSetLayout
* pSetLayout
)
89 ANV_FROM_HANDLE(anv_device
, device
, _device
);
91 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
93 uint32_t max_binding
= 0;
94 uint32_t immutable_sampler_count
= 0;
95 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
96 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
97 if (pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
98 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
101 struct anv_descriptor_set_layout
*set_layout
;
102 struct anv_descriptor_set_binding_layout
*bindings
;
103 struct anv_sampler
**samplers
;
105 /* We need to allocate decriptor set layouts off the device allocator
106 * with DEVICE scope because they are reference counted and may not be
107 * destroyed when vkDestroyDescriptorSetLayout is called.
110 anv_multialloc_add(&ma
, &set_layout
, 1);
111 anv_multialloc_add(&ma
, &bindings
, max_binding
+ 1);
112 anv_multialloc_add(&ma
, &samplers
, immutable_sampler_count
);
114 if (!anv_multialloc_alloc(&ma
, &device
->alloc
,
115 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE
))
116 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
118 memset(set_layout
, 0, sizeof(*set_layout
));
119 set_layout
->ref_cnt
= 1;
120 set_layout
->binding_count
= max_binding
+ 1;
122 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
123 /* Initialize all binding_layout entries to -1 */
124 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
126 set_layout
->binding
[b
].array_size
= 0;
127 set_layout
->binding
[b
].immutable_samplers
= NULL
;
130 /* Initialize all samplers to 0 */
131 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
133 uint32_t sampler_count
[MESA_SHADER_STAGES
] = { 0, };
134 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
135 uint32_t image_count
[MESA_SHADER_STAGES
] = { 0, };
136 uint32_t buffer_count
= 0;
137 uint32_t dynamic_offset_count
= 0;
139 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
140 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
141 uint32_t b
= binding
->binding
;
142 /* We temporarily store the pointer to the binding in the
143 * immutable_samplers pointer. This provides us with a quick-and-dirty
144 * way to sort the bindings by binding number.
146 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
149 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
150 const VkDescriptorSetLayoutBinding
*binding
=
151 (void *)set_layout
->binding
[b
].immutable_samplers
;
156 if (binding
->descriptorCount
== 0)
160 set_layout
->binding
[b
].type
= binding
->descriptorType
;
162 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
163 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
164 set_layout
->size
+= binding
->descriptorCount
;
166 switch (binding
->descriptorType
) {
167 case VK_DESCRIPTOR_TYPE_SAMPLER
:
168 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
169 anv_foreach_stage(s
, binding
->stageFlags
) {
170 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
171 sampler_count
[s
] += binding
->descriptorCount
;
178 switch (binding
->descriptorType
) {
179 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
180 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
181 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
182 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
183 set_layout
->binding
[b
].buffer_index
= buffer_count
;
184 buffer_count
+= binding
->descriptorCount
;
187 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
188 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
189 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
190 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
191 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
192 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
193 anv_foreach_stage(s
, binding
->stageFlags
) {
194 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
195 surface_count
[s
] += binding
->descriptorCount
;
202 switch (binding
->descriptorType
) {
203 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
204 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
205 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
206 dynamic_offset_count
+= binding
->descriptorCount
;
212 switch (binding
->descriptorType
) {
213 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
214 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
215 anv_foreach_stage(s
, binding
->stageFlags
) {
216 set_layout
->binding
[b
].stage
[s
].image_index
= image_count
[s
];
217 image_count
[s
] += binding
->descriptorCount
;
224 if (binding
->pImmutableSamplers
) {
225 set_layout
->binding
[b
].immutable_samplers
= samplers
;
226 samplers
+= binding
->descriptorCount
;
228 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
229 set_layout
->binding
[b
].immutable_samplers
[i
] =
230 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
232 set_layout
->binding
[b
].immutable_samplers
= NULL
;
235 set_layout
->shader_stages
|= binding
->stageFlags
;
238 set_layout
->buffer_count
= buffer_count
;
239 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
241 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
246 void anv_DestroyDescriptorSetLayout(
248 VkDescriptorSetLayout _set_layout
,
249 const VkAllocationCallbacks
* pAllocator
)
251 ANV_FROM_HANDLE(anv_device
, device
, _device
);
252 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
257 anv_descriptor_set_layout_unref(device
, set_layout
);
261 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
262 const struct anv_descriptor_set_layout
*layout
)
264 size_t size
= sizeof(*layout
) +
265 sizeof(layout
->binding
[0]) * layout
->binding_count
;
266 _mesa_sha1_update(ctx
, layout
, size
);
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
274 VkResult
anv_CreatePipelineLayout(
276 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
277 const VkAllocationCallbacks
* pAllocator
,
278 VkPipelineLayout
* pPipelineLayout
)
280 ANV_FROM_HANDLE(anv_device
, device
, _device
);
281 struct anv_pipeline_layout
*layout
;
283 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
285 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
286 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
288 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
290 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
292 unsigned dynamic_offset_count
= 0;
294 memset(layout
->stage
, 0, sizeof(layout
->stage
));
295 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
296 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
297 pCreateInfo
->pSetLayouts
[set
]);
298 layout
->set
[set
].layout
= set_layout
;
299 anv_descriptor_set_layout_ref(set_layout
);
301 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
302 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
303 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
306 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
307 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
308 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0)
309 layout
->stage
[s
].has_dynamic_offsets
= true;
314 struct mesa_sha1 ctx
;
315 _mesa_sha1_init(&ctx
);
316 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
317 sha1_update_descriptor_set_layout(&ctx
, layout
->set
[s
].layout
);
318 _mesa_sha1_update(&ctx
, &layout
->set
[s
].dynamic_offset_start
,
319 sizeof(layout
->set
[s
].dynamic_offset_start
));
321 _mesa_sha1_update(&ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
322 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
323 _mesa_sha1_update(&ctx
, &layout
->stage
[s
].has_dynamic_offsets
,
324 sizeof(layout
->stage
[s
].has_dynamic_offsets
));
326 _mesa_sha1_final(&ctx
, layout
->sha1
);
328 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
333 void anv_DestroyPipelineLayout(
335 VkPipelineLayout _pipelineLayout
,
336 const VkAllocationCallbacks
* pAllocator
)
338 ANV_FROM_HANDLE(anv_device
, device
, _device
);
339 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
341 if (!pipeline_layout
)
344 for (uint32_t i
= 0; i
< pipeline_layout
->num_sets
; i
++)
345 anv_descriptor_set_layout_unref(device
, pipeline_layout
->set
[i
].layout
);
347 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state. The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
 * and the free lists lets us recycle blocks for case 2).
 */

/* The free-list offset value used to mean "no free entry"; 1 can never be a
 * valid aligned offset into the pool data.
 */
#define EMPTY 1
364 VkResult
anv_CreateDescriptorPool(
366 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
367 const VkAllocationCallbacks
* pAllocator
,
368 VkDescriptorPool
* pDescriptorPool
)
370 ANV_FROM_HANDLE(anv_device
, device
, _device
);
371 struct anv_descriptor_pool
*pool
;
373 uint32_t descriptor_count
= 0;
374 uint32_t buffer_count
= 0;
375 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
376 switch (pCreateInfo
->pPoolSizes
[i
].type
) {
377 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
378 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
379 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
380 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
381 buffer_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
383 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
388 const size_t pool_size
=
389 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
390 descriptor_count
* sizeof(struct anv_descriptor
) +
391 buffer_count
* sizeof(struct anv_buffer_view
);
392 const size_t total_size
= sizeof(*pool
) + pool_size
;
394 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
395 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
397 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
399 pool
->size
= pool_size
;
401 pool
->free_list
= EMPTY
;
403 anv_state_stream_init(&pool
->surface_state_stream
,
404 &device
->surface_state_pool
, 4096);
405 pool
->surface_state_free_list
= NULL
;
407 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
412 void anv_DestroyDescriptorPool(
414 VkDescriptorPool _pool
,
415 const VkAllocationCallbacks
* pAllocator
)
417 ANV_FROM_HANDLE(anv_device
, device
, _device
);
418 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
423 anv_state_stream_finish(&pool
->surface_state_stream
);
424 vk_free2(&device
->alloc
, pAllocator
, pool
);
427 VkResult
anv_ResetDescriptorPool(
429 VkDescriptorPool descriptorPool
,
430 VkDescriptorPoolResetFlags flags
)
432 ANV_FROM_HANDLE(anv_device
, device
, _device
);
433 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
436 pool
->free_list
= EMPTY
;
437 anv_state_stream_finish(&pool
->surface_state_stream
);
438 anv_state_stream_init(&pool
->surface_state_stream
,
439 &device
->surface_state_pool
, 4096);
440 pool
->surface_state_free_list
= NULL
;
445 struct pool_free_list_entry
{
451 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
454 sizeof(struct anv_descriptor_set
) +
455 layout
->size
* sizeof(struct anv_descriptor
) +
456 layout
->buffer_count
* sizeof(struct anv_buffer_view
);
460 anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout
*binding
)
462 if (!binding
->immutable_samplers
)
463 return binding
->array_size
;
465 uint32_t total_plane_count
= 0;
466 for (uint32_t i
= 0; i
< binding
->array_size
; i
++)
467 total_plane_count
+= binding
->immutable_samplers
[i
]->n_planes
;
469 return total_plane_count
;
472 struct surface_state_free_list_entry
{
474 struct anv_state state
;
478 anv_descriptor_set_create(struct anv_device
*device
,
479 struct anv_descriptor_pool
*pool
,
480 struct anv_descriptor_set_layout
*layout
,
481 struct anv_descriptor_set
**out_set
)
483 struct anv_descriptor_set
*set
;
484 const size_t size
= anv_descriptor_set_layout_size(layout
);
487 if (size
<= pool
->size
- pool
->next
) {
488 set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
491 struct pool_free_list_entry
*entry
;
492 uint32_t *link
= &pool
->free_list
;
493 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
494 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
495 if (size
<= entry
->size
) {
497 set
= (struct anv_descriptor_set
*) entry
;
505 if (pool
->free_list
!= EMPTY
) {
506 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
508 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY
);
512 set
->layout
= layout
;
513 anv_descriptor_set_layout_ref(layout
);
517 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
518 set
->buffer_count
= layout
->buffer_count
;
520 /* By defining the descriptors to be zero now, we can later verify that
521 * a descriptor has not been populated with user data.
523 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
525 /* Go through and fill out immutable samplers if we have any */
526 struct anv_descriptor
*desc
= set
->descriptors
;
527 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
528 if (layout
->binding
[b
].immutable_samplers
) {
529 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
530 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
531 * UpdateDescriptorSets if needed. However, if the descriptor
532 * set has an immutable sampler, UpdateDescriptorSets may never
533 * touch it, so we need to make sure it's 100% valid now.
535 desc
[i
] = (struct anv_descriptor
) {
536 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
537 .sampler
= layout
->binding
[b
].immutable_samplers
[i
],
541 desc
+= layout
->binding
[b
].array_size
;
544 /* Allocate surface state for the buffer views. */
545 for (uint32_t b
= 0; b
< layout
->buffer_count
; b
++) {
546 struct surface_state_free_list_entry
*entry
=
547 pool
->surface_state_free_list
;
548 struct anv_state state
;
551 state
= entry
->state
;
552 pool
->surface_state_free_list
= entry
->next
;
553 assert(state
.alloc_size
== 64);
555 state
= anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
558 set
->buffer_views
[b
].surface_state
= state
;
567 anv_descriptor_set_destroy(struct anv_device
*device
,
568 struct anv_descriptor_pool
*pool
,
569 struct anv_descriptor_set
*set
)
571 anv_descriptor_set_layout_unref(device
, set
->layout
);
573 /* Put the buffer view surface state back on the free list. */
574 for (uint32_t b
= 0; b
< set
->buffer_count
; b
++) {
575 struct surface_state_free_list_entry
*entry
=
576 set
->buffer_views
[b
].surface_state
.map
;
577 entry
->next
= pool
->surface_state_free_list
;
578 entry
->state
= set
->buffer_views
[b
].surface_state
;
579 pool
->surface_state_free_list
= entry
;
582 /* Put the descriptor set allocation back on the free list. */
583 const uint32_t index
= (char *) set
- pool
->data
;
584 if (index
+ set
->size
== pool
->next
) {
587 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
588 entry
->next
= pool
->free_list
;
589 entry
->size
= set
->size
;
590 pool
->free_list
= (char *) entry
- pool
->data
;
594 VkResult
anv_AllocateDescriptorSets(
596 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
597 VkDescriptorSet
* pDescriptorSets
)
599 ANV_FROM_HANDLE(anv_device
, device
, _device
);
600 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
602 VkResult result
= VK_SUCCESS
;
603 struct anv_descriptor_set
*set
;
606 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
607 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
608 pAllocateInfo
->pSetLayouts
[i
]);
610 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
611 if (result
!= VK_SUCCESS
)
614 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
617 if (result
!= VK_SUCCESS
)
618 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
624 VkResult
anv_FreeDescriptorSets(
626 VkDescriptorPool descriptorPool
,
628 const VkDescriptorSet
* pDescriptorSets
)
630 ANV_FROM_HANDLE(anv_device
, device
, _device
);
631 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
633 for (uint32_t i
= 0; i
< count
; i
++) {
634 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
639 anv_descriptor_set_destroy(device
, pool
, set
);
646 anv_descriptor_set_write_image_view(struct anv_descriptor_set
*set
,
647 const struct gen_device_info
* const devinfo
,
648 const VkDescriptorImageInfo
* const info
,
649 VkDescriptorType type
,
653 const struct anv_descriptor_set_binding_layout
*bind_layout
=
654 &set
->layout
->binding
[binding
];
655 struct anv_descriptor
*desc
=
656 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
657 struct anv_image_view
*image_view
= NULL
;
658 struct anv_sampler
*sampler
= NULL
;
660 assert(type
== bind_layout
->type
);
663 case VK_DESCRIPTOR_TYPE_SAMPLER
:
664 sampler
= anv_sampler_from_handle(info
->sampler
);
667 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
668 image_view
= anv_image_view_from_handle(info
->imageView
);
669 sampler
= anv_sampler_from_handle(info
->sampler
);
672 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
673 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
674 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
675 image_view
= anv_image_view_from_handle(info
->imageView
);
679 unreachable("invalid descriptor type");
682 /* If this descriptor has an immutable sampler, we don't want to stomp on
685 sampler
= bind_layout
->immutable_samplers
?
686 bind_layout
->immutable_samplers
[element
] :
689 *desc
= (struct anv_descriptor
) {
691 .layout
= info
->imageLayout
,
692 .image_view
= image_view
,
698 anv_descriptor_set_write_buffer_view(struct anv_descriptor_set
*set
,
699 VkDescriptorType type
,
700 struct anv_buffer_view
*buffer_view
,
704 const struct anv_descriptor_set_binding_layout
*bind_layout
=
705 &set
->layout
->binding
[binding
];
706 struct anv_descriptor
*desc
=
707 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
709 assert(type
== bind_layout
->type
);
711 *desc
= (struct anv_descriptor
) {
713 .buffer_view
= buffer_view
,
718 anv_descriptor_set_write_buffer(struct anv_descriptor_set
*set
,
719 struct anv_device
*device
,
720 struct anv_state_stream
*alloc_stream
,
721 VkDescriptorType type
,
722 struct anv_buffer
*buffer
,
728 const struct anv_descriptor_set_binding_layout
*bind_layout
=
729 &set
->layout
->binding
[binding
];
730 struct anv_descriptor
*desc
=
731 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
733 assert(type
== bind_layout
->type
);
735 if (type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
||
736 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
) {
737 *desc
= (struct anv_descriptor
) {
744 struct anv_buffer_view
*bview
=
745 &set
->buffer_views
[bind_layout
->buffer_index
+ element
];
747 bview
->format
= anv_isl_format_for_descriptor_type(type
);
748 bview
->bo
= buffer
->bo
;
749 bview
->offset
= buffer
->offset
+ offset
;
750 bview
->range
= anv_buffer_get_range(buffer
, offset
, range
);
752 /* If we're writing descriptors through a push command, we need to
753 * allocate the surface state from the command buffer. Otherwise it will
754 * be allocated by the descriptor pool when calling
755 * vkAllocateDescriptorSets. */
757 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
759 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
761 bview
->offset
, bview
->range
, 1);
763 *desc
= (struct anv_descriptor
) {
765 .buffer_view
= bview
,
770 void anv_UpdateDescriptorSets(
772 uint32_t descriptorWriteCount
,
773 const VkWriteDescriptorSet
* pDescriptorWrites
,
774 uint32_t descriptorCopyCount
,
775 const VkCopyDescriptorSet
* pDescriptorCopies
)
777 ANV_FROM_HANDLE(anv_device
, device
, _device
);
779 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
780 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
781 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
783 switch (write
->descriptorType
) {
784 case VK_DESCRIPTOR_TYPE_SAMPLER
:
785 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
786 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
787 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
788 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
789 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
790 anv_descriptor_set_write_image_view(set
, &device
->info
,
791 write
->pImageInfo
+ j
,
792 write
->descriptorType
,
794 write
->dstArrayElement
+ j
);
798 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
799 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
800 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
801 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
802 write
->pTexelBufferView
[j
]);
804 anv_descriptor_set_write_buffer_view(set
,
805 write
->descriptorType
,
808 write
->dstArrayElement
+ j
);
812 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
813 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
814 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
815 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
816 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
817 assert(write
->pBufferInfo
[j
].buffer
);
818 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
821 anv_descriptor_set_write_buffer(set
,
824 write
->descriptorType
,
827 write
->dstArrayElement
+ j
,
828 write
->pBufferInfo
[j
].offset
,
829 write
->pBufferInfo
[j
].range
);
838 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
839 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
840 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->srcSet
);
841 ANV_FROM_HANDLE(anv_descriptor_set
, dst
, copy
->dstSet
);
843 const struct anv_descriptor_set_binding_layout
*src_layout
=
844 &src
->layout
->binding
[copy
->srcBinding
];
845 struct anv_descriptor
*src_desc
=
846 &src
->descriptors
[src_layout
->descriptor_index
];
847 src_desc
+= copy
->srcArrayElement
;
849 const struct anv_descriptor_set_binding_layout
*dst_layout
=
850 &dst
->layout
->binding
[copy
->dstBinding
];
851 struct anv_descriptor
*dst_desc
=
852 &dst
->descriptors
[dst_layout
->descriptor_index
];
853 dst_desc
+= copy
->dstArrayElement
;
855 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++)
856 dst_desc
[j
] = src_desc
[j
];
/*
 * Descriptor update templates.
 */
865 anv_descriptor_set_write_template(struct anv_descriptor_set
*set
,
866 struct anv_device
*device
,
867 struct anv_state_stream
*alloc_stream
,
868 const struct anv_descriptor_update_template
*template,
871 const struct anv_descriptor_set_layout
*layout
= set
->layout
;
873 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
874 const struct anv_descriptor_template_entry
*entry
=
875 &template->entries
[i
];
876 const struct anv_descriptor_set_binding_layout
*bind_layout
=
877 &layout
->binding
[entry
->binding
];
878 struct anv_descriptor
*desc
= &set
->descriptors
[bind_layout
->descriptor_index
];
879 desc
+= entry
->array_element
;
881 switch (entry
->type
) {
882 case VK_DESCRIPTOR_TYPE_SAMPLER
:
883 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
884 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
885 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
886 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
887 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
888 const VkDescriptorImageInfo
*info
=
889 data
+ entry
->offset
+ j
* entry
->stride
;
890 anv_descriptor_set_write_image_view(set
, &device
->info
,
893 entry
->array_element
+ j
);
897 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
898 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
899 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
900 const VkBufferView
*_bview
=
901 data
+ entry
->offset
+ j
* entry
->stride
;
902 ANV_FROM_HANDLE(anv_buffer_view
, bview
, *_bview
);
904 anv_descriptor_set_write_buffer_view(set
,
908 entry
->array_element
+ j
);
912 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
913 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
914 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
915 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
916 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
917 const VkDescriptorBufferInfo
*info
=
918 data
+ entry
->offset
+ j
* entry
->stride
;
919 ANV_FROM_HANDLE(anv_buffer
, buffer
, info
->buffer
);
921 anv_descriptor_set_write_buffer(set
,
927 entry
->array_element
+ j
,
928 info
->offset
, info
->range
);
938 VkResult
anv_CreateDescriptorUpdateTemplate(
940 const VkDescriptorUpdateTemplateCreateInfo
* pCreateInfo
,
941 const VkAllocationCallbacks
* pAllocator
,
942 VkDescriptorUpdateTemplate
* pDescriptorUpdateTemplate
)
944 ANV_FROM_HANDLE(anv_device
, device
, _device
);
945 struct anv_descriptor_update_template
*template;
947 size_t size
= sizeof(*template) +
948 pCreateInfo
->descriptorUpdateEntryCount
* sizeof(template->entries
[0]);
949 template = vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
950 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
951 if (template == NULL
)
952 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
954 template->bind_point
= pCreateInfo
->pipelineBindPoint
;
956 if (pCreateInfo
->templateType
== VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET
)
957 template->set
= pCreateInfo
->set
;
959 template->entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
960 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
961 const VkDescriptorUpdateTemplateEntryKHR
*pEntry
=
962 &pCreateInfo
->pDescriptorUpdateEntries
[i
];
964 template->entries
[i
] = (struct anv_descriptor_template_entry
) {
965 .type
= pEntry
->descriptorType
,
966 .binding
= pEntry
->dstBinding
,
967 .array_element
= pEntry
->dstArrayElement
,
968 .array_count
= pEntry
->descriptorCount
,
969 .offset
= pEntry
->offset
,
970 .stride
= pEntry
->stride
,
974 *pDescriptorUpdateTemplate
=
975 anv_descriptor_update_template_to_handle(template);
980 void anv_DestroyDescriptorUpdateTemplate(
982 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
983 const VkAllocationCallbacks
* pAllocator
)
985 ANV_FROM_HANDLE(anv_device
, device
, _device
);
986 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
987 descriptorUpdateTemplate
);
989 vk_free2(&device
->alloc
, pAllocator
, template);
992 void anv_UpdateDescriptorSetWithTemplate(
994 VkDescriptorSet descriptorSet
,
995 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
998 ANV_FROM_HANDLE(anv_device
, device
, _device
);
999 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
1000 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1001 descriptorUpdateTemplate
);
1003 anv_descriptor_set_write_template(set
, device
, NULL
, template, pData
);