2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
30 #include "util/mesa-sha1.h"
33 #include "anv_private.h"
36 * Descriptor set layouts.
39 static enum anv_descriptor_data
40 anv_descriptor_data_for_type(const struct anv_physical_device
*device
,
41 VkDescriptorType type
)
43 enum anv_descriptor_data data
= 0;
46 case VK_DESCRIPTOR_TYPE_SAMPLER
:
47 data
= ANV_DESCRIPTOR_SAMPLER_STATE
;
48 if (device
->has_bindless_samplers
)
49 data
|= ANV_DESCRIPTOR_SAMPLED_IMAGE
;
52 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
53 data
= ANV_DESCRIPTOR_SURFACE_STATE
|
54 ANV_DESCRIPTOR_SAMPLER_STATE
;
55 if (device
->has_bindless_images
|| device
->has_bindless_samplers
)
56 data
|= ANV_DESCRIPTOR_SAMPLED_IMAGE
;
59 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
60 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
61 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
62 if (device
->has_bindless_images
)
63 data
|= ANV_DESCRIPTOR_SAMPLED_IMAGE
;
66 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
67 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
70 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
71 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
72 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
73 if (device
->info
.gen
< 9)
74 data
|= ANV_DESCRIPTOR_IMAGE_PARAM
;
77 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
78 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
79 data
= ANV_DESCRIPTOR_SURFACE_STATE
|
80 ANV_DESCRIPTOR_BUFFER_VIEW
;
83 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
84 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
85 data
= ANV_DESCRIPTOR_SURFACE_STATE
;
88 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
:
89 data
= ANV_DESCRIPTOR_INLINE_UNIFORM
;
93 unreachable("Unsupported descriptor type");
96 /* On gen8 and above when we have softpin enabled, we also need to push
97 * SSBO address ranges so that we can use A64 messages in the shader.
99 if (device
->has_a64_buffer_access
&&
100 (type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
||
101 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
))
102 data
|= ANV_DESCRIPTOR_ADDRESS_RANGE
;
108 anv_descriptor_data_size(enum anv_descriptor_data data
)
112 if (data
& ANV_DESCRIPTOR_SAMPLED_IMAGE
)
113 size
+= sizeof(struct anv_sampled_image_descriptor
);
115 if (data
& ANV_DESCRIPTOR_IMAGE_PARAM
)
116 size
+= BRW_IMAGE_PARAM_SIZE
* 4;
118 if (data
& ANV_DESCRIPTOR_ADDRESS_RANGE
)
119 size
+= sizeof(struct anv_address_range_descriptor
);
124 /** Returns the size in bytes of each descriptor with the given layout */
126 anv_descriptor_size(const struct anv_descriptor_set_binding_layout
*layout
)
128 if (layout
->data
& ANV_DESCRIPTOR_INLINE_UNIFORM
) {
129 assert(layout
->data
== ANV_DESCRIPTOR_INLINE_UNIFORM
);
130 return layout
->array_size
;
133 unsigned size
= anv_descriptor_data_size(layout
->data
);
135 /* For multi-planar bindings, we make every descriptor consume the maximum
136 * number of planes so we don't have to bother with walking arrays and
137 * adding things up every time. Fortunately, YCbCr samplers aren't all
138 * that common and likely won't be in the middle of big arrays.
140 if (layout
->max_plane_count
> 1)
141 size
*= layout
->max_plane_count
;
146 /** Returns the size in bytes of each descriptor of the given type
148 * This version of the function does not have access to the entire layout so
149 * it may only work on certain descriptor types where the descriptor size is
150 * entirely determined by the descriptor type. Whenever possible, code should
151 * use anv_descriptor_size() instead.
154 anv_descriptor_type_size(const struct anv_physical_device
*pdevice
,
155 VkDescriptorType type
)
157 assert(type
!= VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
&&
158 type
!= VK_DESCRIPTOR_TYPE_SAMPLER
&&
159 type
!= VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
&&
160 type
!= VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
);
162 return anv_descriptor_data_size(anv_descriptor_data_for_type(pdevice
, type
));
166 anv_descriptor_data_supports_bindless(const struct anv_physical_device
*pdevice
,
167 enum anv_descriptor_data data
,
170 if (data
& ANV_DESCRIPTOR_ADDRESS_RANGE
) {
171 assert(pdevice
->has_a64_buffer_access
);
175 if (data
& ANV_DESCRIPTOR_SAMPLED_IMAGE
) {
176 assert(pdevice
->has_bindless_images
|| pdevice
->has_bindless_samplers
);
177 return sampler
? pdevice
->has_bindless_samplers
:
178 pdevice
->has_bindless_images
;
185 anv_descriptor_supports_bindless(const struct anv_physical_device
*pdevice
,
186 const struct anv_descriptor_set_binding_layout
*binding
,
189 return anv_descriptor_data_supports_bindless(pdevice
, binding
->data
,
194 anv_descriptor_requires_bindless(const struct anv_physical_device
*pdevice
,
195 const struct anv_descriptor_set_binding_layout
*binding
,
198 if (pdevice
->always_use_bindless
)
199 return anv_descriptor_supports_bindless(pdevice
, binding
, sampler
);
204 void anv_GetDescriptorSetLayoutSupport(
206 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
207 VkDescriptorSetLayoutSupport
* pSupport
)
209 ANV_FROM_HANDLE(anv_device
, device
, _device
);
210 const struct anv_physical_device
*pdevice
=
211 &device
->instance
->physicalDevice
;
213 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
215 for (uint32_t b
= 0; b
< pCreateInfo
->bindingCount
; b
++) {
216 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[b
];
218 enum anv_descriptor_data desc_data
=
219 anv_descriptor_data_for_type(pdevice
, binding
->descriptorType
);
221 switch (binding
->descriptorType
) {
222 case VK_DESCRIPTOR_TYPE_SAMPLER
:
223 /* There is no real limit on samplers */
226 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
227 if (anv_descriptor_data_supports_bindless(pdevice
, desc_data
, false))
230 if (binding
->pImmutableSamplers
) {
231 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++) {
232 ANV_FROM_HANDLE(anv_sampler
, sampler
,
233 binding
->pImmutableSamplers
[i
]);
234 anv_foreach_stage(s
, binding
->stageFlags
)
235 surface_count
[s
] += sampler
->n_planes
;
238 anv_foreach_stage(s
, binding
->stageFlags
)
239 surface_count
[s
] += binding
->descriptorCount
;
244 if (anv_descriptor_data_supports_bindless(pdevice
, desc_data
, false))
247 anv_foreach_stage(s
, binding
->stageFlags
)
248 surface_count
[s
] += binding
->descriptorCount
;
253 bool supported
= true;
254 for (unsigned s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
255 /* Our maximum binding table size is 240 and we need to reserve 8 for
258 if (surface_count
[s
] >= MAX_BINDING_TABLE_SIZE
- MAX_RTS
)
262 pSupport
->supported
= supported
;
265 VkResult
anv_CreateDescriptorSetLayout(
267 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
268 const VkAllocationCallbacks
* pAllocator
,
269 VkDescriptorSetLayout
* pSetLayout
)
271 ANV_FROM_HANDLE(anv_device
, device
, _device
);
273 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
275 uint32_t max_binding
= 0;
276 uint32_t immutable_sampler_count
= 0;
277 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
278 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
280 /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
282 * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
283 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
284 * pImmutableSamplers can be used to initialize a set of immutable
285 * samplers. [...] If descriptorType is not one of these descriptor
286 * types, then pImmutableSamplers is ignored.
288 * We need to be careful here and only parse pImmutableSamplers if we
289 * have one of the right descriptor types.
291 VkDescriptorType desc_type
= pCreateInfo
->pBindings
[j
].descriptorType
;
292 if ((desc_type
== VK_DESCRIPTOR_TYPE_SAMPLER
||
293 desc_type
== VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
) &&
294 pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
295 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
298 struct anv_descriptor_set_layout
*set_layout
;
299 struct anv_descriptor_set_binding_layout
*bindings
;
300 struct anv_sampler
**samplers
;
302 /* We need to allocate decriptor set layouts off the device allocator
303 * with DEVICE scope because they are reference counted and may not be
304 * destroyed when vkDestroyDescriptorSetLayout is called.
307 anv_multialloc_add(&ma
, &set_layout
, 1);
308 anv_multialloc_add(&ma
, &bindings
, max_binding
+ 1);
309 anv_multialloc_add(&ma
, &samplers
, immutable_sampler_count
);
311 if (!anv_multialloc_alloc(&ma
, &device
->alloc
,
312 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE
))
313 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
315 memset(set_layout
, 0, sizeof(*set_layout
));
316 set_layout
->ref_cnt
= 1;
317 set_layout
->binding_count
= max_binding
+ 1;
319 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
320 /* Initialize all binding_layout entries to -1 */
321 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
323 set_layout
->binding
[b
].data
= 0;
324 set_layout
->binding
[b
].max_plane_count
= 0;
325 set_layout
->binding
[b
].array_size
= 0;
326 set_layout
->binding
[b
].immutable_samplers
= NULL
;
329 /* Initialize all samplers to 0 */
330 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
332 uint32_t buffer_view_count
= 0;
333 uint32_t dynamic_offset_count
= 0;
334 uint32_t descriptor_buffer_size
= 0;
336 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
337 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
338 uint32_t b
= binding
->binding
;
339 /* We temporarily store the pointer to the binding in the
340 * immutable_samplers pointer. This provides us with a quick-and-dirty
341 * way to sort the bindings by binding number.
343 set_layout
->binding
[b
].immutable_samplers
= (void *)binding
;
346 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
347 const VkDescriptorSetLayoutBinding
*binding
=
348 (void *)set_layout
->binding
[b
].immutable_samplers
;
353 /* We temporarily stashed the pointer to the binding in the
354 * immutable_samplers pointer. Now that we've pulled it back out
355 * again, we reset immutable_samplers to NULL.
357 set_layout
->binding
[b
].immutable_samplers
= NULL
;
359 if (binding
->descriptorCount
== 0)
363 set_layout
->binding
[b
].type
= binding
->descriptorType
;
365 set_layout
->binding
[b
].data
=
366 anv_descriptor_data_for_type(&device
->instance
->physicalDevice
,
367 binding
->descriptorType
);
368 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
369 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
370 set_layout
->size
+= binding
->descriptorCount
;
372 if (set_layout
->binding
[b
].data
& ANV_DESCRIPTOR_BUFFER_VIEW
) {
373 set_layout
->binding
[b
].buffer_view_index
= buffer_view_count
;
374 buffer_view_count
+= binding
->descriptorCount
;
377 switch (binding
->descriptorType
) {
378 case VK_DESCRIPTOR_TYPE_SAMPLER
:
379 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
380 set_layout
->binding
[b
].max_plane_count
= 1;
381 if (binding
->pImmutableSamplers
) {
382 set_layout
->binding
[b
].immutable_samplers
= samplers
;
383 samplers
+= binding
->descriptorCount
;
385 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++) {
386 ANV_FROM_HANDLE(anv_sampler
, sampler
,
387 binding
->pImmutableSamplers
[i
]);
389 set_layout
->binding
[b
].immutable_samplers
[i
] = sampler
;
390 if (set_layout
->binding
[b
].max_plane_count
< sampler
->n_planes
)
391 set_layout
->binding
[b
].max_plane_count
= sampler
->n_planes
;
396 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
397 set_layout
->binding
[b
].max_plane_count
= 1;
404 switch (binding
->descriptorType
) {
405 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
406 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
407 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
408 dynamic_offset_count
+= binding
->descriptorCount
;
415 if (binding
->descriptorType
==
416 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
) {
417 /* Inline uniform blocks are specified to use the descriptor array
418 * size as the size in bytes of the block.
420 descriptor_buffer_size
= align_u32(descriptor_buffer_size
, 32);
421 set_layout
->binding
[b
].descriptor_offset
= descriptor_buffer_size
;
422 descriptor_buffer_size
+= binding
->descriptorCount
;
424 set_layout
->binding
[b
].descriptor_offset
= descriptor_buffer_size
;
425 descriptor_buffer_size
+= anv_descriptor_size(&set_layout
->binding
[b
]) *
426 binding
->descriptorCount
;
429 set_layout
->shader_stages
|= binding
->stageFlags
;
432 set_layout
->buffer_view_count
= buffer_view_count
;
433 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
434 set_layout
->descriptor_buffer_size
= descriptor_buffer_size
;
436 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
441 void anv_DestroyDescriptorSetLayout(
443 VkDescriptorSetLayout _set_layout
,
444 const VkAllocationCallbacks
* pAllocator
)
446 ANV_FROM_HANDLE(anv_device
, device
, _device
);
447 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
452 anv_descriptor_set_layout_unref(device
, set_layout
);
455 #define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));
458 sha1_update_immutable_sampler(struct mesa_sha1
*ctx
,
459 const struct anv_sampler
*sampler
)
461 if (!sampler
->conversion
)
464 /* The only thing that affects the shader is ycbcr conversion */
465 _mesa_sha1_update(ctx
, sampler
->conversion
,
466 sizeof(*sampler
->conversion
));
470 sha1_update_descriptor_set_binding_layout(struct mesa_sha1
*ctx
,
471 const struct anv_descriptor_set_binding_layout
*layout
)
473 SHA1_UPDATE_VALUE(ctx
, layout
->data
);
474 SHA1_UPDATE_VALUE(ctx
, layout
->max_plane_count
);
475 SHA1_UPDATE_VALUE(ctx
, layout
->array_size
);
476 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_index
);
477 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_index
);
478 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_view_index
);
479 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_offset
);
481 if (layout
->immutable_samplers
) {
482 for (uint16_t i
= 0; i
< layout
->array_size
; i
++)
483 sha1_update_immutable_sampler(ctx
, layout
->immutable_samplers
[i
]);
488 sha1_update_descriptor_set_layout(struct mesa_sha1
*ctx
,
489 const struct anv_descriptor_set_layout
*layout
)
491 SHA1_UPDATE_VALUE(ctx
, layout
->binding_count
);
492 SHA1_UPDATE_VALUE(ctx
, layout
->size
);
493 SHA1_UPDATE_VALUE(ctx
, layout
->shader_stages
);
494 SHA1_UPDATE_VALUE(ctx
, layout
->buffer_view_count
);
495 SHA1_UPDATE_VALUE(ctx
, layout
->dynamic_offset_count
);
496 SHA1_UPDATE_VALUE(ctx
, layout
->descriptor_buffer_size
);
498 for (uint16_t i
= 0; i
< layout
->binding_count
; i
++)
499 sha1_update_descriptor_set_binding_layout(ctx
, &layout
->binding
[i
]);
503 * Pipeline layouts. These have nothing to do with the pipeline. They are
504 * just multiple descriptor set layouts pasted together
507 VkResult
anv_CreatePipelineLayout(
509 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
510 const VkAllocationCallbacks
* pAllocator
,
511 VkPipelineLayout
* pPipelineLayout
)
513 ANV_FROM_HANDLE(anv_device
, device
, _device
);
514 struct anv_pipeline_layout
*layout
;
516 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
518 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
519 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
521 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
523 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
525 unsigned dynamic_offset_count
= 0;
527 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
528 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
529 pCreateInfo
->pSetLayouts
[set
]);
530 layout
->set
[set
].layout
= set_layout
;
531 anv_descriptor_set_layout_ref(set_layout
);
533 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
534 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
535 if (set_layout
->binding
[b
].dynamic_offset_index
< 0)
538 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
542 struct mesa_sha1 ctx
;
543 _mesa_sha1_init(&ctx
);
544 for (unsigned s
= 0; s
< layout
->num_sets
; s
++) {
545 sha1_update_descriptor_set_layout(&ctx
, layout
->set
[s
].layout
);
546 _mesa_sha1_update(&ctx
, &layout
->set
[s
].dynamic_offset_start
,
547 sizeof(layout
->set
[s
].dynamic_offset_start
));
549 _mesa_sha1_update(&ctx
, &layout
->num_sets
, sizeof(layout
->num_sets
));
550 _mesa_sha1_final(&ctx
, layout
->sha1
);
552 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
557 void anv_DestroyPipelineLayout(
559 VkPipelineLayout _pipelineLayout
,
560 const VkAllocationCallbacks
* pAllocator
)
562 ANV_FROM_HANDLE(anv_device
, device
, _device
);
563 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
565 if (!pipeline_layout
)
568 for (uint32_t i
= 0; i
< pipeline_layout
->num_sets
; i
++)
569 anv_descriptor_set_layout_unref(device
, pipeline_layout
->set
[i
].layout
);
571 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
577 * These are implemented using a big pool of memory and a free-list for the
578 * host memory allocations and a state_stream and a free list for the buffer
579 * view surface state. The spec allows us to fail to allocate due to
580 * fragmentation in all cases but two: 1) after pool reset, allocating up
581 * until the pool size with no freeing must succeed and 2) allocating and
582 * freeing only descriptor sets with the same layout. Case 1) is easy enogh,
583 * and the free lists lets us recycle blocks for case 2).
586 /* The vma heap reserves 0 to mean NULL; we have to offset by some ammount to
587 * ensure we can allocate the entire BO without hitting zero. The actual
588 * amount doesn't matter.
590 #define POOL_HEAP_OFFSET 64
594 VkResult
anv_CreateDescriptorPool(
596 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
597 const VkAllocationCallbacks
* pAllocator
,
598 VkDescriptorPool
* pDescriptorPool
)
600 ANV_FROM_HANDLE(anv_device
, device
, _device
);
601 struct anv_descriptor_pool
*pool
;
603 const VkDescriptorPoolInlineUniformBlockCreateInfoEXT
*inline_info
=
604 vk_find_struct_const(pCreateInfo
->pNext
,
605 DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT
);
607 uint32_t descriptor_count
= 0;
608 uint32_t buffer_view_count
= 0;
609 uint32_t descriptor_bo_size
= 0;
610 for (uint32_t i
= 0; i
< pCreateInfo
->poolSizeCount
; i
++) {
611 enum anv_descriptor_data desc_data
=
612 anv_descriptor_data_for_type(&device
->instance
->physicalDevice
,
613 pCreateInfo
->pPoolSizes
[i
].type
);
615 if (desc_data
& ANV_DESCRIPTOR_BUFFER_VIEW
)
616 buffer_view_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
618 unsigned desc_data_size
= anv_descriptor_data_size(desc_data
) *
619 pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
621 /* Combined image sampler descriptors can take up to 3 slots if they
622 * hold a YCbCr image.
624 if (pCreateInfo
->pPoolSizes
[i
].type
==
625 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
)
628 if (pCreateInfo
->pPoolSizes
[i
].type
==
629 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
) {
630 /* Inline uniform blocks are specified to use the descriptor array
631 * size as the size in bytes of the block.
634 desc_data_size
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
637 descriptor_bo_size
+= desc_data_size
;
639 descriptor_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
641 /* We have to align descriptor buffer allocations to 32B so that we can
642 * push descriptor buffers. This means that each descriptor buffer
643 * allocated may burn up to 32B of extra space to get the right alignment.
644 * (Technically, it's at most 28B because we're always going to start at
645 * least 4B aligned but we're being conservative here.) Allocate enough
646 * extra space that we can chop it into maxSets pieces and align each one
649 descriptor_bo_size
+= 32 * pCreateInfo
->maxSets
;
650 descriptor_bo_size
= ALIGN(descriptor_bo_size
, 4096);
651 /* We align inline uniform blocks to 32B */
653 descriptor_bo_size
+= 32 * inline_info
->maxInlineUniformBlockBindings
;
655 const size_t pool_size
=
656 pCreateInfo
->maxSets
* sizeof(struct anv_descriptor_set
) +
657 descriptor_count
* sizeof(struct anv_descriptor
) +
658 buffer_view_count
* sizeof(struct anv_buffer_view
);
659 const size_t total_size
= sizeof(*pool
) + pool_size
;
661 pool
= vk_alloc2(&device
->alloc
, pAllocator
, total_size
, 8,
662 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
664 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
666 pool
->size
= pool_size
;
668 pool
->free_list
= EMPTY
;
670 if (descriptor_bo_size
> 0) {
671 VkResult result
= anv_bo_init_new(&pool
->bo
, device
, descriptor_bo_size
);
672 if (result
!= VK_SUCCESS
) {
673 vk_free2(&device
->alloc
, pAllocator
, pool
);
677 anv_gem_set_caching(device
, pool
->bo
.gem_handle
, I915_CACHING_CACHED
);
679 pool
->bo
.map
= anv_gem_mmap(device
, pool
->bo
.gem_handle
, 0,
680 descriptor_bo_size
, 0);
681 if (pool
->bo
.map
== NULL
) {
682 anv_gem_close(device
, pool
->bo
.gem_handle
);
683 vk_free2(&device
->alloc
, pAllocator
, pool
);
684 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
687 if (device
->instance
->physicalDevice
.use_softpin
) {
688 pool
->bo
.flags
|= EXEC_OBJECT_PINNED
;
689 anv_vma_alloc(device
, &pool
->bo
);
692 util_vma_heap_init(&pool
->bo_heap
, POOL_HEAP_OFFSET
, descriptor_bo_size
);
697 anv_state_stream_init(&pool
->surface_state_stream
,
698 &device
->surface_state_pool
, 4096);
699 pool
->surface_state_free_list
= NULL
;
701 list_inithead(&pool
->desc_sets
);
703 *pDescriptorPool
= anv_descriptor_pool_to_handle(pool
);
708 void anv_DestroyDescriptorPool(
710 VkDescriptorPool _pool
,
711 const VkAllocationCallbacks
* pAllocator
)
713 ANV_FROM_HANDLE(anv_device
, device
, _device
);
714 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, _pool
);
720 anv_gem_munmap(pool
->bo
.map
, pool
->bo
.size
);
721 anv_vma_free(device
, &pool
->bo
);
722 anv_gem_close(device
, pool
->bo
.gem_handle
);
724 anv_state_stream_finish(&pool
->surface_state_stream
);
726 list_for_each_entry_safe(struct anv_descriptor_set
, set
,
727 &pool
->desc_sets
, pool_link
) {
728 anv_descriptor_set_destroy(device
, pool
, set
);
731 util_vma_heap_finish(&pool
->bo_heap
);
733 vk_free2(&device
->alloc
, pAllocator
, pool
);
736 VkResult
anv_ResetDescriptorPool(
738 VkDescriptorPool descriptorPool
,
739 VkDescriptorPoolResetFlags flags
)
741 ANV_FROM_HANDLE(anv_device
, device
, _device
);
742 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
744 list_for_each_entry_safe(struct anv_descriptor_set
, set
,
745 &pool
->desc_sets
, pool_link
) {
746 anv_descriptor_set_destroy(device
, pool
, set
);
750 pool
->free_list
= EMPTY
;
753 util_vma_heap_finish(&pool
->bo_heap
);
754 util_vma_heap_init(&pool
->bo_heap
, POOL_HEAP_OFFSET
, pool
->bo
.size
);
757 anv_state_stream_finish(&pool
->surface_state_stream
);
758 anv_state_stream_init(&pool
->surface_state_stream
,
759 &device
->surface_state_pool
, 4096);
760 pool
->surface_state_free_list
= NULL
;
/* Free-list node overlaid on a freed descriptor-set allocation inside
 * pool->data.  NOTE(review): field list reconstructed from the uses in
 * anv_descriptor_pool_alloc_set()/free_set() (entry->next, entry->size) —
 * confirm against the original file.
 */
struct pool_free_list_entry {
   uint32_t next;   /* byte offset of next free entry in pool->data, or EMPTY */
   uint32_t size;   /* size in bytes of this free chunk */
};
771 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool
*pool
,
773 struct anv_descriptor_set
**set
)
775 if (size
<= pool
->size
- pool
->next
) {
776 *set
= (struct anv_descriptor_set
*) (pool
->data
+ pool
->next
);
780 struct pool_free_list_entry
*entry
;
781 uint32_t *link
= &pool
->free_list
;
782 for (uint32_t f
= pool
->free_list
; f
!= EMPTY
; f
= entry
->next
) {
783 entry
= (struct pool_free_list_entry
*) (pool
->data
+ f
);
784 if (size
<= entry
->size
) {
786 *set
= (struct anv_descriptor_set
*) entry
;
792 if (pool
->free_list
!= EMPTY
) {
793 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
795 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY
);
801 anv_descriptor_pool_free_set(struct anv_descriptor_pool
*pool
,
802 struct anv_descriptor_set
*set
)
804 /* Put the descriptor set allocation back on the free list. */
805 const uint32_t index
= (char *) set
- pool
->data
;
806 if (index
+ set
->size
== pool
->next
) {
809 struct pool_free_list_entry
*entry
= (struct pool_free_list_entry
*) set
;
810 entry
->next
= pool
->free_list
;
811 entry
->size
= set
->size
;
812 pool
->free_list
= (char *) entry
- pool
->data
;
815 list_del(&set
->pool_link
);
818 struct surface_state_free_list_entry
{
820 struct anv_state state
;
823 static struct anv_state
824 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool
*pool
)
826 struct surface_state_free_list_entry
*entry
=
827 pool
->surface_state_free_list
;
830 struct anv_state state
= entry
->state
;
831 pool
->surface_state_free_list
= entry
->next
;
832 assert(state
.alloc_size
== 64);
835 return anv_state_stream_alloc(&pool
->surface_state_stream
, 64, 64);
840 anv_descriptor_pool_free_state(struct anv_descriptor_pool
*pool
,
841 struct anv_state state
)
843 /* Put the buffer view surface state back on the free list. */
844 struct surface_state_free_list_entry
*entry
= state
.map
;
845 entry
->next
= pool
->surface_state_free_list
;
846 entry
->state
= state
;
847 pool
->surface_state_free_list
= entry
;
851 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout
*layout
)
854 sizeof(struct anv_descriptor_set
) +
855 layout
->size
* sizeof(struct anv_descriptor
) +
856 layout
->buffer_view_count
* sizeof(struct anv_buffer_view
);
860 anv_descriptor_set_create(struct anv_device
*device
,
861 struct anv_descriptor_pool
*pool
,
862 struct anv_descriptor_set_layout
*layout
,
863 struct anv_descriptor_set
**out_set
)
865 struct anv_descriptor_set
*set
;
866 const size_t size
= anv_descriptor_set_layout_size(layout
);
868 VkResult result
= anv_descriptor_pool_alloc_set(pool
, size
, &set
);
869 if (result
!= VK_SUCCESS
)
872 if (layout
->descriptor_buffer_size
) {
873 /* Align the size to 32 so that alignment gaps don't cause extra holes
874 * in the heap which can lead to bad performance.
876 uint64_t pool_vma_offset
=
877 util_vma_heap_alloc(&pool
->bo_heap
,
878 ALIGN(layout
->descriptor_buffer_size
, 32), 32);
879 if (pool_vma_offset
== 0) {
880 anv_descriptor_pool_free_set(pool
, set
);
881 return vk_error(VK_ERROR_FRAGMENTED_POOL
);
883 assert(pool_vma_offset
>= POOL_HEAP_OFFSET
&&
884 pool_vma_offset
- POOL_HEAP_OFFSET
<= INT32_MAX
);
885 set
->desc_mem
.offset
= pool_vma_offset
- POOL_HEAP_OFFSET
;
886 set
->desc_mem
.alloc_size
= layout
->descriptor_buffer_size
;
887 set
->desc_mem
.map
= pool
->bo
.map
+ set
->desc_mem
.offset
;
889 set
->desc_surface_state
= anv_descriptor_pool_alloc_state(pool
);
890 anv_fill_buffer_surface_state(device
, set
->desc_surface_state
,
891 ISL_FORMAT_R32G32B32A32_FLOAT
,
892 (struct anv_address
) {
894 .offset
= set
->desc_mem
.offset
,
896 layout
->descriptor_buffer_size
, 1);
898 set
->desc_mem
= ANV_STATE_NULL
;
899 set
->desc_surface_state
= ANV_STATE_NULL
;
903 set
->layout
= layout
;
904 anv_descriptor_set_layout_ref(layout
);
908 (struct anv_buffer_view
*) &set
->descriptors
[layout
->size
];
909 set
->buffer_view_count
= layout
->buffer_view_count
;
911 /* By defining the descriptors to be zero now, we can later verify that
912 * a descriptor has not been populated with user data.
914 memset(set
->descriptors
, 0, sizeof(struct anv_descriptor
) * layout
->size
);
916 /* Go through and fill out immutable samplers if we have any */
917 struct anv_descriptor
*desc
= set
->descriptors
;
918 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
919 if (layout
->binding
[b
].immutable_samplers
) {
920 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++) {
921 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
922 * UpdateDescriptorSets if needed. However, if the descriptor
923 * set has an immutable sampler, UpdateDescriptorSets may never
924 * touch it, so we need to make sure it's 100% valid now.
926 * We don't need to actually provide a sampler because the helper
927 * will always write in the immutable sampler regardless of what
928 * is in the sampler parameter.
930 struct VkDescriptorImageInfo info
= { };
931 anv_descriptor_set_write_image_view(device
, set
, &info
,
932 VK_DESCRIPTOR_TYPE_SAMPLER
,
936 desc
+= layout
->binding
[b
].array_size
;
939 /* Allocate surface state for the buffer views. */
940 for (uint32_t b
= 0; b
< layout
->buffer_view_count
; b
++) {
941 set
->buffer_views
[b
].surface_state
=
942 anv_descriptor_pool_alloc_state(pool
);
951 anv_descriptor_set_destroy(struct anv_device
*device
,
952 struct anv_descriptor_pool
*pool
,
953 struct anv_descriptor_set
*set
)
955 anv_descriptor_set_layout_unref(device
, set
->layout
);
957 if (set
->desc_mem
.alloc_size
) {
958 util_vma_heap_free(&pool
->bo_heap
,
959 (uint64_t)set
->desc_mem
.offset
+ POOL_HEAP_OFFSET
,
960 set
->desc_mem
.alloc_size
);
961 anv_descriptor_pool_free_state(pool
, set
->desc_surface_state
);
964 for (uint32_t b
= 0; b
< set
->buffer_view_count
; b
++)
965 anv_descriptor_pool_free_state(pool
, set
->buffer_views
[b
].surface_state
);
967 anv_descriptor_pool_free_set(pool
, set
);
970 VkResult
anv_AllocateDescriptorSets(
972 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
973 VkDescriptorSet
* pDescriptorSets
)
975 ANV_FROM_HANDLE(anv_device
, device
, _device
);
976 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
978 VkResult result
= VK_SUCCESS
;
979 struct anv_descriptor_set
*set
;
982 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
983 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
984 pAllocateInfo
->pSetLayouts
[i
]);
986 result
= anv_descriptor_set_create(device
, pool
, layout
, &set
);
987 if (result
!= VK_SUCCESS
)
990 list_addtail(&set
->pool_link
, &pool
->desc_sets
);
992 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
995 if (result
!= VK_SUCCESS
)
996 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
1002 VkResult
anv_FreeDescriptorSets(
1004 VkDescriptorPool descriptorPool
,
1006 const VkDescriptorSet
* pDescriptorSets
)
1008 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1009 ANV_FROM_HANDLE(anv_descriptor_pool
, pool
, descriptorPool
);
1011 for (uint32_t i
= 0; i
< count
; i
++) {
1012 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
1017 anv_descriptor_set_destroy(device
, pool
, set
);
1024 anv_descriptor_set_write_image_param(uint32_t *param_desc_map
,
1025 const struct brw_image_param
*param
)
1027 #define WRITE_PARAM_FIELD(field, FIELD) \
1028 for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \
1029 param_desc_map[BRW_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]
1031 WRITE_PARAM_FIELD(offset
, OFFSET
);
1032 WRITE_PARAM_FIELD(size
, SIZE
);
1033 WRITE_PARAM_FIELD(stride
, STRIDE
);
1034 WRITE_PARAM_FIELD(tiling
, TILING
);
1035 WRITE_PARAM_FIELD(swizzling
, SWIZZLING
);
1036 WRITE_PARAM_FIELD(size
, SIZE
);
1038 #undef WRITE_PARAM_FIELD
1042 anv_surface_state_to_handle(struct anv_state state
)
1044 /* Bits 31:12 of the bindless surface offset in the extended message
1045 * descriptor is bits 25:6 of the byte-based address.
1047 assert(state
.offset
>= 0);
1048 uint32_t offset
= state
.offset
;
1049 assert((offset
& 0x3f) == 0 && offset
< (1 << 26));
1054 anv_descriptor_set_write_image_view(struct anv_device
*device
,
1055 struct anv_descriptor_set
*set
,
1056 const VkDescriptorImageInfo
* const info
,
1057 VkDescriptorType type
,
1061 const struct anv_descriptor_set_binding_layout
*bind_layout
=
1062 &set
->layout
->binding
[binding
];
1063 struct anv_descriptor
*desc
=
1064 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
1065 struct anv_image_view
*image_view
= NULL
;
1066 struct anv_sampler
*sampler
= NULL
;
1068 /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
1069 * set initialization to set the bindless samplers.
1071 assert(type
== bind_layout
->type
||
1072 type
== VK_DESCRIPTOR_TYPE_SAMPLER
);
1075 case VK_DESCRIPTOR_TYPE_SAMPLER
:
1076 sampler
= anv_sampler_from_handle(info
->sampler
);
1079 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1080 image_view
= anv_image_view_from_handle(info
->imageView
);
1081 sampler
= anv_sampler_from_handle(info
->sampler
);
1084 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
1085 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
1086 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
1087 image_view
= anv_image_view_from_handle(info
->imageView
);
1091 unreachable("invalid descriptor type");
1094 /* If this descriptor has an immutable sampler, we don't want to stomp on
1097 sampler
= bind_layout
->immutable_samplers
?
1098 bind_layout
->immutable_samplers
[element
] :
1101 *desc
= (struct anv_descriptor
) {
1103 .layout
= info
->imageLayout
,
1104 .image_view
= image_view
,
1108 void *desc_map
= set
->desc_mem
.map
+ bind_layout
->descriptor_offset
+
1109 element
* anv_descriptor_size(bind_layout
);
1111 if (bind_layout
->data
& ANV_DESCRIPTOR_SAMPLED_IMAGE
) {
1112 struct anv_sampled_image_descriptor desc_data
[3];
1113 memset(desc_data
, 0, sizeof(desc_data
));
1116 for (unsigned p
= 0; p
< image_view
->n_planes
; p
++) {
1117 struct anv_surface_state sstate
=
1118 (desc
->layout
== VK_IMAGE_LAYOUT_GENERAL
) ?
1119 image_view
->planes
[p
].general_sampler_surface_state
:
1120 image_view
->planes
[p
].optimal_sampler_surface_state
;
1121 desc_data
[p
].image
= anv_surface_state_to_handle(sstate
.state
);
1126 for (unsigned p
= 0; p
< sampler
->n_planes
; p
++)
1127 desc_data
[p
].sampler
= sampler
->bindless_state
.offset
+ p
* 32;
1130 /* We may have max_plane_count < 0 if this isn't a sampled image but it
1131 * can be no more than the size of our array of handles.
1133 assert(bind_layout
->max_plane_count
<= ARRAY_SIZE(desc_data
));
1134 memcpy(desc_map
, desc_data
,
1135 MAX2(1, bind_layout
->max_plane_count
) * sizeof(desc_data
[0]));
1138 if (bind_layout
->data
& ANV_DESCRIPTOR_IMAGE_PARAM
) {
1139 /* Storage images can only ever have one plane */
1140 assert(image_view
->n_planes
== 1);
1141 const struct brw_image_param
*image_param
=
1142 &image_view
->planes
[0].storage_image_param
;
1144 anv_descriptor_set_write_image_param(desc_map
, image_param
);
1149 anv_descriptor_set_write_buffer_view(struct anv_device
*device
,
1150 struct anv_descriptor_set
*set
,
1151 VkDescriptorType type
,
1152 struct anv_buffer_view
*buffer_view
,
1156 const struct anv_descriptor_set_binding_layout
*bind_layout
=
1157 &set
->layout
->binding
[binding
];
1158 struct anv_descriptor
*desc
=
1159 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
1161 assert(type
== bind_layout
->type
);
1163 *desc
= (struct anv_descriptor
) {
1165 .buffer_view
= buffer_view
,
1168 void *desc_map
= set
->desc_mem
.map
+ bind_layout
->descriptor_offset
+
1169 element
* anv_descriptor_size(bind_layout
);
1171 if (bind_layout
->data
& ANV_DESCRIPTOR_SAMPLED_IMAGE
) {
1172 struct anv_sampled_image_descriptor desc_data
= {
1173 .image
= anv_surface_state_to_handle(buffer_view
->surface_state
),
1175 memcpy(desc_map
, &desc_data
, sizeof(desc_data
));
1178 if (bind_layout
->data
& ANV_DESCRIPTOR_IMAGE_PARAM
) {
1179 anv_descriptor_set_write_image_param(desc_map
,
1180 &buffer_view
->storage_image_param
);
1185 anv_descriptor_set_write_buffer(struct anv_device
*device
,
1186 struct anv_descriptor_set
*set
,
1187 struct anv_state_stream
*alloc_stream
,
1188 VkDescriptorType type
,
1189 struct anv_buffer
*buffer
,
1192 VkDeviceSize offset
,
1195 const struct anv_descriptor_set_binding_layout
*bind_layout
=
1196 &set
->layout
->binding
[binding
];
1197 struct anv_descriptor
*desc
=
1198 &set
->descriptors
[bind_layout
->descriptor_index
+ element
];
1200 assert(type
== bind_layout
->type
);
1202 struct anv_address bind_addr
= anv_address_add(buffer
->address
, offset
);
1203 uint64_t bind_range
= anv_buffer_get_range(buffer
, offset
, range
);
1205 if (type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
||
1206 type
== VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
) {
1207 *desc
= (struct anv_descriptor
) {
1214 assert(bind_layout
->data
& ANV_DESCRIPTOR_BUFFER_VIEW
);
1215 struct anv_buffer_view
*bview
=
1216 &set
->buffer_views
[bind_layout
->buffer_view_index
+ element
];
1218 bview
->format
= anv_isl_format_for_descriptor_type(type
);
1219 bview
->range
= bind_range
;
1220 bview
->address
= bind_addr
;
1222 /* If we're writing descriptors through a push command, we need to
1223 * allocate the surface state from the command buffer. Otherwise it will
1224 * be allocated by the descriptor pool when calling
1225 * vkAllocateDescriptorSets. */
1227 bview
->surface_state
= anv_state_stream_alloc(alloc_stream
, 64, 64);
1229 anv_fill_buffer_surface_state(device
, bview
->surface_state
,
1230 bview
->format
, bind_addr
, bind_range
, 1);
1232 *desc
= (struct anv_descriptor
) {
1234 .buffer_view
= bview
,
1238 void *desc_map
= set
->desc_mem
.map
+ bind_layout
->descriptor_offset
+
1239 element
* anv_descriptor_size(bind_layout
);
1241 if (bind_layout
->data
& ANV_DESCRIPTOR_ADDRESS_RANGE
) {
1242 struct anv_address_range_descriptor desc
= {
1243 .address
= anv_address_physical(bind_addr
),
1244 .range
= bind_range
,
1246 memcpy(desc_map
, &desc
, sizeof(desc
));
1251 anv_descriptor_set_write_inline_uniform_data(struct anv_device
*device
,
1252 struct anv_descriptor_set
*set
,
1258 const struct anv_descriptor_set_binding_layout
*bind_layout
=
1259 &set
->layout
->binding
[binding
];
1261 assert(bind_layout
->data
& ANV_DESCRIPTOR_INLINE_UNIFORM
);
1263 void *desc_map
= set
->desc_mem
.map
+ bind_layout
->descriptor_offset
;
1265 memcpy(desc_map
+ offset
, data
, size
);
1268 void anv_UpdateDescriptorSets(
1270 uint32_t descriptorWriteCount
,
1271 const VkWriteDescriptorSet
* pDescriptorWrites
,
1272 uint32_t descriptorCopyCount
,
1273 const VkCopyDescriptorSet
* pDescriptorCopies
)
1275 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1277 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
1278 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
1279 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
1281 switch (write
->descriptorType
) {
1282 case VK_DESCRIPTOR_TYPE_SAMPLER
:
1283 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1284 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
1285 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
1286 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
1287 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
1288 anv_descriptor_set_write_image_view(device
, set
,
1289 write
->pImageInfo
+ j
,
1290 write
->descriptorType
,
1292 write
->dstArrayElement
+ j
);
1296 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
1297 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
1298 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
1299 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
1300 write
->pTexelBufferView
[j
]);
1302 anv_descriptor_set_write_buffer_view(device
, set
,
1303 write
->descriptorType
,
1306 write
->dstArrayElement
+ j
);
1310 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
1311 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
1312 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1313 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1314 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
1315 assert(write
->pBufferInfo
[j
].buffer
);
1316 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
1319 anv_descriptor_set_write_buffer(device
, set
,
1321 write
->descriptorType
,
1324 write
->dstArrayElement
+ j
,
1325 write
->pBufferInfo
[j
].offset
,
1326 write
->pBufferInfo
[j
].range
);
1330 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
: {
1331 const VkWriteDescriptorSetInlineUniformBlockEXT
*inline_write
=
1332 vk_find_struct_const(write
->pNext
,
1333 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
);
1334 assert(inline_write
->dataSize
== write
->descriptorCount
);
1335 anv_descriptor_set_write_inline_uniform_data(device
, set
,
1337 inline_write
->pData
,
1338 write
->dstArrayElement
,
1339 inline_write
->dataSize
);
1348 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
1349 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
1350 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->srcSet
);
1351 ANV_FROM_HANDLE(anv_descriptor_set
, dst
, copy
->dstSet
);
1353 const struct anv_descriptor_set_binding_layout
*src_layout
=
1354 &src
->layout
->binding
[copy
->srcBinding
];
1355 struct anv_descriptor
*src_desc
=
1356 &src
->descriptors
[src_layout
->descriptor_index
];
1357 src_desc
+= copy
->srcArrayElement
;
1359 const struct anv_descriptor_set_binding_layout
*dst_layout
=
1360 &dst
->layout
->binding
[copy
->dstBinding
];
1361 struct anv_descriptor
*dst_desc
=
1362 &dst
->descriptors
[dst_layout
->descriptor_index
];
1363 dst_desc
+= copy
->dstArrayElement
;
1365 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++)
1366 dst_desc
[j
] = src_desc
[j
];
1368 if (src_layout
->data
& ANV_DESCRIPTOR_INLINE_UNIFORM
) {
1369 assert(src_layout
->data
== ANV_DESCRIPTOR_INLINE_UNIFORM
);
1370 memcpy(dst
->desc_mem
.map
+ dst_layout
->descriptor_offset
+
1371 copy
->dstArrayElement
,
1372 src
->desc_mem
.map
+ src_layout
->descriptor_offset
+
1373 copy
->srcArrayElement
,
1374 copy
->descriptorCount
);
1376 unsigned desc_size
= anv_descriptor_size(src_layout
);
1377 if (desc_size
> 0) {
1378 assert(desc_size
== anv_descriptor_size(dst_layout
));
1379 memcpy(dst
->desc_mem
.map
+ dst_layout
->descriptor_offset
+
1380 copy
->dstArrayElement
* desc_size
,
1381 src
->desc_mem
.map
+ src_layout
->descriptor_offset
+
1382 copy
->srcArrayElement
* desc_size
,
1383 copy
->descriptorCount
* desc_size
);
1390 * Descriptor update templates.
1394 anv_descriptor_set_write_template(struct anv_device
*device
,
1395 struct anv_descriptor_set
*set
,
1396 struct anv_state_stream
*alloc_stream
,
1397 const struct anv_descriptor_update_template
*template,
1400 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
1401 const struct anv_descriptor_template_entry
*entry
=
1402 &template->entries
[i
];
1404 switch (entry
->type
) {
1405 case VK_DESCRIPTOR_TYPE_SAMPLER
:
1406 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1407 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
1408 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
1409 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
1410 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
1411 const VkDescriptorImageInfo
*info
=
1412 data
+ entry
->offset
+ j
* entry
->stride
;
1413 anv_descriptor_set_write_image_view(device
, set
,
1416 entry
->array_element
+ j
);
1420 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
1421 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
1422 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
1423 const VkBufferView
*_bview
=
1424 data
+ entry
->offset
+ j
* entry
->stride
;
1425 ANV_FROM_HANDLE(anv_buffer_view
, bview
, *_bview
);
1427 anv_descriptor_set_write_buffer_view(device
, set
,
1431 entry
->array_element
+ j
);
1435 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
1436 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
1437 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1438 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1439 for (uint32_t j
= 0; j
< entry
->array_count
; j
++) {
1440 const VkDescriptorBufferInfo
*info
=
1441 data
+ entry
->offset
+ j
* entry
->stride
;
1442 ANV_FROM_HANDLE(anv_buffer
, buffer
, info
->buffer
);
1444 anv_descriptor_set_write_buffer(device
, set
,
1449 entry
->array_element
+ j
,
1450 info
->offset
, info
->range
);
1454 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
:
1455 anv_descriptor_set_write_inline_uniform_data(device
, set
,
1457 data
+ entry
->offset
,
1458 entry
->array_element
,
1459 entry
->array_count
);
1468 VkResult
anv_CreateDescriptorUpdateTemplate(
1470 const VkDescriptorUpdateTemplateCreateInfo
* pCreateInfo
,
1471 const VkAllocationCallbacks
* pAllocator
,
1472 VkDescriptorUpdateTemplate
* pDescriptorUpdateTemplate
)
1474 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1475 struct anv_descriptor_update_template
*template;
1477 size_t size
= sizeof(*template) +
1478 pCreateInfo
->descriptorUpdateEntryCount
* sizeof(template->entries
[0]);
1479 template = vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
1480 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
1481 if (template == NULL
)
1482 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1484 template->bind_point
= pCreateInfo
->pipelineBindPoint
;
1486 if (pCreateInfo
->templateType
== VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET
)
1487 template->set
= pCreateInfo
->set
;
1489 template->entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
1490 for (uint32_t i
= 0; i
< template->entry_count
; i
++) {
1491 const VkDescriptorUpdateTemplateEntry
*pEntry
=
1492 &pCreateInfo
->pDescriptorUpdateEntries
[i
];
1494 template->entries
[i
] = (struct anv_descriptor_template_entry
) {
1495 .type
= pEntry
->descriptorType
,
1496 .binding
= pEntry
->dstBinding
,
1497 .array_element
= pEntry
->dstArrayElement
,
1498 .array_count
= pEntry
->descriptorCount
,
1499 .offset
= pEntry
->offset
,
1500 .stride
= pEntry
->stride
,
1504 *pDescriptorUpdateTemplate
=
1505 anv_descriptor_update_template_to_handle(template);
1510 void anv_DestroyDescriptorUpdateTemplate(
1512 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1513 const VkAllocationCallbacks
* pAllocator
)
1515 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1516 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1517 descriptorUpdateTemplate
);
1519 vk_free2(&device
->alloc
, pAllocator
, template);
1522 void anv_UpdateDescriptorSetWithTemplate(
1524 VkDescriptorSet descriptorSet
,
1525 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
1528 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1529 ANV_FROM_HANDLE(anv_descriptor_set
, set
, descriptorSet
);
1530 ANV_FROM_HANDLE(anv_descriptor_update_template
, template,
1531 descriptorUpdateTemplate
);
1533 anv_descriptor_set_write_template(device
, set
, NULL
, template, pData
);