/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "tu_private.h"

#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"
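
/* Descriptor set layouts, pools, sets and the descriptor update entry points
 * for turnip. Descriptor memory for a pool lives in a single BO (pool->bo);
 * individual sets are sub-allocated from it and written directly through the
 * CPU mapping (set->mapped_ptr).
 */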

static int
binding_compare(const void *av, const void *bv)
{
   const VkDescriptorSetLayoutBinding *a =
      (const VkDescriptorSetLayoutBinding *) av;
   const VkDescriptorSetLayoutBinding *b =
      (const VkDescriptorSetLayoutBinding *) bv;

   return (a->binding < b->binding) ? -1 : (a->binding > b->binding) ? 1 : 0;
}

static VkDescriptorSetLayoutBinding *
create_sorted_bindings(const VkDescriptorSetLayoutBinding *bindings,
                       unsigned count)
{
   VkDescriptorSetLayoutBinding *sorted_bindings =
      malloc(count * sizeof(VkDescriptorSetLayoutBinding));
   if (!sorted_bindings)
      return NULL;

   memcpy(sorted_bindings, bindings,
          count * sizeof(VkDescriptorSetLayoutBinding));

   qsort(sorted_bindings, count, sizeof(VkDescriptorSetLayoutBinding),
         binding_compare);

   return sorted_bindings;
}

static uint32_t
descriptor_size(enum VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      return 16;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* texture const + tu_sampler struct (includes border color) */
      return A6XX_TEX_CONST_DWORDS*4 + sizeof(struct tu_sampler);
   default:
      unreachable("unknown descriptor type\n");
      return 0;
   }
}
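
/* Worked example (hypothetical bindings) of how tu_CreateDescriptorSetLayout
 * below adds these sizes up:
 *
 *    binding 0: UNIFORM_BUFFER, descriptorCount = 2
 *       -> offset 0, 2 * descriptor_size(UNIFORM_BUFFER) bytes
 *    binding 1: COMBINED_IMAGE_SAMPLER, descriptorCount = 1
 *       -> offset = binding 0's end aligned to 8 bytes,
 *          A6XX_TEX_CONST_DWORDS*4 + sizeof(struct tu_sampler) bytes
 *
 * set_layout->size ends up as the pool memory one set of this layout needs,
 * and binding[b].offset is each binding's byte offset within that chunk.
 */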

VkResult
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t max_binding = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++)
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);

   size_t size =
      sizeof(struct tu_descriptor_set_layout) +
      (max_binding + 1) * sizeof(set_layout->binding[0]);

   set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the samplers at the end of the struct */
   uint32_t *samplers = (uint32_t *) &set_layout->binding[max_binding + 1];
   (void) samplers; /* TODO: Use me */

   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      vk_free2(&device->alloc, pAllocator, set_layout);
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set_layout->binding_count = max_binding + 1;
   set_layout->shader_stages = 0;
   set_layout->dynamic_shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;

   memset(set_layout->binding, 0,
          size - sizeof(struct tu_descriptor_set_layout));

   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;
      uint32_t alignment = 8;
      unsigned binding_buffer_count = 1;

      set_layout->size = align(set_layout->size, alignment);
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].buffer_offset = buffer_count;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         assert(!binding->pImmutableSamplers); /* Terribly ill defined how
                                                  many samplers are valid */
         assert(binding->binding == max_binding);

         set_layout->has_variable_descriptors = true;
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      buffer_count += binding->descriptorCount * binding_buffer_count;
      dynamic_offset_count += binding->descriptorCount *
                              set_layout->binding[b].dynamic_offset_count;
      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

void
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      if (size && !align_u64(size, descriptor_alignment)) {
         supported = false;
      }
      size = align_u64(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */
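
/* The SHA1 computed below covers every set layout's binding array (plus any
 * immutable sampler words) and the padded push constant size, presumably so
 * that compatible pipeline layouts hash to the same layout->sha1 (e.g. as a
 * pipeline cache key); only the digest in layout->sha1 is kept around.
 */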

VkResult
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;
   struct mesa_sha1 ctx;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   _mesa_sha1_init(&ctx);
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
                                 set_layout->binding[b].dynamic_offset_count;
         if (set_layout->binding[b].immutable_samplers_offset)
            _mesa_sha1_update(
               &ctx,
               tu_immutable_samplers(set_layout, set_layout->binding + b),
               set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
      }
      _mesa_sha1_update(
         &ctx, set_layout->binding,
         sizeof(set_layout->binding[0]) * set_layout->binding_count);
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   _mesa_sha1_update(&ctx, &layout->push_constant_size,
                     sizeof(layout->push_constant_size));
   _mesa_sha1_final(&ctx, layout->sha1);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}
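
/* Descriptor set allocation uses two strategies, picked at pool creation:
 *
 *  - pools created without FREE_DESCRIPTOR_SET_BIT carve the host-side
 *    structs out of pool->host_memory_* with a bump allocator and never free
 *    individual sets;
 *  - otherwise each set is its own vk_alloc2 allocation and is tracked in
 *    pool->entries so it can be freed and its BO range reused.
 *
 * GPU memory for the descriptors always comes from pool->bo: linearly from
 * pool->current_offset while it lasts, then (for freeable pools) by a
 * first-fit scan over the offset-sorted entries list.
 */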

static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   uint32_t buffer_count = layout->buffer_count;
   if (variable_count) {
      unsigned stride = 1;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 0;
      buffer_count = layout->binding[layout->binding_count - 1].buffer_offset +
                     *variable_count * stride;
   }
   unsigned range_offset = sizeof(struct tu_descriptor_set) +
      sizeof(struct tu_bo *) * buffer_count;
   unsigned mem_size = range_offset +
      sizeof(struct tu_descriptor_range) * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set *)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors = (struct tu_descriptor_range *)((uint8_t*)set + range_offset);
   }

   set->layout = layout;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 1;

      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_free2(&device->alloc, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* try to allocate linearly first, so that we don't spend
       * time looking for gaps if the app only allocates &
       * resets via the pool. */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t*)(pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_free2(&device->alloc, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t*)(pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }

   *out_set = set;
   return VK_SUCCESS;
}

static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t*)set->mapped_ptr - (uint8_t*)pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_free2(&device->alloc, NULL, set);
}
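
/* Pool sizing: bo_size accumulates the GPU-visible descriptor bytes for every
 * requested pool size, bo_count counts descriptors that carry a buffer
 * pointer (everything except plain samplers), and range_count (still zero
 * here) presumably reserves room for dynamic descriptor ranges. The host-side
 * allocation then grows by either the bump-allocator arena or the entries
 * array, depending on whether sets can be freed individually.
 */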

VkResult
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, range_count = 0;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo*) * bo_count;
      host_size += sizeof(struct tu_descriptor_range) * range_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(pool, 0, sizeof(*pool));

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   if (bo_size) {
      VkResult ret;

      ret = tu_bo_init_new(device, &pool->bo, bo_size);
      assert(ret == VK_SUCCESS);

      ret = tu_bo_map(device, &pool->bo);
      assert(ret == VK_SUCCESS);
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for(int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);

   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for(int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

VkResult
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VkResult
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}
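
/* Descriptor write helpers. A buffer descriptor stores the buffer's 64-bit
 * iova (plus the requested offset); image descriptors copy the texture
 * constant words from the image view (iview->descriptor, or
 * iview->storage_descriptor for storage images); a combined image/sampler
 * descriptor is the image words followed by the tu_sampler struct at
 * sampler_offset. When called from a command buffer, the backing BO is added
 * to the submit's BO list instead of the set's buffer list.
 */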

static void write_buffer_descriptor(struct tu_device *device,
                                    struct tu_cmd_buffer *cmd_buffer,
                                    unsigned *dst,
                                    struct tu_bo **buffer_list,
                                    const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);
   uint64_t va = buffer->bo->iova;

   va += buffer_info->offset + buffer->bo_offset;
   dst[0] = va;
   dst[1] = va >> 32;

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, buffer->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = buffer->bo;
}

static void
write_image_descriptor(struct tu_device *device,
                       struct tu_cmd_buffer *cmd_buffer,
                       unsigned size, unsigned *dst,
                       struct tu_bo **buffer_list,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);
   uint32_t *descriptor;

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      descriptor = iview->storage_descriptor;
   } else {
      descriptor = iview->descriptor;
   }

   memcpy(dst, descriptor, sizeof(iview->descriptor));

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, iview->image->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = iview->image->bo;
}

static void
write_combined_image_sampler_descriptor(struct tu_device *device,
                                        struct tu_cmd_buffer *cmd_buffer,
                                        unsigned sampler_offset,
                                        unsigned *dst,
                                        struct tu_bo **buffer_list,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(device, cmd_buffer, sampler_offset, dst, buffer_list,
                          descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler)
      memcpy(dst + sampler_offset / sizeof(*dst), sampler, sizeof(*sampler));
}
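
/* tu_update_descriptor_sets backs both vkUpdateDescriptorSets and internal
 * callers (dstSetOverride presumably exists so a push-descriptor style path
 * can redirect the writes to its own set). For each write, ptr is advanced to
 * binding_layout->offset plus dstArrayElement descriptors into the set's CPU
 * mapping, and buffer_list tracks the BO behind each element for later
 * submission.
 *
 * A minimal sketch of an application-side call that lands here (hypothetical
 * handles):
 *
 *    VkDescriptorBufferInfo info = { buffer, 0, VK_WHOLE_SIZE };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = set, .dstBinding = 0, .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */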

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set,
                     dstSetOverride ? dstSetOverride : writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      struct tu_bo **buffer_list = set->descriptors;

      ptr += binding_layout->offset / 4;

      ptr += binding_layout->size * writeset->dstArrayElement / 4;
      buffer_list += binding_layout->buffer_offset;
      buffer_list += writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch(writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                    writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(device, cmd_buffer,
                                                    A6XX_TEX_CONST_DWORDS*4,
                                                    ptr, buffer_list,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
         ++buffer_list;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;
      struct tu_bo **src_buffer_list = src_set->descriptors;
      struct tu_bo **dst_buffer_list = dst_set->descriptors;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      src_buffer_list += src_binding_layout->buffer_offset;
      src_buffer_list += copyset->srcArrayElement;

      dst_buffer_list += dst_binding_layout->buffer_offset;
      dst_buffer_list += copyset->dstArrayElement;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            struct tu_descriptor_range *src_range, *dst_range;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            src_range = src_set->dynamic_descriptors + src_idx;
            dst_range = dst_set->dynamic_descriptors + dst_idx;
            *dst_range = *src_range;
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }
         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;

         if (src_binding_layout->type != VK_DESCRIPTOR_TYPE_SAMPLER) {
            /* Sampler descriptors don't have a buffer list. */
            dst_buffer_list[j] = src_buffer_list[j];
         }
      }
   }
}

void
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   tu_update_descriptor_sets(device, NULL, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}
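
/* Descriptor update templates are only stubbed out at this point: the entry
 * array is sized and allocated but never filled in, and updating a set with
 * a template does nothing beyond looking up the template handle.
 */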

VkResult
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_alloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   tu_use_args(set_layout);
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_free2(&device->alloc, pAllocator, templ);
}

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);
   tu_use_args(templ);
}

void
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, NULL, set,
                                          descriptorUpdateTemplate, pData);
}

VkResult
tu_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   *pYcbcrConversion = VK_NULL_HANDLE;
   return VK_SUCCESS;
}

void
tu_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   /* Do nothing. */
}
)