/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
24 #include "tu_private.h"
32 #include "util/mesa-sha1.h"
36 binding_compare(const void *av
, const void *bv
)
38 const VkDescriptorSetLayoutBinding
*a
=
39 (const VkDescriptorSetLayoutBinding
*) av
;
40 const VkDescriptorSetLayoutBinding
*b
=
41 (const VkDescriptorSetLayoutBinding
*) bv
;
43 return (a
->binding
< b
->binding
) ? -1 : (a
->binding
> b
->binding
) ? 1 : 0;
46 static VkDescriptorSetLayoutBinding
*
47 create_sorted_bindings(const VkDescriptorSetLayoutBinding
*bindings
,
50 VkDescriptorSetLayoutBinding
*sorted_bindings
=
51 malloc(count
* sizeof(VkDescriptorSetLayoutBinding
));
55 memcpy(sorted_bindings
, bindings
,
56 count
* sizeof(VkDescriptorSetLayoutBinding
));
58 qsort(sorted_bindings
, count
, sizeof(VkDescriptorSetLayoutBinding
),
61 return sorted_bindings
;
65 descriptor_size(enum VkDescriptorType type
)
68 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
69 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
71 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
72 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
73 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
74 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
77 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
78 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
79 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
80 return A6XX_TEX_CONST_DWORDS
*4;
81 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
82 /* texture const + tu_sampler struct (includes border color) */
83 return A6XX_TEX_CONST_DWORDS
*4 + sizeof(struct tu_sampler
);
84 case VK_DESCRIPTOR_TYPE_SAMPLER
:
85 return sizeof(struct tu_sampler
);
87 unreachable("unknown descriptor type\n");
93 tu_CreateDescriptorSetLayout(
95 const VkDescriptorSetLayoutCreateInfo
*pCreateInfo
,
96 const VkAllocationCallbacks
*pAllocator
,
97 VkDescriptorSetLayout
*pSetLayout
)
99 TU_FROM_HANDLE(tu_device
, device
, _device
);
100 struct tu_descriptor_set_layout
*set_layout
;
102 assert(pCreateInfo
->sType
==
103 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
104 const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT
*variable_flags
=
105 vk_find_struct_const(
107 DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT
);
109 uint32_t max_binding
= 0;
110 uint32_t immutable_sampler_count
= 0;
111 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
112 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
113 if ((pCreateInfo
->pBindings
[j
].descriptorType
== VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
||
114 pCreateInfo
->pBindings
[j
].descriptorType
== VK_DESCRIPTOR_TYPE_SAMPLER
) &&
115 pCreateInfo
->pBindings
[j
].pImmutableSamplers
) {
116 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
120 uint32_t samplers_offset
= sizeof(struct tu_descriptor_set_layout
) +
121 (max_binding
+ 1) * sizeof(set_layout
->binding
[0]);
122 uint32_t size
= samplers_offset
+ immutable_sampler_count
* sizeof(struct tu_sampler
);
124 set_layout
= vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
125 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
127 return vk_error(device
->instance
, VK_ERROR_OUT_OF_HOST_MEMORY
);
129 set_layout
->flags
= pCreateInfo
->flags
;
131 /* We just allocate all the samplers at the end of the struct */
132 struct tu_sampler
*samplers
= (void*) &set_layout
->binding
[max_binding
+ 1];
134 VkDescriptorSetLayoutBinding
*bindings
= create_sorted_bindings(
135 pCreateInfo
->pBindings
, pCreateInfo
->bindingCount
);
137 vk_free2(&device
->alloc
, pAllocator
, set_layout
);
138 return vk_error(device
->instance
, VK_ERROR_OUT_OF_HOST_MEMORY
);
141 set_layout
->binding_count
= max_binding
+ 1;
142 set_layout
->shader_stages
= 0;
143 set_layout
->dynamic_shader_stages
= 0;
144 set_layout
->has_immutable_samplers
= false;
145 set_layout
->size
= 0;
147 memset(set_layout
->binding
, 0,
148 size
- sizeof(struct tu_descriptor_set_layout
));
150 uint32_t buffer_count
= 0;
151 uint32_t dynamic_offset_count
= 0;
153 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
154 const VkDescriptorSetLayoutBinding
*binding
= bindings
+ j
;
155 uint32_t b
= binding
->binding
;
156 uint32_t alignment
= 4;
157 unsigned binding_buffer_count
= 1;
159 switch (binding
->descriptorType
) {
160 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
161 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
162 assert(!(pCreateInfo
->flags
& VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
));
163 set_layout
->binding
[b
].dynamic_offset_count
= 1;
169 set_layout
->size
= align(set_layout
->size
, alignment
);
170 set_layout
->binding
[b
].type
= binding
->descriptorType
;
171 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
172 set_layout
->binding
[b
].offset
= set_layout
->size
;
173 set_layout
->binding
[b
].buffer_offset
= buffer_count
;
174 set_layout
->binding
[b
].dynamic_offset_offset
= dynamic_offset_count
;
175 set_layout
->binding
[b
].size
= descriptor_size(binding
->descriptorType
);
177 if (variable_flags
&& binding
->binding
< variable_flags
->bindingCount
&&
178 (variable_flags
->pBindingFlags
[binding
->binding
] &
179 VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
)) {
180 assert(!binding
->pImmutableSamplers
); /* Terribly ill defined how
181 many samplers are valid */
182 assert(binding
->binding
== max_binding
);
184 set_layout
->has_variable_descriptors
= true;
187 if ((binding
->descriptorType
== VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
||
188 binding
->descriptorType
== VK_DESCRIPTOR_TYPE_SAMPLER
) &&
189 binding
->pImmutableSamplers
) {
190 set_layout
->binding
[b
].immutable_samplers_offset
= samplers_offset
;
191 set_layout
->has_immutable_samplers
= true;
193 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
194 samplers
[i
] = *tu_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
196 samplers
+= binding
->descriptorCount
;
197 samplers_offset
+= sizeof(struct tu_sampler
) * binding
->descriptorCount
;
201 binding
->descriptorCount
* set_layout
->binding
[b
].size
;
202 buffer_count
+= binding
->descriptorCount
* binding_buffer_count
;
203 dynamic_offset_count
+= binding
->descriptorCount
*
204 set_layout
->binding
[b
].dynamic_offset_count
;
205 set_layout
->shader_stages
|= binding
->stageFlags
;
210 set_layout
->buffer_count
= buffer_count
;
211 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
213 *pSetLayout
= tu_descriptor_set_layout_to_handle(set_layout
);
219 tu_DestroyDescriptorSetLayout(VkDevice _device
,
220 VkDescriptorSetLayout _set_layout
,
221 const VkAllocationCallbacks
*pAllocator
)
223 TU_FROM_HANDLE(tu_device
, device
, _device
);
224 TU_FROM_HANDLE(tu_descriptor_set_layout
, set_layout
, _set_layout
);
229 vk_free2(&device
->alloc
, pAllocator
, set_layout
);
233 tu_GetDescriptorSetLayoutSupport(
235 const VkDescriptorSetLayoutCreateInfo
*pCreateInfo
,
236 VkDescriptorSetLayoutSupport
*pSupport
)
238 VkDescriptorSetLayoutBinding
*bindings
= create_sorted_bindings(
239 pCreateInfo
->pBindings
, pCreateInfo
->bindingCount
);
241 pSupport
->supported
= false;
245 const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT
*variable_flags
=
246 vk_find_struct_const(
248 DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT
);
249 VkDescriptorSetVariableDescriptorCountLayoutSupportEXT
*variable_count
=
251 (void *) pCreateInfo
->pNext
,
252 DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT
);
253 if (variable_count
) {
254 variable_count
->maxVariableDescriptorCount
= 0;
257 bool supported
= true;
259 for (uint32_t i
= 0; i
< pCreateInfo
->bindingCount
; i
++) {
260 const VkDescriptorSetLayoutBinding
*binding
= bindings
+ i
;
262 uint64_t descriptor_sz
= descriptor_size(binding
->descriptorType
);
263 uint64_t descriptor_alignment
= 8;
265 if (size
&& !align_u64(size
, descriptor_alignment
)) {
268 size
= align_u64(size
, descriptor_alignment
);
270 uint64_t max_count
= UINT64_MAX
;
272 max_count
= (UINT64_MAX
- size
) / descriptor_sz
;
274 if (max_count
< binding
->descriptorCount
) {
277 if (variable_flags
&& binding
->binding
< variable_flags
->bindingCount
&&
279 (variable_flags
->pBindingFlags
[binding
->binding
] &
280 VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
)) {
281 variable_count
->maxVariableDescriptorCount
=
282 MIN2(UINT32_MAX
, max_count
);
284 size
+= binding
->descriptorCount
* descriptor_sz
;
289 pSupport
->supported
= supported
;
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
298 tu_CreatePipelineLayout(VkDevice _device
,
299 const VkPipelineLayoutCreateInfo
*pCreateInfo
,
300 const VkAllocationCallbacks
*pAllocator
,
301 VkPipelineLayout
*pPipelineLayout
)
303 TU_FROM_HANDLE(tu_device
, device
, _device
);
304 struct tu_pipeline_layout
*layout
;
305 struct mesa_sha1 ctx
;
307 assert(pCreateInfo
->sType
==
308 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
310 layout
= vk_alloc2(&device
->alloc
, pAllocator
, sizeof(*layout
), 8,
311 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
313 return vk_error(device
->instance
, VK_ERROR_OUT_OF_HOST_MEMORY
);
315 layout
->num_sets
= pCreateInfo
->setLayoutCount
;
317 unsigned dynamic_offset_count
= 0;
319 _mesa_sha1_init(&ctx
);
320 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
321 TU_FROM_HANDLE(tu_descriptor_set_layout
, set_layout
,
322 pCreateInfo
->pSetLayouts
[set
]);
323 layout
->set
[set
].layout
= set_layout
;
325 layout
->set
[set
].dynamic_offset_start
= dynamic_offset_count
;
326 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
327 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
*
328 set_layout
->binding
[b
].dynamic_offset_count
;
329 if (set_layout
->binding
[b
].immutable_samplers_offset
)
332 tu_immutable_samplers(set_layout
, set_layout
->binding
+ b
),
333 set_layout
->binding
[b
].array_size
* 4 * sizeof(uint32_t));
336 &ctx
, set_layout
->binding
,
337 sizeof(set_layout
->binding
[0]) * set_layout
->binding_count
);
340 layout
->dynamic_offset_count
= dynamic_offset_count
;
341 layout
->push_constant_size
= 0;
343 for (unsigned i
= 0; i
< pCreateInfo
->pushConstantRangeCount
; ++i
) {
344 const VkPushConstantRange
*range
= pCreateInfo
->pPushConstantRanges
+ i
;
345 layout
->push_constant_size
=
346 MAX2(layout
->push_constant_size
, range
->offset
+ range
->size
);
349 layout
->push_constant_size
= align(layout
->push_constant_size
, 16);
350 _mesa_sha1_update(&ctx
, &layout
->push_constant_size
,
351 sizeof(layout
->push_constant_size
));
352 _mesa_sha1_final(&ctx
, layout
->sha1
);
353 *pPipelineLayout
= tu_pipeline_layout_to_handle(layout
);
359 tu_DestroyPipelineLayout(VkDevice _device
,
360 VkPipelineLayout _pipelineLayout
,
361 const VkAllocationCallbacks
*pAllocator
)
363 TU_FROM_HANDLE(tu_device
, device
, _device
);
364 TU_FROM_HANDLE(tu_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
366 if (!pipeline_layout
)
368 vk_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
374 tu_descriptor_set_create(struct tu_device
*device
,
375 struct tu_descriptor_pool
*pool
,
376 const struct tu_descriptor_set_layout
*layout
,
377 const uint32_t *variable_count
,
378 struct tu_descriptor_set
**out_set
)
380 struct tu_descriptor_set
*set
;
381 uint32_t buffer_count
= layout
->buffer_count
;
382 if (variable_count
) {
384 if (layout
->binding
[layout
->binding_count
- 1].type
== VK_DESCRIPTOR_TYPE_SAMPLER
||
385 layout
->binding
[layout
->binding_count
- 1].type
== VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
)
387 buffer_count
= layout
->binding
[layout
->binding_count
- 1].buffer_offset
+
388 *variable_count
* stride
;
390 unsigned range_offset
= sizeof(struct tu_descriptor_set
) +
391 sizeof(struct tu_bo
*) * buffer_count
;
392 unsigned mem_size
= range_offset
+
393 sizeof(struct tu_descriptor_range
) * layout
->dynamic_offset_count
;
395 if (pool
->host_memory_base
) {
396 if (pool
->host_memory_end
- pool
->host_memory_ptr
< mem_size
)
397 return vk_error(device
->instance
, VK_ERROR_OUT_OF_POOL_MEMORY
);
399 set
= (struct tu_descriptor_set
*)pool
->host_memory_ptr
;
400 pool
->host_memory_ptr
+= mem_size
;
402 set
= vk_alloc2(&device
->alloc
, NULL
, mem_size
, 8,
403 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
406 return vk_error(device
->instance
, VK_ERROR_OUT_OF_HOST_MEMORY
);
409 memset(set
, 0, mem_size
);
411 if (layout
->dynamic_offset_count
) {
412 set
->dynamic_descriptors
= (struct tu_descriptor_range
*)((uint8_t*)set
+ range_offset
);
415 set
->layout
= layout
;
416 uint32_t layout_size
= layout
->size
;
417 if (variable_count
) {
418 assert(layout
->has_variable_descriptors
);
419 uint32_t stride
= layout
->binding
[layout
->binding_count
- 1].size
;
420 if (layout
->binding
[layout
->binding_count
- 1].type
== VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT
)
423 layout_size
= layout
->binding
[layout
->binding_count
- 1].offset
+
424 *variable_count
* stride
;
428 set
->size
= layout_size
;
430 if (!pool
->host_memory_base
&& pool
->entry_count
== pool
->max_entry_count
) {
431 vk_free2(&device
->alloc
, NULL
, set
);
432 return vk_error(device
->instance
, VK_ERROR_OUT_OF_POOL_MEMORY
);
435 /* try to allocate linearly first, so that we don't spend
436 * time looking for gaps if the app only allocates &
437 * resets via the pool. */
438 if (pool
->current_offset
+ layout_size
<= pool
->size
) {
439 set
->mapped_ptr
= (uint32_t*)(pool
->bo
.map
+ pool
->current_offset
);
440 set
->va
= pool
->bo
.iova
+ pool
->current_offset
;
441 if (!pool
->host_memory_base
) {
442 pool
->entries
[pool
->entry_count
].offset
= pool
->current_offset
;
443 pool
->entries
[pool
->entry_count
].size
= layout_size
;
444 pool
->entries
[pool
->entry_count
].set
= set
;
447 pool
->current_offset
+= layout_size
;
448 } else if (!pool
->host_memory_base
) {
452 for (index
= 0; index
< pool
->entry_count
; ++index
) {
453 if (pool
->entries
[index
].offset
- offset
>= layout_size
)
455 offset
= pool
->entries
[index
].offset
+ pool
->entries
[index
].size
;
458 if (pool
->size
- offset
< layout_size
) {
459 vk_free2(&device
->alloc
, NULL
, set
);
460 return vk_error(device
->instance
, VK_ERROR_OUT_OF_POOL_MEMORY
);
463 set
->mapped_ptr
= (uint32_t*)(pool
->bo
.map
+ offset
);
464 set
->va
= pool
->bo
.iova
+ offset
;
465 memmove(&pool
->entries
[index
+ 1], &pool
->entries
[index
],
466 sizeof(pool
->entries
[0]) * (pool
->entry_count
- index
));
467 pool
->entries
[index
].offset
= offset
;
468 pool
->entries
[index
].size
= layout_size
;
469 pool
->entries
[index
].set
= set
;
472 return vk_error(device
->instance
, VK_ERROR_OUT_OF_POOL_MEMORY
);
480 tu_descriptor_set_destroy(struct tu_device
*device
,
481 struct tu_descriptor_pool
*pool
,
482 struct tu_descriptor_set
*set
,
485 assert(!pool
->host_memory_base
);
487 if (free_bo
&& set
->size
&& !pool
->host_memory_base
) {
488 uint32_t offset
= (uint8_t*)set
->mapped_ptr
- (uint8_t*)pool
->bo
.map
;
489 for (int i
= 0; i
< pool
->entry_count
; ++i
) {
490 if (pool
->entries
[i
].offset
== offset
) {
491 memmove(&pool
->entries
[i
], &pool
->entries
[i
+1],
492 sizeof(pool
->entries
[i
]) * (pool
->entry_count
- i
- 1));
498 vk_free2(&device
->alloc
, NULL
, set
);
502 tu_CreateDescriptorPool(VkDevice _device
,
503 const VkDescriptorPoolCreateInfo
*pCreateInfo
,
504 const VkAllocationCallbacks
*pAllocator
,
505 VkDescriptorPool
*pDescriptorPool
)
507 TU_FROM_HANDLE(tu_device
, device
, _device
);
508 struct tu_descriptor_pool
*pool
;
509 uint64_t size
= sizeof(struct tu_descriptor_pool
);
510 uint64_t bo_size
= 0, bo_count
= 0, range_count
= 0;
512 for (unsigned i
= 0; i
< pCreateInfo
->poolSizeCount
; ++i
) {
513 if (pCreateInfo
->pPoolSizes
[i
].type
!= VK_DESCRIPTOR_TYPE_SAMPLER
)
514 bo_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
516 switch(pCreateInfo
->pPoolSizes
[i
].type
) {
517 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
518 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
519 range_count
+= pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
524 bo_size
+= descriptor_size(pCreateInfo
->pPoolSizes
[i
].type
) *
525 pCreateInfo
->pPoolSizes
[i
].descriptorCount
;
528 if (!(pCreateInfo
->flags
& VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
)) {
529 uint64_t host_size
= pCreateInfo
->maxSets
* sizeof(struct tu_descriptor_set
);
530 host_size
+= sizeof(struct tu_bo
*) * bo_count
;
531 host_size
+= sizeof(struct tu_descriptor_range
) * range_count
;
534 size
+= sizeof(struct tu_descriptor_pool_entry
) * pCreateInfo
->maxSets
;
537 pool
= vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
538 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
540 return vk_error(device
->instance
, VK_ERROR_OUT_OF_HOST_MEMORY
);
542 memset(pool
, 0, sizeof(*pool
));
544 if (!(pCreateInfo
->flags
& VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
)) {
545 pool
->host_memory_base
= (uint8_t*)pool
+ sizeof(struct tu_descriptor_pool
);
546 pool
->host_memory_ptr
= pool
->host_memory_base
;
547 pool
->host_memory_end
= (uint8_t*)pool
+ size
;
553 ret
= tu_bo_init_new(device
, &pool
->bo
, bo_size
);
554 assert(ret
== VK_SUCCESS
);
556 ret
= tu_bo_map(device
, &pool
->bo
);
557 assert(ret
== VK_SUCCESS
);
559 pool
->size
= bo_size
;
560 pool
->max_entry_count
= pCreateInfo
->maxSets
;
562 *pDescriptorPool
= tu_descriptor_pool_to_handle(pool
);
567 tu_DestroyDescriptorPool(VkDevice _device
,
568 VkDescriptorPool _pool
,
569 const VkAllocationCallbacks
*pAllocator
)
571 TU_FROM_HANDLE(tu_device
, device
, _device
);
572 TU_FROM_HANDLE(tu_descriptor_pool
, pool
, _pool
);
577 if (!pool
->host_memory_base
) {
578 for(int i
= 0; i
< pool
->entry_count
; ++i
) {
579 tu_descriptor_set_destroy(device
, pool
, pool
->entries
[i
].set
, false);
584 tu_bo_finish(device
, &pool
->bo
);
585 vk_free2(&device
->alloc
, pAllocator
, pool
);
589 tu_ResetDescriptorPool(VkDevice _device
,
590 VkDescriptorPool descriptorPool
,
591 VkDescriptorPoolResetFlags flags
)
593 TU_FROM_HANDLE(tu_device
, device
, _device
);
594 TU_FROM_HANDLE(tu_descriptor_pool
, pool
, descriptorPool
);
596 if (!pool
->host_memory_base
) {
597 for(int i
= 0; i
< pool
->entry_count
; ++i
) {
598 tu_descriptor_set_destroy(device
, pool
, pool
->entries
[i
].set
, false);
600 pool
->entry_count
= 0;
603 pool
->current_offset
= 0;
604 pool
->host_memory_ptr
= pool
->host_memory_base
;
610 tu_AllocateDescriptorSets(VkDevice _device
,
611 const VkDescriptorSetAllocateInfo
*pAllocateInfo
,
612 VkDescriptorSet
*pDescriptorSets
)
614 TU_FROM_HANDLE(tu_device
, device
, _device
);
615 TU_FROM_HANDLE(tu_descriptor_pool
, pool
, pAllocateInfo
->descriptorPool
);
617 VkResult result
= VK_SUCCESS
;
619 struct tu_descriptor_set
*set
= NULL
;
621 const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT
*variable_counts
=
622 vk_find_struct_const(pAllocateInfo
->pNext
, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT
);
623 const uint32_t zero
= 0;
625 /* allocate a set of buffers for each shader to contain descriptors */
626 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
627 TU_FROM_HANDLE(tu_descriptor_set_layout
, layout
,
628 pAllocateInfo
->pSetLayouts
[i
]);
630 const uint32_t *variable_count
= NULL
;
631 if (variable_counts
) {
632 if (i
< variable_counts
->descriptorSetCount
)
633 variable_count
= variable_counts
->pDescriptorCounts
+ i
;
635 variable_count
= &zero
;
638 assert(!(layout
->flags
& VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
));
640 result
= tu_descriptor_set_create(device
, pool
, layout
, variable_count
, &set
);
641 if (result
!= VK_SUCCESS
)
644 pDescriptorSets
[i
] = tu_descriptor_set_to_handle(set
);
647 if (result
!= VK_SUCCESS
) {
648 tu_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
650 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
651 pDescriptorSets
[i
] = VK_NULL_HANDLE
;
658 tu_FreeDescriptorSets(VkDevice _device
,
659 VkDescriptorPool descriptorPool
,
661 const VkDescriptorSet
*pDescriptorSets
)
663 TU_FROM_HANDLE(tu_device
, device
, _device
);
664 TU_FROM_HANDLE(tu_descriptor_pool
, pool
, descriptorPool
);
666 for (uint32_t i
= 0; i
< count
; i
++) {
667 TU_FROM_HANDLE(tu_descriptor_set
, set
, pDescriptorSets
[i
]);
669 if (set
&& !pool
->host_memory_base
)
670 tu_descriptor_set_destroy(device
, pool
, set
, true);
675 static void write_texel_buffer_descriptor(struct tu_device
*device
,
676 struct tu_cmd_buffer
*cmd_buffer
,
678 struct tu_bo
**buffer_list
,
679 const VkBufferView _buffer_view
)
681 tu_finishme("texel buffer descriptor");
684 static void write_buffer_descriptor(struct tu_device
*device
,
685 struct tu_cmd_buffer
*cmd_buffer
,
687 struct tu_bo
**buffer_list
,
688 const VkDescriptorBufferInfo
*buffer_info
)
690 TU_FROM_HANDLE(tu_buffer
, buffer
, buffer_info
->buffer
);
692 uint64_t va
= tu_buffer_iova(buffer
) + buffer_info
->offset
;
697 tu_bo_list_add(&cmd_buffer
->bo_list
, buffer
->bo
, MSM_SUBMIT_BO_READ
);
699 *buffer_list
= buffer
->bo
;
702 static void write_dynamic_buffer_descriptor(struct tu_device
*device
,
703 struct tu_descriptor_range
*range
,
704 struct tu_bo
**buffer_list
,
705 const VkDescriptorBufferInfo
*buffer_info
)
707 TU_FROM_HANDLE(tu_buffer
, buffer
, buffer_info
->buffer
);
708 uint64_t va
= tu_buffer_iova(buffer
) + buffer_info
->offset
;
709 unsigned size
= buffer_info
->range
;
711 if (buffer_info
->range
== VK_WHOLE_SIZE
)
712 size
= buffer
->size
- buffer_info
->offset
;
717 *buffer_list
= buffer
->bo
;
721 write_image_descriptor(struct tu_device
*device
,
722 struct tu_cmd_buffer
*cmd_buffer
,
724 struct tu_bo
**buffer_list
,
725 VkDescriptorType descriptor_type
,
726 const VkDescriptorImageInfo
*image_info
)
728 TU_FROM_HANDLE(tu_image_view
, iview
, image_info
->imageView
);
729 uint32_t *descriptor
;
731 if (descriptor_type
== VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
) {
732 descriptor
= iview
->storage_descriptor
;
734 descriptor
= iview
->descriptor
;
737 memcpy(dst
, descriptor
, sizeof(iview
->descriptor
));
740 tu_bo_list_add(&cmd_buffer
->bo_list
, iview
->image
->bo
, MSM_SUBMIT_BO_READ
);
742 *buffer_list
= iview
->image
->bo
;
746 write_combined_image_sampler_descriptor(struct tu_device
*device
,
747 struct tu_cmd_buffer
*cmd_buffer
,
748 unsigned sampler_offset
,
750 struct tu_bo
**buffer_list
,
751 VkDescriptorType descriptor_type
,
752 const VkDescriptorImageInfo
*image_info
,
755 TU_FROM_HANDLE(tu_sampler
, sampler
, image_info
->sampler
);
757 write_image_descriptor(device
, cmd_buffer
, dst
, buffer_list
,
758 descriptor_type
, image_info
);
759 /* copy over sampler state */
761 memcpy(dst
+ sampler_offset
/ sizeof(*dst
), sampler
, sizeof(*sampler
));
766 write_sampler_descriptor(struct tu_device
*device
,
768 const VkDescriptorImageInfo
*image_info
)
770 TU_FROM_HANDLE(tu_sampler
, sampler
, image_info
->sampler
);
772 memcpy(dst
, sampler
, sizeof(*sampler
));
776 tu_update_descriptor_sets(struct tu_device
*device
,
777 struct tu_cmd_buffer
*cmd_buffer
,
778 VkDescriptorSet dstSetOverride
,
779 uint32_t descriptorWriteCount
,
780 const VkWriteDescriptorSet
*pDescriptorWrites
,
781 uint32_t descriptorCopyCount
,
782 const VkCopyDescriptorSet
*pDescriptorCopies
)
785 for (i
= 0; i
< descriptorWriteCount
; i
++) {
786 const VkWriteDescriptorSet
*writeset
= &pDescriptorWrites
[i
];
787 TU_FROM_HANDLE(tu_descriptor_set
, set
,
788 dstSetOverride
? dstSetOverride
: writeset
->dstSet
);
789 const struct tu_descriptor_set_binding_layout
*binding_layout
=
790 set
->layout
->binding
+ writeset
->dstBinding
;
791 uint32_t *ptr
= set
->mapped_ptr
;
792 struct tu_bo
**buffer_list
= set
->descriptors
;
794 const struct tu_sampler
*samplers
= tu_immutable_samplers(set
->layout
, binding_layout
);
796 ptr
+= binding_layout
->offset
/ 4;
798 ptr
+= binding_layout
->size
* writeset
->dstArrayElement
/ 4;
799 buffer_list
+= binding_layout
->buffer_offset
;
800 buffer_list
+= writeset
->dstArrayElement
;
801 for (j
= 0; j
< writeset
->descriptorCount
; ++j
) {
802 switch(writeset
->descriptorType
) {
803 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
804 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
: {
805 unsigned idx
= writeset
->dstArrayElement
+ j
;
806 idx
+= binding_layout
->dynamic_offset_offset
;
807 assert(!(set
->layout
->flags
& VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR
));
808 write_dynamic_buffer_descriptor(device
, set
->dynamic_descriptors
+ idx
,
809 buffer_list
, writeset
->pBufferInfo
+ j
);
813 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
814 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
815 write_buffer_descriptor(device
, cmd_buffer
, ptr
, buffer_list
,
816 writeset
->pBufferInfo
+ j
);
818 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
819 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
820 write_texel_buffer_descriptor(device
, cmd_buffer
, ptr
, buffer_list
,
821 writeset
->pTexelBufferView
[j
]);
823 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
824 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
825 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
826 write_image_descriptor(device
, cmd_buffer
, ptr
, buffer_list
,
827 writeset
->descriptorType
,
828 writeset
->pImageInfo
+ j
);
830 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
831 write_combined_image_sampler_descriptor(device
, cmd_buffer
,
832 A6XX_TEX_CONST_DWORDS
*4,
834 writeset
->descriptorType
,
835 writeset
->pImageInfo
+ j
,
836 !binding_layout
->immutable_samplers_offset
);
837 if (binding_layout
->immutable_samplers_offset
) {
838 const unsigned idx
= writeset
->dstArrayElement
+ j
;
839 memcpy((char*)ptr
+ A6XX_TEX_CONST_DWORDS
*4, &samplers
[idx
],
840 sizeof(struct tu_sampler
));
843 case VK_DESCRIPTOR_TYPE_SAMPLER
:
844 write_sampler_descriptor(device
, ptr
, writeset
->pImageInfo
+ j
);
847 unreachable("unimplemented descriptor type");
850 ptr
+= binding_layout
->size
/ 4;
855 for (i
= 0; i
< descriptorCopyCount
; i
++) {
856 const VkCopyDescriptorSet
*copyset
= &pDescriptorCopies
[i
];
857 TU_FROM_HANDLE(tu_descriptor_set
, src_set
,
859 TU_FROM_HANDLE(tu_descriptor_set
, dst_set
,
861 const struct tu_descriptor_set_binding_layout
*src_binding_layout
=
862 src_set
->layout
->binding
+ copyset
->srcBinding
;
863 const struct tu_descriptor_set_binding_layout
*dst_binding_layout
=
864 dst_set
->layout
->binding
+ copyset
->dstBinding
;
865 uint32_t *src_ptr
= src_set
->mapped_ptr
;
866 uint32_t *dst_ptr
= dst_set
->mapped_ptr
;
867 struct tu_bo
**src_buffer_list
= src_set
->descriptors
;
868 struct tu_bo
**dst_buffer_list
= dst_set
->descriptors
;
870 src_ptr
+= src_binding_layout
->offset
/ 4;
871 dst_ptr
+= dst_binding_layout
->offset
/ 4;
873 src_ptr
+= src_binding_layout
->size
* copyset
->srcArrayElement
/ 4;
874 dst_ptr
+= dst_binding_layout
->size
* copyset
->dstArrayElement
/ 4;
876 src_buffer_list
+= src_binding_layout
->buffer_offset
;
877 src_buffer_list
+= copyset
->srcArrayElement
;
879 dst_buffer_list
+= dst_binding_layout
->buffer_offset
;
880 dst_buffer_list
+= copyset
->dstArrayElement
;
882 for (j
= 0; j
< copyset
->descriptorCount
; ++j
) {
883 switch (src_binding_layout
->type
) {
884 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
885 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
: {
886 unsigned src_idx
= copyset
->srcArrayElement
+ j
;
887 unsigned dst_idx
= copyset
->dstArrayElement
+ j
;
888 struct tu_descriptor_range
*src_range
, *dst_range
;
889 src_idx
+= src_binding_layout
->dynamic_offset_offset
;
890 dst_idx
+= dst_binding_layout
->dynamic_offset_offset
;
892 src_range
= src_set
->dynamic_descriptors
+ src_idx
;
893 dst_range
= dst_set
->dynamic_descriptors
+ dst_idx
;
894 *dst_range
= *src_range
;
898 memcpy(dst_ptr
, src_ptr
, src_binding_layout
->size
);
900 src_ptr
+= src_binding_layout
->size
/ 4;
901 dst_ptr
+= dst_binding_layout
->size
/ 4;
903 if (src_binding_layout
->type
!= VK_DESCRIPTOR_TYPE_SAMPLER
) {
904 /* Sampler descriptors don't have a buffer list. */
905 dst_buffer_list
[j
] = src_buffer_list
[j
];
912 tu_UpdateDescriptorSets(VkDevice _device
,
913 uint32_t descriptorWriteCount
,
914 const VkWriteDescriptorSet
*pDescriptorWrites
,
915 uint32_t descriptorCopyCount
,
916 const VkCopyDescriptorSet
*pDescriptorCopies
)
918 TU_FROM_HANDLE(tu_device
, device
, _device
);
920 tu_update_descriptor_sets(device
, NULL
, VK_NULL_HANDLE
,
921 descriptorWriteCount
, pDescriptorWrites
,
922 descriptorCopyCount
, pDescriptorCopies
);
926 tu_CreateDescriptorUpdateTemplate(
928 const VkDescriptorUpdateTemplateCreateInfo
*pCreateInfo
,
929 const VkAllocationCallbacks
*pAllocator
,
930 VkDescriptorUpdateTemplate
*pDescriptorUpdateTemplate
)
932 TU_FROM_HANDLE(tu_device
, device
, _device
);
933 TU_FROM_HANDLE(tu_descriptor_set_layout
, set_layout
,
934 pCreateInfo
->descriptorSetLayout
);
935 const uint32_t entry_count
= pCreateInfo
->descriptorUpdateEntryCount
;
937 sizeof(struct tu_descriptor_update_template
) +
938 sizeof(struct tu_descriptor_update_template_entry
) * entry_count
;
939 struct tu_descriptor_update_template
*templ
;
941 templ
= vk_alloc2(&device
->alloc
, pAllocator
, size
, 8,
942 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
944 return vk_error(device
->instance
, VK_ERROR_OUT_OF_HOST_MEMORY
);
946 *pDescriptorUpdateTemplate
=
947 tu_descriptor_update_template_to_handle(templ
);
949 tu_use_args(set_layout
);
955 tu_DestroyDescriptorUpdateTemplate(
957 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
958 const VkAllocationCallbacks
*pAllocator
)
960 TU_FROM_HANDLE(tu_device
, device
, _device
);
961 TU_FROM_HANDLE(tu_descriptor_update_template
, templ
,
962 descriptorUpdateTemplate
);
967 vk_free2(&device
->alloc
, pAllocator
, templ
);
971 tu_update_descriptor_set_with_template(
972 struct tu_device
*device
,
973 struct tu_cmd_buffer
*cmd_buffer
,
974 struct tu_descriptor_set
*set
,
975 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
978 TU_FROM_HANDLE(tu_descriptor_update_template
, templ
,
979 descriptorUpdateTemplate
);
984 tu_UpdateDescriptorSetWithTemplate(
986 VkDescriptorSet descriptorSet
,
987 VkDescriptorUpdateTemplate descriptorUpdateTemplate
,
990 TU_FROM_HANDLE(tu_device
, device
, _device
);
991 TU_FROM_HANDLE(tu_descriptor_set
, set
, descriptorSet
);
993 tu_update_descriptor_set_with_template(device
, NULL
, set
,
994 descriptorUpdateTemplate
, pData
);
998 tu_CreateSamplerYcbcrConversion(
1000 const VkSamplerYcbcrConversionCreateInfo
*pCreateInfo
,
1001 const VkAllocationCallbacks
*pAllocator
,
1002 VkSamplerYcbcrConversion
*pYcbcrConversion
)
1004 *pYcbcrConversion
= VK_NULL_HANDLE
;
1009 tu_DestroySamplerYcbcrConversion(VkDevice device
,
1010 VkSamplerYcbcrConversion ycbcrConversion
,
1011 const VkAllocationCallbacks
*pAllocator
)