turnip: Pretend to support Vulkan 1.2
[mesa.git] / src/freedreno/vulkan/tu_descriptor_set.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * @file
 *
 * The texture and sampler descriptors are laid out in a single global space
 * across all shader stages, both for simplicity of implementation and
 * because that seems to be how things have to be structured for border
 * color handling.
 *
 * Each shader stage will declare its texture/sampler count based on the
 * last descriptor set it uses. At draw emit time (though it really should
 * be CmdBind time), we upload the descriptor sets used by each shader
 * stage to that stage's texture/sampler state.
 */

#include "tu_private.h"

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

static int
binding_compare(const void *av, const void *bv)
{
   const VkDescriptorSetLayoutBinding *a =
      (const VkDescriptorSetLayoutBinding *) av;
   const VkDescriptorSetLayoutBinding *b =
      (const VkDescriptorSetLayoutBinding *) bv;

   return (a->binding < b->binding) ? -1 : (a->binding > b->binding) ? 1 : 0;
}

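/* Returns a malloc()ed copy of pBindings sorted by binding index (or NULL
 * on allocation failure); callers are responsible for free()ing it.
 */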
static VkDescriptorSetLayoutBinding *
create_sorted_bindings(const VkDescriptorSetLayoutBinding *bindings,
                       unsigned count)
{
   VkDescriptorSetLayoutBinding *sorted_bindings =
      malloc(count * sizeof(VkDescriptorSetLayoutBinding));
   if (!sorted_bindings)
      return NULL;

   memcpy(sorted_bindings, bindings,
          count * sizeof(VkDescriptorSetLayoutBinding));

   qsort(sorted_bindings, count, sizeof(VkDescriptorSetLayoutBinding),
         binding_compare);

   return sorted_bindings;
}

static uint32_t
descriptor_size(enum VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return 0;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      /* 64bit pointer */
      return 8;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return A6XX_TEX_CONST_DWORDS*4;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* texture const + tu_sampler struct (includes border color) */
      return A6XX_TEX_CONST_DWORDS*4 + sizeof(struct tu_sampler);
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return sizeof(struct tu_sampler);
   default:
      unreachable("unknown descriptor type\n");
      return 0;
   }
}

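/* Worked example of the packing above (assuming A6XX_TEX_CONST_DWORDS is
 * 16, i.e. a 64-byte texture descriptor): a COMBINED_IMAGE_SAMPLER element
 * occupies 64 bytes of texture state immediately followed by
 * sizeof(struct tu_sampler) bytes of sampler state, while a plain
 * UNIFORM_BUFFER element is just the 8-byte iova written by
 * write_buffer_descriptor() below. Dynamic buffer types take no space in
 * the set's BO at all; they live in the host-side dynamic_descriptors
 * array instead.
 */
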
VkResult
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if ((pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset = sizeof(struct tu_descriptor_set_layout) +
      (max_binding + 1) * sizeof(set_layout->binding[0]);
   uint32_t size = samplers_offset + immutable_sampler_count * sizeof(struct tu_sampler);

   set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the samplers at the end of the struct */
   struct tu_sampler *samplers = (void*) &set_layout->binding[max_binding + 1];

   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      vk_free2(&device->alloc, pAllocator, set_layout);
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set_layout->binding_count = max_binding + 1;
   set_layout->shader_stages = 0;
   set_layout->dynamic_shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;

   memset(set_layout->binding, 0,
          size - sizeof(struct tu_descriptor_set_layout));

   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;
      uint32_t alignment = 4;
      unsigned binding_buffer_count = 1;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
         set_layout->binding[b].dynamic_offset_count = 1;
         break;
      default:
         break;
      }

      set_layout->size = align(set_layout->size, alignment);
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].buffer_offset = buffer_count;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         assert(!binding->pImmutableSamplers); /* Terribly ill defined how
                                                  many samplers are valid */
         assert(binding->binding == max_binding);

         set_layout->has_variable_descriptors = true;
      }

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers_offset = samplers_offset;
         set_layout->has_immutable_samplers = true;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *tu_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct tu_sampler) * binding->descriptorCount;
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      buffer_count += binding->descriptorCount * binding_buffer_count;
      dynamic_offset_count += binding->descriptorCount *
         set_layout->binding[b].dynamic_offset_count;
      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

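/* For reference, a minimal (hypothetical) client-side use of the entry
 * point above: one UBO binding for the vertex stage plus one combined
 * image/sampler binding for the fragment stage.
 *
 *    VkDescriptorSetLayoutBinding bindings[] = {
 *       { .binding = 0,
 *         .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *         .descriptorCount = 1,
 *         .stageFlags = VK_SHADER_STAGE_VERTEX_BIT },
 *       { .binding = 1,
 *         .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
 *         .descriptorCount = 1,
 *         .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT },
 *    };
 *    VkDescriptorSetLayoutCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
 *       .bindingCount = 2,
 *       .pBindings = bindings,
 *    };
 *    vkCreateDescriptorSetLayout(device, &info, NULL, &layout);
 */
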
void
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

void
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      /* align_u64() returning 0 here means the running size overflowed */
      if (size && !align_u64(size, descriptor_alignment)) {
         supported = false;
      }
      size = align_u64(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

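/* Besides recording the per-set layouts and dynamic-offset starts,
 * creation below hashes the binding metadata, immutable samplers and push
 * constant size into layout->sha1, which presumably serves as a stable
 * identity for shader/pipeline caching.
 */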
VkResult
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;
   struct mesa_sha1 ctx;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   _mesa_sha1_init(&ctx);
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
         if (set_layout->binding[b].immutable_samplers_offset)
            _mesa_sha1_update(
               &ctx,
               tu_immutable_samplers(set_layout, set_layout->binding + b),
               set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
      }
      _mesa_sha1_update(
         &ctx, set_layout->binding,
         sizeof(set_layout->binding[0]) * set_layout->binding_count);
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   _mesa_sha1_update(&ctx, &layout->push_constant_size,
                     sizeof(layout->push_constant_size));
   _mesa_sha1_final(&ctx, layout->sha1);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;
   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

#define EMPTY 1

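/* Sub-allocates one set from the pool BO. Allocation is a linear bump of
 * pool->current_offset while that suffices; once the linear region is
 * exhausted (and individual frees are allowed), the pool->entries array,
 * kept sorted by offset, is scanned for the first gap large enough.
 */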
static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   uint32_t buffer_count = layout->buffer_count;
   if (variable_count) {
      unsigned stride = 1;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 0;
      buffer_count = layout->binding[layout->binding_count - 1].buffer_offset +
                     *variable_count * stride;
   }
   unsigned range_offset = sizeof(struct tu_descriptor_set) +
      sizeof(struct tu_bo *) * buffer_count;
   unsigned mem_size = range_offset +
      sizeof(struct tu_descriptor_range) * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors = (struct tu_descriptor_range*)((uint8_t*)set + range_offset);
   }

   set->layout = layout;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 1;

      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_free2(&device->alloc, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* try to allocate linearly first, so that we don't spend
       * time looking for gaps if the app only allocates &
       * resets via the pool. */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t*)(pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_free2(&device->alloc, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t*)(pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }

   *out_set = set;
   return VK_SUCCESS;
}

static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t*)set->mapped_ptr - (uint8_t*)pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_free2(&device->alloc, NULL, set);
}

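/* The pool BO is sized as the sum of descriptor_size() over all requested
 * pool sizes. Unless VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is
 * set, the host-side tu_descriptor_set structs (plus their buffer and
 * dynamic-range arrays) are also carved out of one block appended to the
 * pool allocation, so sets can only be reclaimed by resetting the pool.
 */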
VkResult
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, range_count = 0;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      default:
         break;
      }

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo*) * bo_count;
      host_size += sizeof(struct tu_descriptor_range) * range_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(pool, 0, sizeof(*pool));

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   if (bo_size) {
      VkResult ret;

      ret = tu_bo_init_new(device, &pool->bo, bo_size);
      assert(ret == VK_SUCCESS);

      ret = tu_bo_map(device, &pool->bo);
      assert(ret == VK_SUCCESS);
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

VkResult
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VkResult
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}

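/* The write_*_descriptor() helpers below emit the packed descriptor words
 * at `dst` and track the backing BO: when called with a command buffer
 * (presumably the push-descriptor path) the BO goes into its bo_list,
 * otherwise it is stashed in the set's buffer_list for submit-time
 * tracking.
 */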
static void write_texel_buffer_descriptor(struct tu_device *device,
                                          struct tu_cmd_buffer *cmd_buffer,
                                          unsigned *dst,
                                          struct tu_bo **buffer_list,
                                          const VkBufferView _buffer_view)
{
   tu_finishme("texel buffer descriptor");
}

static void write_buffer_descriptor(struct tu_device *device,
                                    struct tu_cmd_buffer *cmd_buffer,
                                    unsigned *dst,
                                    struct tu_bo **buffer_list,
                                    const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   /* low dword, then high dword of the 64-bit iova */
   dst[0] = va;
   dst[1] = va >> 32;

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, buffer->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = buffer->bo;
}

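/* Dynamic buffer descriptors are not written into the set's BO (their
 * descriptor_size() is 0); the resolved iova and range instead land in the
 * host-side tu_descriptor_range, presumably so the dynamic offset can be
 * applied when the set is bound.
 */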
static void write_dynamic_buffer_descriptor(struct tu_device *device,
                                            struct tu_descriptor_range *range,
                                            struct tu_bo **buffer_list,
                                            const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   unsigned size = buffer_info->range;

   if (buffer_info->range == VK_WHOLE_SIZE)
      size = buffer->size - buffer_info->offset;

   range->va = va;
   range->size = size;

   *buffer_list = buffer->bo;
}

static void
write_image_descriptor(struct tu_device *device,
                       struct tu_cmd_buffer *cmd_buffer,
                       unsigned *dst,
                       struct tu_bo **buffer_list,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);
   uint32_t *descriptor;

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      descriptor = iview->storage_descriptor;
   } else {
      descriptor = iview->descriptor;
   }

   memcpy(dst, descriptor, sizeof(iview->descriptor));

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, iview->image->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = iview->image->bo;
}

static void
write_combined_image_sampler_descriptor(struct tu_device *device,
                                        struct tu_cmd_buffer *cmd_buffer,
                                        unsigned sampler_offset,
                                        unsigned *dst,
                                        struct tu_bo **buffer_list,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(device, cmd_buffer, dst, buffer_list,
                          descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler) {
      memcpy(dst + sampler_offset / sizeof(*dst), sampler, sizeof(*sampler));
   }
}

static void
write_sampler_descriptor(struct tu_device *device,
                         unsigned *dst,
                         const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   memcpy(dst, sampler, sizeof(*sampler));
}

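/* Core write/copy implementation shared by vkUpdateDescriptorSets and the
 * command-buffer update paths. `ptr` walks the mapped set in dwords: each
 * binding starts at binding_layout->offset / 4 and each array element
 * advances by binding_layout->size / 4, mirroring descriptor_size().
 */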
void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set,
                     dstSetOverride ? dstSetOverride : writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      struct tu_bo **buffer_list = set->descriptors;

      ptr += binding_layout->offset / 4;

      ptr += binding_layout->size * writeset->dstArrayElement / 4;
      buffer_list += binding_layout->buffer_offset;
      buffer_list += writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch (writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
                                            buffer_list, writeset->pBufferInfo + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                    writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                          writeset->pTexelBufferView[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
                                   writeset->descriptorType,
                                   writeset->pImageInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(device, cmd_buffer,
                                                    A6XX_TEX_CONST_DWORDS*4,
                                                    ptr, buffer_list,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            write_sampler_descriptor(device, ptr, writeset->pImageInfo + j);
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
         ++buffer_list;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;
      struct tu_bo **src_buffer_list = src_set->descriptors;
      struct tu_bo **dst_buffer_list = dst_set->descriptors;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      src_buffer_list += src_binding_layout->buffer_offset;
      src_buffer_list += copyset->srcArrayElement;

      dst_buffer_list += dst_binding_layout->buffer_offset;
      dst_buffer_list += copyset->dstArrayElement;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            struct tu_descriptor_range *src_range, *dst_range;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            src_range = src_set->dynamic_descriptors + src_idx;
            dst_range = dst_set->dynamic_descriptors + dst_idx;
            *dst_range = *src_range;
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }
         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;

         if (src_binding_layout->type != VK_DESCRIPTOR_TYPE_SAMPLER) {
            /* Sampler descriptors don't have a buffer list. */
            dst_buffer_list[j] = src_buffer_list[j];
         }
      }
   }
}

void
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   tu_update_descriptor_sets(device, NULL, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}

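/* Hypothetical client-side call that the entry point above serves: writing
 * a single uniform buffer into binding 0 of an allocated set.
 *
 *    VkDescriptorBufferInfo buf_info = {
 *       .buffer = buffer,
 *       .offset = 0,
 *       .range = VK_WHOLE_SIZE,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = set,
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &buf_info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */
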
VkResult
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_alloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   tu_use_args(set_layout);
   tu_stub();
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_free2(&device->alloc, pAllocator, templ);
}

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);
   tu_use_args(templ);
}

void
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, NULL, set,
                                          descriptorUpdateTemplate, pData);
}

VkResult
tu_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   *pYcbcrConversion = VK_NULL_HANDLE;
   return VK_SUCCESS;
}

void
tu_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   /* Do nothing. */
}