turnip: fix invalid VK_ERROR_OUT_OF_POOL_MEMORY
[mesa.git] src/freedreno/vulkan/tu_descriptor_set.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * @file
 *
 * The texture and sampler descriptors are laid out in a single global space
 * across all shader stages, both for simplicity of implementation and
 * because that seems to be how things have to be structured for border
 * color handling.
 *
 * Each shader stage declares its texture/sampler count based on the last
 * descriptor set it uses. At draw emit time (though it really should happen
 * at CmdBind time), we upload the descriptor sets used by each shader stage
 * to that stage.
 */

#include "tu_private.h"

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

static int
binding_compare(const void *av, const void *bv)
{
   const VkDescriptorSetLayoutBinding *a =
      (const VkDescriptorSetLayoutBinding *) av;
   const VkDescriptorSetLayoutBinding *b =
      (const VkDescriptorSetLayoutBinding *) bv;

   return (a->binding < b->binding) ? -1 : (a->binding > b->binding) ? 1 : 0;
}

static VkDescriptorSetLayoutBinding *
create_sorted_bindings(const VkDescriptorSetLayoutBinding *bindings,
                       unsigned count)
{
   VkDescriptorSetLayoutBinding *sorted_bindings =
      malloc(count * sizeof(VkDescriptorSetLayoutBinding));
   if (!sorted_bindings)
      return NULL;

   memcpy(sorted_bindings, bindings,
          count * sizeof(VkDescriptorSetLayoutBinding));

   qsort(sorted_bindings, count, sizeof(VkDescriptorSetLayoutBinding),
         binding_compare);

   return sorted_bindings;
}

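/* Note: bindings are sorted by binding index before layout construction so
 * that per-binding offsets can be accumulated in a single ascending pass,
 * even though Vulkan allows pBindings to arrive in any order and with gaps
 * in the binding numbering.
 */
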
static uint32_t
descriptor_size(enum VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return 0;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      /* 64-bit pointer */
      return 8;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return A6XX_TEX_CONST_DWORDS * 4;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* texture const + tu_sampler struct (includes border color) */
      return A6XX_TEX_CONST_DWORDS * 4 + sizeof(struct tu_sampler);
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return sizeof(struct tu_sampler);
   default:
      unreachable("unknown descriptor type");
      return 0;
   }
}

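/* Worked example (a sketch, assuming A6XX_TEX_CONST_DWORDS is 16 as in the
 * a6xx register definitions): a SAMPLED_IMAGE descriptor then occupies
 * 16 * 4 = 64 bytes, and a COMBINED_IMAGE_SAMPLER occupies
 * 64 + sizeof(struct tu_sampler) bytes. Dynamic buffer descriptors report a
 * size of 0 here because they live in the set's separate dynamic-descriptor
 * range rather than in its BO-backed storage.
 */
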
VkResult
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if ((pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset = sizeof(struct tu_descriptor_set_layout) +
      (max_binding + 1) * sizeof(set_layout->binding[0]);
   uint32_t size = samplers_offset + immutable_sampler_count * sizeof(struct tu_sampler);

   set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the samplers at the end of the struct */
   struct tu_sampler *samplers = (void *) &set_layout->binding[max_binding + 1];

   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      vk_free2(&device->alloc, pAllocator, set_layout);
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set_layout->binding_count = max_binding + 1;
   set_layout->shader_stages = 0;
   set_layout->dynamic_shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;

   memset(set_layout->binding, 0,
          size - sizeof(struct tu_descriptor_set_layout));

   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;
      uint32_t alignment = 4;
      unsigned binding_buffer_count = 1;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         binding_buffer_count = 0;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
         set_layout->binding[b].dynamic_offset_count = 1;
         break;
      default:
         break;
      }

      set_layout->size = align(set_layout->size, alignment);
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].buffer_offset = buffer_count;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         /* Terribly ill defined how many samplers are valid */
         assert(!binding->pImmutableSamplers);
         assert(binding->binding == max_binding);

         set_layout->has_variable_descriptors = true;
      }

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers_offset = samplers_offset;
         set_layout->has_immutable_samplers = true;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *tu_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct tu_sampler) * binding->descriptorCount;
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      buffer_count += binding->descriptorCount * binding_buffer_count;
      dynamic_offset_count += binding->descriptorCount *
         set_layout->binding[b].dynamic_offset_count;
      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

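/* Example layout, as a sketch: for a set layout with binding 0 = one UBO and
 * binding 1 = four sampled images, the loop above produces
 * binding[0].offset = 0 (8 bytes), binding[1].offset = 8, and
 * set_layout->size = 8 + 4 * A6XX_TEX_CONST_DWORDS * 4. Offsets are
 * assigned in sorted-binding order, so a sparse binding numbering only
 * costs space in the binding[] metadata array, not in the descriptor
 * storage itself.
 */
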
void
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

void
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      /* align_u64() returning 0 for a non-zero size means the aligned size
       * overflowed uint64_t. */
      if (size && !align_u64(size, descriptor_alignment)) {
         supported = false;
      }
      size = align_u64(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

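/* Note that these are arithmetic-overflow checks rather than hardware
 * limits: with 64-bit sizes and 8-byte alignment, any realistic layout is
 * reported as supported, and for a variable-count binding the reported
 * maxVariableDescriptorCount is (UINT64_MAX - size) / descriptor_sz clamped
 * to UINT32_MAX, i.e. effectively unbounded.
 */
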
/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;
   struct mesa_sha1 ctx;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   _mesa_sha1_init(&ctx);
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
         if (set_layout->binding[b].immutable_samplers_offset)
            _mesa_sha1_update(
               &ctx,
               tu_immutable_samplers(set_layout, set_layout->binding + b),
               set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
      }
      _mesa_sha1_update(
         &ctx, set_layout->binding,
         sizeof(set_layout->binding[0]) * set_layout->binding_count);
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   _mesa_sha1_update(&ctx, &layout->push_constant_size,
                     sizeof(layout->push_constant_size));
   _mesa_sha1_final(&ctx, layout->sha1);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

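/* The SHA-1 accumulated above hashes the per-binding metadata, any immutable
 * sampler state, and the aligned push constant size; layout->sha1 can then
 * serve as a compact identity for the whole pipeline layout (e.g. as part
 * of a pipeline cache key) without comparing the full structures.
 */
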
void
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;
   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

#define EMPTY 1

static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   uint32_t buffer_count = layout->buffer_count;
   if (variable_count) {
      unsigned stride = 1;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 0;
      buffer_count = layout->binding[layout->binding_count - 1].buffer_offset +
                     *variable_count * stride;
   }
   unsigned range_offset = sizeof(struct tu_descriptor_set) +
      sizeof(struct tu_bo *) * buffer_count;
   unsigned mem_size = range_offset +
      sizeof(struct tu_descriptor_range) * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set *) pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors =
         (struct tu_descriptor_range *) ((uint8_t *) set + range_offset);
   }

   set->layout = layout;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 1;

      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_free2(&device->alloc, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* Try to allocate linearly first, so that we don't spend time looking
       * for gaps if the app only allocates & resets via the pool. */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t *) (pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_free2(&device->alloc, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t *) (pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }

   *out_set = set;
   return VK_SUCCESS;
}

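/* Allocation strategy, in summary: host structures either come from the
 * pool's preallocated host_memory region (when the pool was created without
 * FREE_DESCRIPTOR_SET_BIT) or from a per-set vk_alloc2. GPU-visible storage
 * is carved out of the pool BO, first by bumping current_offset, and only
 * if that fails by scanning the sorted entries[] array for a gap left by a
 * freed set. Pools that cannot free individual sets never leave gaps, so
 * they skip the scan and report VK_ERROR_OUT_OF_POOL_MEMORY directly.
 */
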
static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t *) set->mapped_ptr - (uint8_t *) pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i + 1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_free2(&device->alloc, NULL, set);
}

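/* Callers pass free_bo = false when the whole pool is being destroyed or
 * reset, since the entries[] bookkeeping is about to be discarded wholesale;
 * only vkFreeDescriptorSets needs the entry removed so its BO range can be
 * reused by a later allocation.
 */
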
VkResult
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, range_count = 0;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      default:
         break;
      }

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo *) * bo_count;
      host_size += sizeof(struct tu_descriptor_range) * range_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(pool, 0, sizeof(*pool));

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t *) pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t *) pool + size;
   }

   if (bo_size) {
      VkResult ret;

      /* Note: BO allocation/mapping failure is only caught by these asserts,
       * so in release builds a failure here would go unreported. */
      ret = tu_bo_init_new(device, &pool->bo, bo_size);
      assert(ret == VK_SUCCESS);

      ret = tu_bo_map(device, &pool->bo);
      assert(ret == VK_SUCCESS);
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

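/* Sizing sketch: bo_size is the worst-case GPU storage implied by the pool
 * sizes (samplers still contribute, via descriptor_size(), because sampler
 * words are stored inline in the set, but they are excluded from bo_count
 * since they never reference a buffer). The host-side reservation is made
 * only for pools without FREE_DESCRIPTOR_SET_BIT, where every
 * tu_descriptor_set, buffer pointer, and dynamic range can be carved out of
 * one linear region instead of individual allocations.
 */
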
void
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

VkResult
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VkResult
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}

static void
write_texel_buffer_descriptor(struct tu_device *device,
                              struct tu_cmd_buffer *cmd_buffer,
                              unsigned *dst,
                              struct tu_bo **buffer_list,
                              const VkBufferView _buffer_view)
{
   tu_finishme("texel buffer descriptor");
}

static void
write_buffer_descriptor(struct tu_device *device,
                        struct tu_cmd_buffer *cmd_buffer,
                        unsigned *dst,
                        struct tu_bo **buffer_list,
                        const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   dst[0] = va;
   dst[1] = va >> 32;

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, buffer->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = buffer->bo;
}

static void
write_dynamic_buffer_descriptor(struct tu_device *device,
                                struct tu_descriptor_range *range,
                                struct tu_bo **buffer_list,
                                const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   unsigned size = buffer_info->range;

   if (buffer_info->range == VK_WHOLE_SIZE)
      size = buffer->size - buffer_info->offset;

   range->va = va;
   range->size = size;

   *buffer_list = buffer->bo;
}

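/* Example: for a buffer of size 256 bound with offset 64 and range
 * VK_WHOLE_SIZE, the range above resolves to va = iova + 64 and
 * size = 256 - 64 = 192, matching the Vulkan rule that VK_WHOLE_SIZE covers
 * everything from the offset to the end of the buffer.
 */
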
static void
write_image_descriptor(struct tu_device *device,
                       struct tu_cmd_buffer *cmd_buffer,
                       unsigned *dst,
                       struct tu_bo **buffer_list,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);
   uint32_t *descriptor;

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      descriptor = iview->storage_descriptor;
   } else {
      descriptor = iview->descriptor;
   }

   memcpy(dst, descriptor, sizeof(iview->descriptor));

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, iview->image->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = iview->image->bo;
}

static void
write_combined_image_sampler_descriptor(struct tu_device *device,
                                        struct tu_cmd_buffer *cmd_buffer,
                                        unsigned sampler_offset,
                                        unsigned *dst,
                                        struct tu_bo **buffer_list,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(device, cmd_buffer, dst, buffer_list,
                          descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler) {
      memcpy(dst + sampler_offset / sizeof(*dst), sampler, sizeof(*sampler));
   }
}

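/* has_sampler is false when the binding carries immutable samplers: in that
 * case the sampler state lives with the set layout (see
 * tu_CreateDescriptorSetLayout), so only the image half of the descriptor
 * is written here.
 */
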
static void
write_sampler_descriptor(struct tu_device *device,
                         unsigned *dst,
                         const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   memcpy(dst, sampler, sizeof(*sampler));
}

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set,
                     dstSetOverride ? dstSetOverride : writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      struct tu_bo **buffer_list = set->descriptors;

      ptr += binding_layout->offset / 4;

      ptr += binding_layout->size * writeset->dstArrayElement / 4;
      buffer_list += binding_layout->buffer_offset;
      buffer_list += writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch (writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
                                            buffer_list, writeset->pBufferInfo + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                    writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                          writeset->pTexelBufferView[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
                                   writeset->descriptorType,
                                   writeset->pImageInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(device, cmd_buffer,
                                                    A6XX_TEX_CONST_DWORDS * 4,
                                                    ptr, buffer_list,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            write_sampler_descriptor(device, ptr, writeset->pImageInfo + j);
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
         ++buffer_list;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;
      struct tu_bo **src_buffer_list = src_set->descriptors;
      struct tu_bo **dst_buffer_list = dst_set->descriptors;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      src_buffer_list += src_binding_layout->buffer_offset;
      src_buffer_list += copyset->srcArrayElement;

      dst_buffer_list += dst_binding_layout->buffer_offset;
      dst_buffer_list += copyset->dstArrayElement;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            struct tu_descriptor_range *src_range, *dst_range;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            src_range = src_set->dynamic_descriptors + src_idx;
            dst_range = dst_set->dynamic_descriptors + dst_idx;
            *dst_range = *src_range;
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }
         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;

         if (src_binding_layout->type != VK_DESCRIPTOR_TYPE_SAMPLER) {
            /* Sampler descriptors don't have a buffer list. */
            dst_buffer_list[j] = src_buffer_list[j];
         }
      }
   }
}

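/* Pointer bookkeeping note: mapped_ptr is a uint32_t pointer, so the byte
 * offsets from the layout (binding offset, per-descriptor size) are divided
 * by 4 to step in dwords. A write at dstArrayElement e of binding b
 * therefore lands at mapped_ptr + (binding[b].offset + e * binding[b].size) / 4.
 */
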
void
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   tu_update_descriptor_sets(device, NULL, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}

VkResult
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_alloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   tu_use_args(set_layout);
   tu_stub();
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_free2(&device->alloc, pAllocator, templ);
}

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);
   tu_use_args(templ);
}

void
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, NULL, set,
                                          descriptorUpdateTemplate, pData);
}

VkResult
tu_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   *pYcbcrConversion = VK_NULL_HANDLE;
   return VK_SUCCESS;
}

void
tu_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   /* Do nothing. */
}