turnip: Add a helper function for getting tu_buffer iovas.
[mesa.git] src/freedreno/vulkan/tu_descriptor_set.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include "tu_private.h"

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

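/* qsort comparator used by create_sorted_bindings() below: orders the
 * VkDescriptorSetLayoutBinding entries by their binding index so the
 * layout can be filled in binding order.
 */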
static int
binding_compare(const void *av, const void *bv)
{
   const VkDescriptorSetLayoutBinding *a =
      (const VkDescriptorSetLayoutBinding *) av;
   const VkDescriptorSetLayoutBinding *b =
      (const VkDescriptorSetLayoutBinding *) bv;

   return (a->binding < b->binding) ? -1 : (a->binding > b->binding) ? 1 : 0;
}

static VkDescriptorSetLayoutBinding *
create_sorted_bindings(const VkDescriptorSetLayoutBinding *bindings,
                       unsigned count)
{
   VkDescriptorSetLayoutBinding *sorted_bindings =
      malloc(count * sizeof(VkDescriptorSetLayoutBinding));
   if (!sorted_bindings)
      return NULL;

   memcpy(sorted_bindings, bindings,
          count * sizeof(VkDescriptorSetLayoutBinding));

   qsort(sorted_bindings, count, sizeof(VkDescriptorSetLayoutBinding),
         binding_compare);

   return sorted_bindings;
}

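/* Number of bytes one descriptor of the given type occupies in the
 * GPU-visible descriptor set storage. Dynamic uniform/storage buffers take
 * no space here; they are tracked on the CPU side as tu_descriptor_range
 * entries instead.
 */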
static uint32_t
descriptor_size(enum VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return 0;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      /* 64bit pointer */
      return 8;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return A6XX_TEX_CONST_DWORDS*4;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* texture const + tu_sampler struct (includes border color) */
      return A6XX_TEX_CONST_DWORDS*4 + sizeof(struct tu_sampler);
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return sizeof(struct tu_sampler);
   default:
      unreachable("unknown descriptor type\n");
      return 0;
   }
}

VkResult
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if ((pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset = sizeof(struct tu_descriptor_set_layout) +
      (max_binding + 1) * sizeof(set_layout->binding[0]);
   uint32_t size = samplers_offset + immutable_sampler_count * sizeof(struct tu_sampler);

   set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the samplers at the end of the struct */
   struct tu_sampler *samplers = (void*) &set_layout->binding[max_binding + 1];

   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      vk_free2(&device->alloc, pAllocator, set_layout);
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set_layout->binding_count = max_binding + 1;
   set_layout->shader_stages = 0;
   set_layout->dynamic_shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;

   memset(set_layout->binding, 0,
          size - sizeof(struct tu_descriptor_set_layout));

   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;
      uint32_t alignment = 4;
      unsigned binding_buffer_count = 1;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
         set_layout->binding[b].dynamic_offset_count = 1;
         break;
      default:
         break;
      }

      set_layout->size = align(set_layout->size, alignment);
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].buffer_offset = buffer_count;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         assert(!binding->pImmutableSamplers); /* Terribly ill defined how
                                                  many samplers are valid */
         assert(binding->binding == max_binding);

         set_layout->has_variable_descriptors = true;
      }

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers_offset = samplers_offset;
         set_layout->has_immutable_samplers = true;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *tu_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct tu_sampler) * binding->descriptorCount;
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      buffer_count += binding->descriptorCount * binding_buffer_count;
      dynamic_offset_count += binding->descriptorCount *
         set_layout->binding[b].dynamic_offset_count;
      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

void
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      if (size && !align_u64(size, descriptor_alignment)) {
         supported = false;
      }
      size = align_u64(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;
   struct mesa_sha1 ctx;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   _mesa_sha1_init(&ctx);
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
         if (set_layout->binding[b].immutable_samplers_offset)
            _mesa_sha1_update(
               &ctx,
               tu_immutable_samplers(set_layout, set_layout->binding + b),
               set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
      }
      _mesa_sha1_update(
         &ctx, set_layout->binding,
         sizeof(set_layout->binding[0]) * set_layout->binding_count);
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   _mesa_sha1_update(&ctx, &layout->push_constant_size,
                     sizeof(layout->push_constant_size));
   _mesa_sha1_final(&ctx, layout->sha1);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;
   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

#define EMPTY 1

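/* Allocate one descriptor set from a pool. The CPU-side tu_descriptor_set
 * (plus its buffer list and dynamic ranges) comes either from the pool's
 * preallocated host memory or from a separate host allocation, and the
 * GPU-visible descriptor storage is carved out of the pool's BO, first
 * linearly and otherwise from the first gap that fits.
 */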
static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   uint32_t buffer_count = layout->buffer_count;
   if (variable_count) {
      unsigned stride = 1;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 0;
      buffer_count = layout->binding[layout->binding_count - 1].buffer_offset +
                     *variable_count * stride;
   }
   unsigned range_offset = sizeof(struct tu_descriptor_set) +
      sizeof(struct tu_bo *) * buffer_count;
   unsigned mem_size = range_offset +
      sizeof(struct tu_descriptor_range) * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors = (struct tu_descriptor_range*)((uint8_t*)set + range_offset);
   }

   set->layout = layout;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 1;

      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_free2(&device->alloc, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* try to allocate linearly first, so that we don't spend
       * time looking for gaps if the app only allocates &
       * resets via the pool. */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t*)(pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_free2(&device->alloc, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t*)(pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }

   *out_set = set;
   return VK_SUCCESS;
}

static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t*)set->mapped_ptr - (uint8_t*)pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_free2(&device->alloc, NULL, set);
}

VkResult
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, range_count = 0;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      switch(pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
      default:
         break;
      }

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo*) * bo_count;
      host_size += sizeof(struct tu_descriptor_range) * range_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(pool, 0, sizeof(*pool));

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   if (bo_size) {
      VkResult ret;

      ret = tu_bo_init_new(device, &pool->bo, bo_size);
      assert(ret == VK_SUCCESS);

      ret = tu_bo_map(device, &pool->bo);
      assert(ret == VK_SUCCESS);
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for(int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for(int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

VkResult
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VkResult
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}

static void write_texel_buffer_descriptor(struct tu_device *device,
                                          struct tu_cmd_buffer *cmd_buffer,
                                          unsigned *dst,
                                          struct tu_bo **buffer_list,
                                          const VkBufferView _buffer_view)
{
   tu_finishme("texel buffer descriptor");
}

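/* The tu_buffer_iova() helper named in the commit title is used by the two
 * buffer-descriptor writers below. It is not defined in this file; a minimal
 * sketch of what it is assumed to look like (in tu_private.h, with tu_buffer
 * assumed to carry a backing `bo` pointer and a `bo_offset`):
 *
 *    static inline uint64_t
 *    tu_buffer_iova(struct tu_buffer *buffer)
 *    {
 *       return buffer->bo->iova + buffer->bo_offset;
 *    }
 *
 * i.e. a buffer's GPU address is its BO's iova plus the buffer's offset into
 * that BO, and callers add the descriptor's own offset on top.
 */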
static void write_buffer_descriptor(struct tu_device *device,
                                    struct tu_cmd_buffer *cmd_buffer,
                                    unsigned *dst,
                                    struct tu_bo **buffer_list,
                                    const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   dst[0] = va;
   dst[1] = va >> 32;

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, buffer->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = buffer->bo;
}

static void write_dynamic_buffer_descriptor(struct tu_device *device,
                                            struct tu_descriptor_range *range,
                                            struct tu_bo **buffer_list,
                                            const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   unsigned size = buffer_info->range;

   if (buffer_info->range == VK_WHOLE_SIZE)
      size = buffer->size - buffer_info->offset;

   range->va = va;
   range->size = size;

   *buffer_list = buffer->bo;
}

static void
write_image_descriptor(struct tu_device *device,
                       struct tu_cmd_buffer *cmd_buffer,
                       unsigned *dst,
                       struct tu_bo **buffer_list,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);
   uint32_t *descriptor;

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      descriptor = iview->storage_descriptor;
   } else {
      descriptor = iview->descriptor;
   }

   memcpy(dst, descriptor, sizeof(iview->descriptor));

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, iview->image->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = iview->image->bo;
}

static void
write_combined_image_sampler_descriptor(struct tu_device *device,
                                        struct tu_cmd_buffer *cmd_buffer,
                                        unsigned sampler_offset,
                                        unsigned *dst,
                                        struct tu_bo **buffer_list,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(device, cmd_buffer, dst, buffer_list,
                          descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler) {
      memcpy(dst + sampler_offset / sizeof(*dst), sampler, sizeof(*sampler));
   }
}

static void
write_sampler_descriptor(struct tu_device *device,
                         unsigned *dst,
                         const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   memcpy(dst, sampler, sizeof(*sampler));
}

void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set,
                     dstSetOverride ? dstSetOverride : writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      struct tu_bo **buffer_list = set->descriptors;

      const struct tu_sampler *samplers = tu_immutable_samplers(set->layout, binding_layout);

      ptr += binding_layout->offset / 4;

      ptr += binding_layout->size * writeset->dstArrayElement / 4;
      buffer_list += binding_layout->buffer_offset;
      buffer_list += writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch(writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
                                            buffer_list, writeset->pBufferInfo + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                    writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                          writeset->pTexelBufferView[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
                                   writeset->descriptorType,
                                   writeset->pImageInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(device, cmd_buffer,
                                                    A6XX_TEX_CONST_DWORDS*4,
                                                    ptr, buffer_list,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);
            if (binding_layout->immutable_samplers_offset) {
               const unsigned idx = writeset->dstArrayElement + j;
               memcpy((char*)ptr + A6XX_TEX_CONST_DWORDS*4, &samplers[idx],
                      sizeof(struct tu_sampler));
            }
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            write_sampler_descriptor(device, ptr, writeset->pImageInfo + j);
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
         ++buffer_list;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;
      struct tu_bo **src_buffer_list = src_set->descriptors;
      struct tu_bo **dst_buffer_list = dst_set->descriptors;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      src_buffer_list += src_binding_layout->buffer_offset;
      src_buffer_list += copyset->srcArrayElement;

      dst_buffer_list += dst_binding_layout->buffer_offset;
      dst_buffer_list += copyset->dstArrayElement;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            struct tu_descriptor_range *src_range, *dst_range;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            src_range = src_set->dynamic_descriptors + src_idx;
            dst_range = dst_set->dynamic_descriptors + dst_idx;
            *dst_range = *src_range;
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }
         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;

         if (src_binding_layout->type != VK_DESCRIPTOR_TYPE_SAMPLER) {
            /* Sampler descriptors don't have a buffer list. */
            dst_buffer_list[j] = src_buffer_list[j];
         }
      }
   }
}

void
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   tu_update_descriptor_sets(device, NULL, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}

VkResult
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_alloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   tu_use_args(set_layout);
   tu_stub();
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_free2(&device->alloc, pAllocator, templ);
}

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);
   tu_use_args(templ);
}

void
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, NULL, set,
                                          descriptorUpdateTemplate, pData);
}

VkResult
tu_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   *pYcbcrConversion = VK_NULL_HANDLE;
   return VK_SUCCESS;
}

void
tu_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   /* Do nothing. */
}