turnip: Fix unused variable warnings.
[mesa.git] src/freedreno/vulkan/tu_descriptor_set.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include "tu_private.h"

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

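/* qsort() comparator: order VkDescriptorSetLayoutBinding entries by their
 * binding number so the layout below can be built in binding order.
 */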
static int
binding_compare(const void *av, const void *bv)
{
   const VkDescriptorSetLayoutBinding *a =
      (const VkDescriptorSetLayoutBinding *) av;
   const VkDescriptorSetLayoutBinding *b =
      (const VkDescriptorSetLayoutBinding *) bv;

   return (a->binding < b->binding) ? -1 : (a->binding > b->binding) ? 1 : 0;
}

static VkDescriptorSetLayoutBinding *
create_sorted_bindings(const VkDescriptorSetLayoutBinding *bindings,
                       unsigned count)
{
   VkDescriptorSetLayoutBinding *sorted_bindings =
      malloc(count * sizeof(VkDescriptorSetLayoutBinding));
   if (!sorted_bindings)
      return NULL;

   memcpy(sorted_bindings, bindings,
          count * sizeof(VkDescriptorSetLayoutBinding));

   qsort(sorted_bindings, count, sizeof(VkDescriptorSetLayoutBinding),
         binding_compare);

   return sorted_bindings;
}

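/* Size in bytes that one descriptor of the given type occupies in the set's
 * GPU-visible backing memory. Dynamic buffers are stored out-of-line in the
 * per-set dynamic_descriptors array, so they take no space here.
 */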
static uint32_t
descriptor_size(enum VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return 0;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      /* 64-bit pointer */
      return 8;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return A6XX_TEX_CONST_DWORDS * 4;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* texture const + tu_sampler struct (includes border color) */
      return A6XX_TEX_CONST_DWORDS * 4 + sizeof(struct tu_sampler);
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return sizeof(struct tu_sampler);
   default:
      unreachable("unknown descriptor type\n");
      return 0;
   }
}

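/* Build the driver-side set layout: a tu_descriptor_set_layout header, one
 * binding entry per binding number (including any gaps), and the immutable
 * samplers copied to the end of the same allocation.
 */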
VkResult
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if ((pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset = sizeof(struct tu_descriptor_set_layout) +
      (max_binding + 1) * sizeof(set_layout->binding[0]);
   uint32_t size = samplers_offset + immutable_sampler_count * sizeof(struct tu_sampler);

   set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the samplers at the end of the struct */
   struct tu_sampler *samplers = (void*) &set_layout->binding[max_binding + 1];

   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      vk_free2(&device->alloc, pAllocator, set_layout);
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set_layout->binding_count = max_binding + 1;
   set_layout->shader_stages = 0;
   set_layout->dynamic_shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;

   memset(set_layout->binding, 0,
          size - sizeof(struct tu_descriptor_set_layout));

   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;
      uint32_t alignment = 4;
      unsigned binding_buffer_count = 1;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
         set_layout->binding[b].dynamic_offset_count = 1;
         break;
      default:
         break;
      }

      set_layout->size = align(set_layout->size, alignment);
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].buffer_offset = buffer_count;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         /* It's ill-defined how many immutable samplers would be valid for a
          * variable-count binding, so reject them here.
          */
         assert(!binding->pImmutableSamplers);
         assert(binding->binding == max_binding);

         set_layout->has_variable_descriptors = true;
      }

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers_offset = samplers_offset;
         set_layout->has_immutable_samplers = true;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *tu_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct tu_sampler) * binding->descriptorCount;
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      buffer_count += binding->descriptorCount * binding_buffer_count;
      dynamic_offset_count += binding->descriptorCount *
         set_layout->binding[b].dynamic_offset_count;
      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

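/* Report whether a hypothetical set layout could be created, and the largest
 * variable descriptor count that would still fit, without allocating
 * anything.
 */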
void
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount);
   if (!bindings) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      if (size && !align_u64(size, descriptor_alignment)) {
         supported = false;
      }
      size = align_u64(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

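/* Gather the set layouts, assign each set its slice of the dynamic offset
 * array, and accumulate a SHA-1 over the data that defines the layout
 * (bindings, immutable samplers, push constant size).
 */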
VkResult
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;
   struct mesa_sha1 ctx;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   _mesa_sha1_init(&ctx);
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
         if (set_layout->binding[b].immutable_samplers_offset)
            _mesa_sha1_update(
               &ctx,
               tu_immutable_samplers(set_layout, set_layout->binding + b),
               set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
      }
      _mesa_sha1_update(
         &ctx, set_layout->binding,
         sizeof(set_layout->binding[0]) * set_layout->binding_count);
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   _mesa_sha1_update(&ctx, &layout->push_constant_size,
                     sizeof(layout->push_constant_size));
   _mesa_sha1_final(&ctx, layout->sha1);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;
   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

#define EMPTY 1

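/* Allocate one descriptor set. Host-side bookkeeping comes either from the
 * pool's linear host-memory arena (pools created without
 * FREE_DESCRIPTOR_SET_BIT) or from a regular heap allocation. GPU-visible
 * descriptor memory is carved out of the pool's BO, linearly when possible
 * and otherwise by a first-fit search through the existing entries.
 */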
static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   uint32_t buffer_count = layout->buffer_count;
   if (variable_count) {
      unsigned stride = 1;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 0;
      buffer_count = layout->binding[layout->binding_count - 1].buffer_offset +
                     *variable_count * stride;
   }
   unsigned range_offset = sizeof(struct tu_descriptor_set) +
      sizeof(struct tu_bo *) * buffer_count;
   unsigned mem_size = range_offset +
      sizeof(struct tu_descriptor_range) * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors = (struct tu_descriptor_range*)((uint8_t*)set + range_offset);
   }

   set->layout = layout;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      if (layout->binding[layout->binding_count - 1].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
         stride = 1;

      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_free2(&device->alloc, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* Try to allocate linearly first, so that we don't spend time looking
       * for gaps if the app only allocates & resets via the pool.
       */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t*)(pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_free2(&device->alloc, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t*)(pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }

   *out_set = set;
   return VK_SUCCESS;
}

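/* Tear down a single descriptor set. Only valid for pools that allocate sets
 * individually (no host-memory arena): optionally return the set's slice of
 * the pool BO by dropping its entry, then free the host-side struct.
 */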
static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t*)set->mapped_ptr - (uint8_t*)pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_free2(&device->alloc, NULL, set);
}

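/* Create a pool: size one BO to hold the worst-case GPU descriptor memory
 * implied by pPoolSizes and, when sets cannot be freed individually, also
 * reserve a linear host-memory arena large enough for maxSets set structs.
 */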
VkResult
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, range_count = 0;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
      default:
         break;
      }

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo*) * bo_count;
      host_size += sizeof(struct tu_descriptor_range) * range_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(pool, 0, sizeof(*pool));

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   if (bo_size) {
      VkResult ret;

      ret = tu_bo_init_new(device, &pool->bo, bo_size);
      assert(ret == VK_SUCCESS);

      ret = tu_bo_map(device, &pool->bo);
      assert(ret == VK_SUCCESS);
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

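/* Allocate descriptorSetCount sets from the pool. On failure, any sets
 * created so far are freed and every output handle is reset to
 * VK_NULL_HANDLE, as the spec requires.
 */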
VkResult
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VkResult
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}

static void write_texel_buffer_descriptor(struct tu_device *device,
                                          struct tu_cmd_buffer *cmd_buffer,
                                          unsigned *dst,
                                          struct tu_bo **buffer_list,
                                          const VkBufferView _buffer_view)
{
   tu_finishme("texel buffer descriptor");
}

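/* Plain uniform/storage buffer descriptor: the buffer's 64-bit iova split
 * into two dwords. The backing BO is either tracked in the command buffer's
 * BO list or recorded in the set's buffer list.
 */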
static void write_buffer_descriptor(struct tu_device *device,
                                    struct tu_cmd_buffer *cmd_buffer,
                                    unsigned *dst,
                                    struct tu_bo **buffer_list,
                                    const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);
   uint64_t va = buffer->bo->iova;

   va += buffer_info->offset + buffer->bo_offset;
   dst[0] = va;
   dst[1] = va >> 32;

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, buffer->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = buffer->bo;
}

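/* Dynamic uniform/storage buffers are not written into the set's GPU memory;
 * the resolved address and size go into the set's dynamic_descriptors array
 * instead, so the dynamic offset can be applied at bind time.
 */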
static void write_dynamic_buffer_descriptor(struct tu_device *device,
                                            struct tu_descriptor_range *range,
                                            struct tu_bo **buffer_list,
                                            const VkDescriptorBufferInfo *buffer_info)
{
   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);
   uint64_t va = buffer->bo->iova;
   unsigned size = buffer_info->range;

   if (buffer_info->range == VK_WHOLE_SIZE)
      size = buffer->size - buffer_info->offset;

   va += buffer_info->offset + buffer->bo_offset;
   range->va = va;
   range->size = size;

   *buffer_list = buffer->bo;
}

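/* Copy the image view's prebuilt texture descriptor (or the storage-image
 * variant for VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) into the set.
 */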
static void
write_image_descriptor(struct tu_device *device,
                       struct tu_cmd_buffer *cmd_buffer,
                       unsigned *dst,
                       struct tu_bo **buffer_list,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);
   uint32_t *descriptor;

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      descriptor = iview->storage_descriptor;
   } else {
      descriptor = iview->descriptor;
   }

   memcpy(dst, descriptor, sizeof(iview->descriptor));

   if (cmd_buffer)
      tu_bo_list_add(&cmd_buffer->bo_list, iview->image->bo, MSM_SUBMIT_BO_READ);
   else
      *buffer_list = iview->image->bo;
}

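/* Combined image+sampler: the image descriptor comes first, then (unless the
 * binding uses immutable samplers) the tu_sampler state is written at
 * sampler_offset bytes into the same descriptor slot.
 */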
static void
write_combined_image_sampler_descriptor(struct tu_device *device,
                                        struct tu_cmd_buffer *cmd_buffer,
                                        unsigned sampler_offset,
                                        unsigned *dst,
                                        struct tu_bo **buffer_list,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(device, cmd_buffer, dst, buffer_list,
                          descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler) {
      memcpy(dst + sampler_offset / sizeof(*dst), sampler, sizeof(*sampler));
   }
}

static void
write_sampler_descriptor(struct tu_device *device,
                         unsigned *dst,
                         const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   memcpy(dst, sampler, sizeof(*sampler));
}

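/* Core of vkUpdateDescriptorSets: for each write, locate the binding's slice
 * of the set's mapped GPU memory (binding offset plus dstArrayElement times
 * the per-descriptor size, all in dwords) and fill each element with the
 * type-specific helper above; then apply descriptor copies, handling dynamic
 * buffers through the out-of-line dynamic_descriptors array. dstSetOverride,
 * when set, redirects every write to that set instead of writeset->dstSet.
 */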
void
tu_update_descriptor_sets(struct tu_device *device,
                          struct tu_cmd_buffer *cmd_buffer,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set,
                     dstSetOverride ? dstSetOverride : writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      struct tu_bo **buffer_list = set->descriptors;

      const struct tu_sampler *samplers = tu_immutable_samplers(set->layout, binding_layout);

      ptr += binding_layout->offset / 4;

      ptr += binding_layout->size * writeset->dstArrayElement / 4;
      buffer_list += binding_layout->buffer_offset;
      buffer_list += writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch (writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
                                            buffer_list, writeset->pBufferInfo + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                    writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
                                          writeset->pTexelBufferView[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
                                   writeset->descriptorType,
                                   writeset->pImageInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(device, cmd_buffer,
                                                    A6XX_TEX_CONST_DWORDS * 4,
                                                    ptr, buffer_list,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);
            if (binding_layout->immutable_samplers_offset) {
               const unsigned idx = writeset->dstArrayElement + j;
               memcpy((char*)ptr + A6XX_TEX_CONST_DWORDS * 4, &samplers[idx],
                      sizeof(struct tu_sampler));
            }
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            write_sampler_descriptor(device, ptr, writeset->pImageInfo + j);
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
         ++buffer_list;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;
      struct tu_bo **src_buffer_list = src_set->descriptors;
      struct tu_bo **dst_buffer_list = dst_set->descriptors;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      src_buffer_list += src_binding_layout->buffer_offset;
      src_buffer_list += copyset->srcArrayElement;

      dst_buffer_list += dst_binding_layout->buffer_offset;
      dst_buffer_list += copyset->dstArrayElement;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            struct tu_descriptor_range *src_range, *dst_range;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            src_range = src_set->dynamic_descriptors + src_idx;
            dst_range = dst_set->dynamic_descriptors + dst_idx;
            *dst_range = *src_range;
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }
         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;

         if (src_binding_layout->type != VK_DESCRIPTOR_TYPE_SAMPLER) {
            /* Sampler descriptors don't have a buffer list. */
            dst_buffer_list[j] = src_buffer_list[j];
         }
      }
   }
}

void
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   tu_update_descriptor_sets(device, NULL, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}

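/* Descriptor update templates are not implemented yet: the object is
 * allocated so the handle round-trips, but the template entries are not
 * recorded (see tu_stub below) and updates through a template are no-ops.
 */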
VkResult
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_alloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   tu_use_args(set_layout);
   tu_stub();
   return VK_SUCCESS;
}

void
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_free2(&device->alloc, pAllocator, templ);
}

void
tu_update_descriptor_set_with_template(
   struct tu_device *device,
   struct tu_cmd_buffer *cmd_buffer,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);
   tu_use_args(templ);
}

void
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, NULL, set,
                                          descriptorUpdateTemplate, pData);
}

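/* Sampler Y'CbCr conversion is not supported: creation hands back a null
 * handle and destruction is a no-op.
 */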
VkResult
tu_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   *pYcbcrConversion = VK_NULL_HANDLE;
   return VK_SUCCESS;
}

void
tu_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   /* Do nothing. */
}