anv: Rework arguments to anv_descriptor_set_write_*
[mesa.git] / src / intel / vulkan / anv_descriptor_set.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"
/*
 * Descriptor set layouts.
 */

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         } else {
            anv_foreach_stage(s, binding->stageFlags)
               surface_count[s] += binding->descriptorCount;
         }
         break;

      default:
         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      /* Our maximum binding table size is 250 and we need to reserve 8 for
       * render targets. 240 is a nice round number.
       */
      if (surface_count[s] >= 240)
         supported = false;
   }

   pSupport->supported = supported;
}

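/* Illustrative sketch (not part of this file): how an application might
 * query layout support before creating the layout. The binding values are
 * hypothetical, chosen only for the example.
 *
 *    VkDescriptorSetLayoutBinding binding = {
 *       .binding = 0,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
 *       .descriptorCount = 128,
 *       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
 *    };
 *    VkDescriptorSetLayoutCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
 *       .bindingCount = 1,
 *       .pBindings = &binding,
 *    };
 *    VkDescriptorSetLayoutSupport support = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
 *    };
 *    vkGetDescriptorSetLayoutSupport(device, &info, &support);
 *    // support.supported goes false once any stage needs 240+ surfaces.
 */
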
VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer. This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

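   /* Illustrative note (not part of this file): the stash above is in effect
    * a bucket sort. If pBindings arrives with binding numbers { 3, 0, 2 },
    * the loop drops each pointer into binding[3], binding[0] and binding[2],
    * and the pass below then visits them in increasing binding order;
    * binding[1] stays NULL and is skipped.
    */
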
   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      /* We temporarily stashed the pointer to the binding in the
       * immutable_samplers pointer. Now that we've pulled it back out
       * again, we reset immutable_samplers to NULL.
       */
      set_layout->binding[b].immutable_samplers = NULL;

      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }

         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++)
               set_layout->binding[b].immutable_samplers[i] =
                  anv_sampler_from_handle(binding->pImmutableSamplers[i]);
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
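
/* Illustrative sketch (not part of this file): the index bookkeeping above
 * for a hypothetical layout. Given bindings
 *
 *    binding 0: UNIFORM_BUFFER_DYNAMIC x1
 *    binding 1: SAMPLED_IMAGE          x4
 *    binding 2: STORAGE_BUFFER         x2
 *
 * the second pass assigns, in binding order:
 *
 *    binding 0: descriptor_index 0, buffer_index 0, dynamic_offset_index 0
 *    binding 1: descriptor_index 1 (no buffer or dynamic-offset slot)
 *    binding 2: descriptor_index 5, buffer_index 1
 *
 * so set_layout->size == 7, buffer_count == 3, dynamic_offset_count == 1.
 */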

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   _mesa_sha1_update(ctx, sampler->conversion,
                     sizeof(*sampler->conversion));
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_index);
   _mesa_sha1_update(ctx, layout->stage, sizeof(layout->stage));

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->size);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state. The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
 * and the free list lets us recycle blocks for case 2).
 */
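
/* Illustrative sketch (not part of this file): why case 2) above works.
 * Sets with the same layout have the same size, so a freed block can always
 * satisfy the next allocation of that layout:
 *
 *    free(set_a);              // block of size N goes on the free list
 *    alloc(layout_of_set_a);   // first-fit walk finds that size-N block
 *
 * With mixed layouts, the first-fit walk can fail even though enough total
 * space is free, which is when VK_ERROR_FRAGMENTED_POOL is returned below.
 */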

#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         /* fall through */
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

501
502 struct pool_free_list_entry {
503 uint32_t next;
504 uint32_t size;
505 };
506
507 static VkResult
508 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
509 uint32_t size,
510 struct anv_descriptor_set **set)
511 {
512 if (size <= pool->size - pool->next) {
513 *set = (struct anv_descriptor_set *) (pool->data + pool->next);
514 pool->next += size;
515 return VK_SUCCESS;
516 } else {
517 struct pool_free_list_entry *entry;
518 uint32_t *link = &pool->free_list;
519 for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
520 entry = (struct pool_free_list_entry *) (pool->data + f);
521 if (size <= entry->size) {
522 *link = entry->next;
523 *set = (struct anv_descriptor_set *) entry;
524 return VK_SUCCESS;
525 }
526 link = &entry->next;
527 }
528
529 if (pool->free_list != EMPTY) {
530 return vk_error(VK_ERROR_FRAGMENTED_POOL);
531 } else {
532 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
533 }
534 }
535 }
536
537 static void
538 anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
539 struct anv_descriptor_set *set)
540 {
541 /* Put the descriptor set allocation back on the free list. */
542 const uint32_t index = (char *) set - pool->data;
543 if (index + set->size == pool->next) {
544 pool->next = index;
545 } else {
546 struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
547 entry->next = pool->free_list;
548 entry->size = set->size;
549 pool->free_list = (char *) entry - pool->data;
550 }
551 }
552
553 struct surface_state_free_list_entry {
554 void *next;
555 struct anv_state state;
556 };
557
558 static struct anv_state
559 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
560 {
561 struct surface_state_free_list_entry *entry =
562 pool->surface_state_free_list;
563
564 if (entry) {
565 struct anv_state state = entry->state;
566 pool->surface_state_free_list = entry->next;
567 assert(state.alloc_size == 64);
568 return state;
569 } else {
570 return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
571 }
572 }
573
574 static void
575 anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
576 struct anv_state state)
577 {
578 /* Put the buffer view surface state back on the free list. */
579 struct surface_state_free_list_entry *entry = state.map;
580 entry->next = pool->surface_state_free_list;
581 entry->state = state;
582 pool->surface_state_free_list = entry;
583 }
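
/* Illustrative note (not part of this file): the surface state free list
 * above is intrusive; each entry lives in the freed state's own CPU
 * mapping (state.map), so no extra allocation is needed. After freeing
 * states a then b, the list looks like:
 *
 *    pool->surface_state_free_list -> b.map { next = a.map, state = b }
 *                                     a.map { next = NULL,  state = a }
 */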

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
   if (result != VK_SUCCESS)
      return result;

   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->size = size;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed. However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      set->buffer_views[b].surface_state =
         anv_descriptor_pool_alloc_state(pool);
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   for (uint32_t b = 0; b < set->buffer_count; b++)
      anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);

   anv_descriptor_pool_free_set(pool, set);
}

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void
anv_descriptor_set_write_image_view(struct anv_device *device,
                                    struct anv_descriptor_set *set,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}

void
anv_descriptor_set_write_buffer_view(struct anv_device *device,
                                     struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

void
anv_descriptor_set_write_buffer(struct anv_device *device,
                                struct anv_descriptor_set *set,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->range = anv_buffer_get_range(buffer, offset, range);
      bview->address = anv_address_add(buffer->address, offset);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer. Otherwise it
       * will be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->address, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}
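
/* Illustrative sketch (not part of this file): the anv_descriptor_set_write_*
 * helpers above take an explicit (device, set) pair rather than a
 * VkWriteDescriptorSet, so the same code can serve vkUpdateDescriptorSets
 * below (alloc_stream == NULL, surface state comes from the descriptor pool)
 * and a push-descriptor path that passes a command buffer's state stream.
 * A hypothetical direct call, with made-up buffer/binding values:
 *
 *    anv_descriptor_set_write_buffer(device, set,
 *                                    NULL, // not a push; use pool state
 *                                    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *                                    buffer, binding, element,
 *                                    0, VK_WHOLE_SIZE);
 */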

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}

/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}
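
/* Illustrative sketch (not part of this file): how an application might
 * describe one entry of the update template this function records. The
 * struct and values are hypothetical, chosen only for the example.
 *
 *    struct my_descriptor_data { VkDescriptorBufferInfo ubo; };
 *
 *    VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset = offsetof(struct my_descriptor_data, ubo),
 *       .stride = sizeof(VkDescriptorBufferInfo),
 *    };
 *
 * anv_descriptor_set_write_template above then walks entries like this
 * one, reading descriptorCount infos from pData at offset + j * stride.
 */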

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(device, set, NULL, template, pData);
}